merge with stable to begin 4.5 freeze
# no-check-commit because it's a clean merge
--- a/.hgignore Mon Jan 08 16:07:51 2018 -0800
+++ b/.hgignore Mon Jan 22 17:53:02 2018 -0500
@@ -24,6 +24,7 @@
tests/.hypothesis
tests/hypothesis-generated
tests/annotated
+tests/exceptions
tests/*.err
tests/htmlcov
build
@@ -55,6 +56,8 @@
locale/*/LC_MESSAGES/hg.mo
hgext/__index__.py
+rust/target/
+
# Generated wheels
wheelhouse/
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/.jshintrc Mon Jan 22 17:53:02 2018 -0500
@@ -0,0 +1,11 @@
+{
+ // Enforcing
+ "eqeqeq" : true, // true: Require triple equals (===) for comparison
+ "forin" : true, // true: Require filtering for..in loops with obj.hasOwnProperty()
+ "freeze" : true, // true: prohibits overwriting prototypes of native objects such as Array, Date etc.
+ "nonbsp" : true, // true: Prohibit "non-breaking whitespace" characters.
+ "undef" : true, // true: Require all non-global variables to be declared (prevents global leaks)
+
+ // Environments
+ "browser" : true // Web Browser (window, document, etc)
+}
--- a/Makefile Mon Jan 08 16:07:51 2018 -0800
+++ b/Makefile Mon Jan 22 17:53:02 2018 -0500
@@ -124,7 +124,7 @@
format-c:
clang-format --style file -i \
- `hg files 'set:(**.c or **.h) and not "listfile:contrib/clang-format-blacklist"'`
+ `hg files 'set:(**.c or **.cc or **.h) and not "listfile:contrib/clang-format-blacklist"'`
update-pot: i18n/hg.pot
--- a/contrib/bash_completion Mon Jan 08 16:07:51 2018 -0800
+++ b/contrib/bash_completion Mon Jan 22 17:53:02 2018 -0500
@@ -296,7 +296,7 @@
merge)
_hg_labels
;;
- commit|ci|record)
+ commit|ci|record|amend)
_hg_status "mar"
;;
remove|rm)
@@ -309,7 +309,7 @@
_hg_status "mar"
;;
revert)
- _hg_debugpathcomplete
+ _hg_status "mard"
;;
clone)
local count=$(_hg_count_non_option)
--- a/contrib/check-code.py Mon Jan 08 16:07:51 2018 -0800
+++ b/contrib/check-code.py Mon Jan 22 17:53:02 2018 -0500
@@ -135,7 +135,6 @@
(r'if\s*!', "don't use '!' to negate exit status"),
(r'/dev/u?random', "don't use entropy, use /dev/zero"),
(r'do\s*true;\s*done', "don't use true as loop body, use sleep 0"),
- (r'^( *)\t', "don't use tabs to indent"),
(r'sed (-e )?\'(\d+|/[^/]*/)i(?!\\\n)',
"put a backslash-escaped newline after sed 'i' command"),
(r'^diff *-\w*[uU].*$\n(^ \$ |^$)', "prefix diff -u/-U with cmp"),
@@ -148,7 +147,9 @@
(r'\bsed\b.*[^\\]\\n', "don't use 'sed ... \\n', use a \\ and a newline"),
(r'env.*-u', "don't use 'env -u VAR', use 'unset VAR'"),
(r'cp.* -r ', "don't use 'cp -r', use 'cp -R'"),
- (r'grep.* -[ABC] ', "don't use grep's context flags"),
+ (r'grep.* -[ABC]', "don't use grep's context flags"),
+ (r'find.*-printf',
+ "don't use 'find -printf', it doesn't exist on BSD find(1)"),
],
# warnings
[
@@ -165,7 +166,6 @@
(r"<<(\S+)((.|\n)*?\n\1)", rephere),
]
-winglobmsg = "use (glob) to match Windows paths too"
uprefix = r"^ \$ "
utestpats = [
[
@@ -181,25 +181,11 @@
(uprefix + r'.*:\.\S*/', "x:.y in a path does not work on msys, rewrite "
"as x://.y, or see `hg log -k msys` for alternatives", r'-\S+:\.|' #-Rxxx
'# no-msys'), # in test-pull.t which is skipped on windows
- (r'^ saved backup bundle to \$TESTTMP.*\.hg$', winglobmsg),
- (r'^ changeset .* references (corrupted|missing) \$TESTTMP/.*[^)]$',
- winglobmsg),
- (r'^ pulling from \$TESTTMP/.*[^)]$', winglobmsg,
- '\$TESTTMP/unix-repo$'), # in test-issue1802.t which skipped on windows
- (r'^ reverting (?!subrepo ).*/.*[^)]$', winglobmsg),
- (r'^ cloning subrepo \S+/.*[^)]$', winglobmsg),
- (r'^ pushing to \$TESTTMP/.*[^)]$', winglobmsg),
- (r'^ pushing subrepo \S+/\S+ to.*[^)]$', winglobmsg),
- (r'^ moving \S+/.*[^)]$', winglobmsg),
- (r'^ no changes made to subrepo since.*/.*[^)]$', winglobmsg),
- (r'^ .*: largefile \S+ not available from file:.*/.*[^)]$', winglobmsg),
- (r'^ .*file://\$TESTTMP',
- 'write "file:/*/$TESTTMP" + (glob) to match on windows too'),
(r'^ [^$>].*27\.0\.0\.1',
'use $LOCALIP not an explicit loopback address'),
- (r'^ [^$>].*\$LOCALIP.*[^)]$',
+ (r'^ (?![>$] ).*\$LOCALIP.*[^)]$',
'mark $LOCALIP output lines with (glob) to help tests in BSD jails'),
- (r'^ (cat|find): .*: No such file or directory',
+ (r'^ (cat|find): .*: \$ENOENT\$',
'use test -f to test for file existence'),
(r'^ diff -[^ -]*p',
"don't use (external) diff with -p for portability"),
@@ -223,6 +209,7 @@
]
]
+# transform plain test rules to unified test's
for i in [0, 1]:
for tp in testpats[i]:
p = tp[0]
@@ -233,6 +220,11 @@
p = r"^ [$>] .*(%s)" % p
utestpats[i].append((p, m) + tp[2:])
+# don't transform the following rules:
+# " > \t" and " \t" should be allowed in unified tests
+testpats[0].append((r'^( *)\t', "don't use tabs to indent"))
+utestpats[0].append((r'^( ?)\t', "don't use tabs to indent"))
+
utestfilters = [
(r"<<(\S+)((.|\n)*?\n > \1)", rephere),
(r"( +)(#([^!][^\n]*\S)?)", repcomment),
--- a/contrib/debian/copyright Mon Jan 08 16:07:51 2018 -0800
+++ b/contrib/debian/copyright Mon Jan 22 17:53:02 2018 -0500
@@ -3,7 +3,7 @@
Source: https://www.mercurial-scm.org/
Files: *
-Copyright: 2005-2017, Matt Mackall <mpm@selenic.com> and others.
+Copyright: 2005-2018, Matt Mackall <mpm@selenic.com> and others.
License: GPL-2+
This program is free software; you can redistribute it
and/or modify it under the terms of the GNU General Public
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/contrib/fuzz/Makefile Mon Jan 22 17:53:02 2018 -0500
@@ -0,0 +1,20 @@
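+# "make" builds the standalone bdiff binary with clang's
+# -fsanitize=fuzzer-no-link,address instrumentation; the oss-fuzz target
+# instead relies on $CC/$CXX/$CFLAGS/$CXXFLAGS/$OUT and -lFuzzingEngine as
+# provided by the oss-fuzz build environment.
+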
+bdiff.o: ../../mercurial/bdiff.c
+ clang -g -O1 -fsanitize=fuzzer-no-link,address -c -o bdiff.o \
+ ../../mercurial/bdiff.c
+
+bdiff: bdiff.cc bdiff.o
+ clang -DHG_FUZZER_INCLUDE_MAIN=1 -g -O1 -fsanitize=fuzzer-no-link,address \
+ -I../../mercurial bdiff.cc bdiff.o -o bdiff
+
+bdiff-oss-fuzz.o: ../../mercurial/bdiff.c
+ $$CC $$CFLAGS -c -o bdiff-oss-fuzz.o ../../mercurial/bdiff.c
+
+bdiff_fuzzer: bdiff.cc bdiff-oss-fuzz.o
+ $$CXX $$CXXFLAGS -std=c++11 -I../../mercurial bdiff.cc \
+ bdiff-oss-fuzz.o -lFuzzingEngine -o $$OUT/bdiff_fuzzer
+
+all: bdiff
+
+oss-fuzz: bdiff_fuzzer
+
+.PHONY: all oss-fuzz
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/contrib/fuzz/bdiff.cc Mon Jan 22 17:53:02 2018 -0500
@@ -0,0 +1,49 @@
+/*
+ * bdiff.cc - fuzzer harness for bdiff.c
+ *
+ * Copyright 2018, Google Inc.
+ *
+ * This software may be used and distributed according to the terms of
+ * the GNU General Public License, incorporated herein by reference.
+ */
+#include <stdlib.h>
+
+extern "C" {
+#include "bdiff.h"
+
+int LLVMFuzzerTestOneInput(const uint8_t *Data, size_t Size)
+{
+ if (!Size) {
+ return 0;
+ }
+ // figure out a random point in [0, Size] to split our input.
+ size_t split = Data[0] / 255.0 * Size;
+
+ // left input to diff is data[1:split]
+ const uint8_t *left = Data + 1;
+ // which has len split-1
+ size_t left_size = split - 1;
+ // right starts at the next byte after left ends
+ const uint8_t *right = left + left_size;
+ size_t right_size = Size - split;
+
+ struct bdiff_line *a, *b;
+ int an = bdiff_splitlines((const char *)left, split - 1, &a);
+ int bn = bdiff_splitlines((const char *)right, right_size, &b);
+ struct bdiff_hunk l;
+ bdiff_diff(a, an, b, bn, &l);
+ free(a);
+ free(b);
+ bdiff_freehunks(l.next);
+ return 0; // Non-zero return values are reserved for future use.
+}
+
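+// When built with -DHG_FUZZER_INCLUDE_MAIN (the standalone "bdiff" Makefile
+// target), provide a trivial main() so the harness can be smoke-tested
+// without a libFuzzer driver.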
+#ifdef HG_FUZZER_INCLUDE_MAIN
+int main(int argc, char **argv)
+{
+ const char data[] = "asdf";
+ return LLVMFuzzerTestOneInput((const uint8_t *)data, 4);
+}
+#endif
+
+} // extern "C"
--- a/contrib/perf.py Mon Jan 08 16:07:51 2018 -0800
+++ b/contrib/perf.py Mon Jan 22 17:53:02 2018 -0500
@@ -25,6 +25,7 @@
import random
import struct
import sys
+import threading
import time
from mercurial import (
changegroup,
@@ -488,6 +489,122 @@
timer(d)
fm.end()
+@command('perfbundleread', formatteropts, 'BUNDLE')
+def perfbundleread(ui, repo, bundlepath, **opts):
+ """Benchmark reading of bundle files.
+
+ This command is meant to isolate the I/O part of bundle reading as
+ much as possible.
+ """
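+ # Illustrative invocation (hypothetical): create a bundle with
+ # `hg bundle --all repo.hg`, then run `hg perfbundleread repo.hg`
+ # with contrib/perf.py enabled as an extension.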
+ from mercurial import (
+ bundle2,
+ exchange,
+ streamclone,
+ )
+
+ def makebench(fn):
+ def run():
+ with open(bundlepath, 'rb') as fh:
+ bundle = exchange.readbundle(ui, fh, bundlepath)
+ fn(bundle)
+
+ return run
+
+ def makereadnbytes(size):
+ def run():
+ with open(bundlepath, 'rb') as fh:
+ bundle = exchange.readbundle(ui, fh, bundlepath)
+ while bundle.read(size):
+ pass
+
+ return run
+
+ def makestdioread(size):
+ def run():
+ with open(bundlepath, 'rb') as fh:
+ while fh.read(size):
+ pass
+
+ return run
+
+ # bundle1
+
+ def deltaiter(bundle):
+ for delta in bundle.deltaiter():
+ pass
+
+ def iterchunks(bundle):
+ for chunk in bundle.getchunks():
+ pass
+
+ # bundle2
+
+ def forwardchunks(bundle):
+ for chunk in bundle._forwardchunks():
+ pass
+
+ def iterparts(bundle):
+ for part in bundle.iterparts():
+ pass
+
+ def iterpartsseekable(bundle):
+ for part in bundle.iterparts(seekable=True):
+ pass
+
+ def seek(bundle):
+ for part in bundle.iterparts(seekable=True):
+ part.seek(0, os.SEEK_END)
+
+ def makepartreadnbytes(size):
+ def run():
+ with open(bundlepath, 'rb') as fh:
+ bundle = exchange.readbundle(ui, fh, bundlepath)
+ for part in bundle.iterparts():
+ while part.read(size):
+ pass
+
+ return run
+
+ benches = [
+ (makestdioread(8192), 'read(8k)'),
+ (makestdioread(16384), 'read(16k)'),
+ (makestdioread(32768), 'read(32k)'),
+ (makestdioread(131072), 'read(128k)'),
+ ]
+
+ with open(bundlepath, 'rb') as fh:
+ bundle = exchange.readbundle(ui, fh, bundlepath)
+
+ if isinstance(bundle, changegroup.cg1unpacker):
+ benches.extend([
+ (makebench(deltaiter), 'cg1 deltaiter()'),
+ (makebench(iterchunks), 'cg1 getchunks()'),
+ (makereadnbytes(8192), 'cg1 read(8k)'),
+ (makereadnbytes(16384), 'cg1 read(16k)'),
+ (makereadnbytes(32768), 'cg1 read(32k)'),
+ (makereadnbytes(131072), 'cg1 read(128k)'),
+ ])
+ elif isinstance(bundle, bundle2.unbundle20):
+ benches.extend([
+ (makebench(forwardchunks), 'bundle2 forwardchunks()'),
+ (makebench(iterparts), 'bundle2 iterparts()'),
+ (makebench(iterpartsseekable), 'bundle2 iterparts() seekable'),
+ (makebench(seek), 'bundle2 part seek()'),
+ (makepartreadnbytes(8192), 'bundle2 part read(8k)'),
+ (makepartreadnbytes(16384), 'bundle2 part read(16k)'),
+ (makepartreadnbytes(32768), 'bundle2 part read(32k)'),
+ (makepartreadnbytes(131072), 'bundle2 part read(128k)'),
+ ])
+ elif isinstance(bundle, streamclone.streamcloneapplier):
+ raise error.Abort('stream clone bundles not supported')
+ else:
+ raise error.Abort('unhandled bundle type: %s' % type(bundle))
+
+ for fn, title in benches:
+ timer, fm = gettimer(ui, opts)
+ timer(fn, title=title)
+ fm.end()
+
@command('perfchangegroupchangelog', formatteropts +
[('', 'version', '02', 'changegroup version'),
('r', 'rev', '', 'revisions to add to changegroup')])
@@ -525,8 +642,8 @@
dirstate = repo.dirstate
'a' in dirstate
def d():
- dirstate.dirs()
- del dirstate._map.dirs
+ dirstate.hasdir('a')
+ del dirstate._map._dirs
timer(d)
fm.end()
@@ -545,8 +662,8 @@
timer, fm = gettimer(ui, opts)
"a" in repo.dirstate
def d():
- "a" in repo.dirstate._map.dirs
- del repo.dirstate._map.dirs
+ repo.dirstate.hasdir("a")
+ del repo.dirstate._map._dirs
timer(d)
fm.end()
@@ -569,7 +686,7 @@
def d():
dirstate._map.dirfoldmap.get('a')
del dirstate._map.dirfoldmap
- del dirstate._map.dirs
+ del dirstate._map._dirs
timer(d)
fm.end()
@@ -817,11 +934,25 @@
timer(d)
fm.end()
+def _bdiffworker(q, ready, done):
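+ # Worker loop for perfbdiff --threads: diff (old, new) text pairs pulled
+ # from the queue; a None entry ends the current timing round, after which
+ # the worker waits on `ready` until the next round or until `done` is set
+ # during teardown.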
+ while not done.is_set():
+ pair = q.get()
+ while pair is not None:
+ mdiff.textdiff(*pair)
+ q.task_done()
+ pair = q.get()
+ q.task_done() # for the None one
+ with ready:
+ ready.wait()
+
@command('perfbdiff', revlogopts + formatteropts + [
('', 'count', 1, 'number of revisions to test (when using --startrev)'),
- ('', 'alldata', False, 'test bdiffs for all associated revisions')],
+ ('', 'alldata', False, 'test bdiffs for all associated revisions'),
+ ('', 'threads', 0, 'number of threads to use (disable with 0)'),
+ ],
+
'-c|-m|FILE REV')
-def perfbdiff(ui, repo, file_, rev=None, count=None, **opts):
+def perfbdiff(ui, repo, file_, rev=None, count=None, threads=0, **opts):
"""benchmark a bdiff between revisions
By default, benchmark a bdiff between its delta parent and itself.
@@ -867,14 +998,39 @@
dp = r.deltaparent(rev)
textpairs.append((r.revision(dp), r.revision(rev)))
- def d():
- for pair in textpairs:
- mdiff.textdiff(*pair)
-
+ withthreads = threads > 0
+ if not withthreads:
+ def d():
+ for pair in textpairs:
+ mdiff.textdiff(*pair)
+ else:
+ q = util.queue()
+ for i in xrange(threads):
+ q.put(None)
+ ready = threading.Condition()
+ done = threading.Event()
+ for i in xrange(threads):
+ threading.Thread(target=_bdiffworker, args=(q, ready, done)).start()
+ q.join()
+ def d():
+ for pair in textpairs:
+ q.put(pair)
+ for i in xrange(threads):
+ q.put(None)
+ with ready:
+ ready.notify_all()
+ q.join()
timer, fm = gettimer(ui, opts)
timer(d)
fm.end()
+ if withthreads:
+ done.set()
+ for i in xrange(threads):
+ q.put(None)
+ with ready:
+ ready.notify_all()
+
@command('perfdiffwd', formatteropts)
def perfdiffwd(ui, repo, **opts):
"""Profile diff of working directory changes"""
--- a/contrib/phabricator.py Mon Jan 08 16:07:51 2018 -0800
+++ b/contrib/phabricator.py Mon Jan 22 17:53:02 2018 -0500
@@ -166,7 +166,7 @@
_differentialrevisiontagre = re.compile('\AD([1-9][0-9]*)\Z')
_differentialrevisiondescre = re.compile(
- '^Differential Revision:\s*(?:.*)D([1-9][0-9]*)$', re.M)
+ '^Differential Revision:\s*(?P<url>(?:.*)D(?P<id>[1-9][0-9]*))$', re.M)
def getoldnodedrevmap(repo, nodelist):
"""find previous nodes that has been sent to Phabricator
@@ -207,7 +207,7 @@
# Check commit message
m = _differentialrevisiondescre.search(ctx.description())
if m:
- toconfirm[node] = (1, set(precnodes), int(m.group(1)))
+ toconfirm[node] = (1, set(precnodes), int(m.group('id')))
# Double check if tags are genuine by collecting all old nodes from
# Phabricator, and expect precursors overlap with it.
@@ -442,7 +442,7 @@
# Create a local tag to note the association, if commit message
# does not have it already
m = _differentialrevisiondescre.search(ctx.description())
- if not m or int(m.group(1)) != newrevid:
+ if not m or int(m.group('id')) != newrevid:
tagname = 'D%d' % newrevid
tags.tag(repo, tagname, ctx.node(), message=None, user=None,
date=None, local=True)
@@ -865,3 +865,17 @@
params = {'objectIdentifier': drev[r'phid'],
'transactions': actions}
callconduit(repo, 'differential.revision.edit', params)
+
+templatekeyword = registrar.templatekeyword()
+
+@templatekeyword('phabreview')
+def template_review(repo, ctx, revcache, **args):
+ """:phabreview: Object describing the review for this changeset.
+ Has attributes `url` and `id`.
+ """
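+ # Hypothetical template usage: `hg log -T '{phabreview.url}\n'` could print
+ # the Differential URL, assuming the templater exposes the dict keys via
+ # dot access.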
+ m = _differentialrevisiondescre.search(ctx.description())
+ if m:
+ return {
+ 'url': m.group('url'),
+ 'id': "D{}".format(m.group('id')),
+ }
--- a/contrib/python3-whitelist Mon Jan 08 16:07:51 2018 -0800
+++ b/contrib/python3-whitelist Mon Jan 22 17:53:02 2018 -0500
@@ -1,5 +1,8 @@
+test-add.t
+test-addremove-similar.t
test-addremove.t
test-ancestor.py
+test-automv.t
test-backwards-remove.t
test-bheads.t
test-bisect2.t
@@ -7,6 +10,7 @@
test-bookmarks-strip.t
test-branch-tag-confict.t
test-casecollision.t
+test-cat.t
test-changelog-exec.t
test-check-commit.t
test-check-execute.t
@@ -14,7 +18,9 @@
test-check-pyflakes.t
test-check-pylint.t
test-check-shbang.t
+test-children.t
test-commit-unresolved.t
+test-completion.t
test-contrib-check-code.t
test-contrib-check-commit.t
test-debugrename.t
@@ -24,6 +30,8 @@
test-diff-newlines.t
test-diff-reverse.t
test-diff-subdir.t
+test-diffdir.t
+test-directaccess.t
test-dirstate-nonnormalset.t
test-doctest.py
test-double-merge.t
@@ -33,11 +41,17 @@
test-empty.t
test-encoding-func.py
test-excessive-merge.t
+test-execute-bit.t
+test-gpg.t
test-hghave.t
test-imports-checker.t
test-issue1089.t
+test-issue1175.t
+test-issue1502.t
+test-issue1802.t
test-issue1877.t
test-issue1993.t
+test-issue522.t
test-issue612.t
test-issue619.t
test-issue672.t
@@ -46,30 +60,72 @@
test-locate.t
test-lrucachedict.py
test-manifest.py
+test-manifest-merging.t
test-match.py
test-merge-default.t
+test-merge-internal-tools-pattern.t
+test-merge-remove.t
+test-merge-revert.t
+test-merge-revert2.t
+test-merge-subrepos.t
+test-merge10.t
test-merge2.t
test-merge4.t
test-merge5.t
+test-merge6.t
+test-merge7.t
+test-merge8.t
+test-mq-qimport-fail-cleanup.t
+test-obshistory.t
test-permissions.t
+test-push-checkheads-partial-C1.t
+test-push-checkheads-partial-C2.t
+test-push-checkheads-partial-C3.t
+test-push-checkheads-partial-C4.t
test-push-checkheads-pruned-B1.t
+test-push-checkheads-pruned-B2.t
+test-push-checkheads-pruned-B3.t
+test-push-checkheads-pruned-B4.t
+test-push-checkheads-pruned-B5.t
test-push-checkheads-pruned-B6.t
test-push-checkheads-pruned-B7.t
+test-push-checkheads-pruned-B8.t
test-push-checkheads-superceed-A1.t
+test-push-checkheads-superceed-A2.t
+test-push-checkheads-superceed-A3.t
test-push-checkheads-superceed-A4.t
test-push-checkheads-superceed-A5.t
+test-push-checkheads-superceed-A6.t
+test-push-checkheads-superceed-A7.t
test-push-checkheads-superceed-A8.t
test-push-checkheads-unpushed-D1.t
+test-push-checkheads-unpushed-D2.t
+test-push-checkheads-unpushed-D3.t
+test-push-checkheads-unpushed-D4.t
+test-push-checkheads-unpushed-D5.t
test-push-checkheads-unpushed-D6.t
test-push-checkheads-unpushed-D7.t
+test-record.t
+test-rename-dir-merge.t
test-rename-merge1.t
test-rename.t
+test-revert-flags.t
+test-revert-unknown.t
+test-revlog-group-emptyiter.t
+test-revlog-mmapindex.t
test-revlog-packentry.t
test-run-tests.py
test-show-stack.t
+test-simple-update.t
+test-sparse-clear.t
+test-sparse-merges.t
+test-sparse-requirement.t
+test-sparse-verbose-json.t
test-status-terse.t
-test-terse-status.t
+test-uncommit.t
test-unified-test.t
+test-unrelated-pull.t
test-update-issue1456.t
+test-update-names.t
test-update-reverse.t
test-xdg.t
--- a/contrib/showstack.py Mon Jan 08 16:07:51 2018 -0800
+++ b/contrib/showstack.py Mon Jan 22 17:53:02 2018 -0500
@@ -1,6 +1,8 @@
# showstack.py - extension to dump a Python stack trace on signal
#
# binds to both SIGQUIT (Ctrl-\) and SIGINFO (Ctrl-T on BSDs)
+"""dump stack trace when receiving SIGQUIT (Ctrl-\) and SIGINFO (Ctrl-T on BSDs)
+"""
from __future__ import absolute_import
import signal
--- a/contrib/synthrepo.py Mon Jan 08 16:07:51 2018 -0800
+++ b/contrib/synthrepo.py Mon Jan 22 17:53:02 2018 -0500
@@ -369,14 +369,14 @@
while not validpath(path):
path = pickpath()
data = '%s contents\n' % path
- files[path] = context.memfilectx(repo, path, data)
+ files[path] = data
dir = os.path.dirname(path)
while dir and dir not in dirs:
dirs.add(dir)
dir = os.path.dirname(dir)
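+ # memfilectx now takes the memctx, so build file contexts lazily here
+ # rather than caching prebuilt ones in `files`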
def filectxfn(repo, memctx, path):
- return files[path]
+ return context.memfilectx(repo, memctx, path, files[path])
ui.progress(_synthesizing, None)
message = 'synthesized wide repo with %d files' % (len(files),)
@@ -444,14 +444,12 @@
for __ in xrange(add):
lines.insert(random.randint(0, len(lines)), makeline())
path = fctx.path()
- changes[path] = context.memfilectx(repo, path,
- '\n'.join(lines) + '\n')
+ changes[path] = '\n'.join(lines) + '\n'
for __ in xrange(pick(filesremoved)):
path = random.choice(mfk)
for __ in xrange(10):
path = random.choice(mfk)
if path not in changes:
- changes[path] = None
break
if filesadded:
dirs = list(pctx.dirs())
@@ -466,9 +464,11 @@
pathstr = '/'.join(filter(None, path))
data = '\n'.join(makeline()
for __ in xrange(pick(linesinfilesadded))) + '\n'
- changes[pathstr] = context.memfilectx(repo, pathstr, data)
+ changes[pathstr] = data
def filectxfn(repo, memctx, path):
- return changes[path]
+ if path not in changes:
+ return None
+ return context.memfilectx(repo, memctx, path, changes[path])
if not changes:
continue
if revs:
--- a/contrib/win32/ReadMe.html Mon Jan 08 16:07:51 2018 -0800
+++ b/contrib/win32/ReadMe.html Mon Jan 22 17:53:02 2018 -0500
@@ -140,7 +140,7 @@
</p>
<p>
- Mercurial is Copyright 2005-2017 Matt Mackall and others. See
+ Mercurial is Copyright 2005-2018 Matt Mackall and others. See
the <tt>Contributors.txt</tt> file for a list of contributors.
</p>
--- a/contrib/win32/mercurial.iss Mon Jan 08 16:07:51 2018 -0800
+++ b/contrib/win32/mercurial.iss Mon Jan 22 17:53:02 2018 -0500
@@ -21,7 +21,7 @@
#endif
[Setup]
-AppCopyright=Copyright 2005-2017 Matt Mackall and others
+AppCopyright=Copyright 2005-2018 Matt Mackall and others
AppName=Mercurial
AppVersion={#VERSION}
#if ARCH == "x64"
@@ -45,7 +45,7 @@
DefaultDirName={pf}\Mercurial
SourceDir=..\..
VersionInfoDescription=Mercurial distributed SCM (version {#VERSION})
-VersionInfoCopyright=Copyright 2005-2017 Matt Mackall and others
+VersionInfoCopyright=Copyright 2005-2018 Matt Mackall and others
VersionInfoCompany=Matt Mackall and others
InternalCompressLevel=max
SolidCompression=true
Binary file contrib/wix/COPYING.rtf has changed
--- a/contrib/wix/help.wxs Mon Jan 08 16:07:51 2018 -0800
+++ b/contrib/wix/help.wxs Mon Jan 22 17:53:02 2018 -0500
@@ -23,6 +23,7 @@
<File Name="environment.txt" />
<File Name="extensions.txt" />
<File Name="filesets.txt" />
+ <File Name="flags.txt" />
<File Name="glossary.txt" />
<File Name="hgignore.txt" />
<File Name="hgweb.txt" />
--- a/contrib/wix/templates.wxs Mon Jan 08 16:07:51 2018 -0800
+++ b/contrib/wix/templates.wxs Mon Jan 22 17:53:02 2018 -0500
@@ -42,6 +42,7 @@
<Directory Id="templates.jsondir" Name="json">
<Component Id="templates.json" Guid="$(var.templates.json.guid)" Win64='$(var.IsX64)'>
<File Id="json.changelist.tmpl" Name="changelist.tmpl" KeyPath="yes" />
+ <File Id="json.graph.tmpl" Name="graph.tmpl" />
<File Id="json.map" Name="map" />
</Component>
</Directory>
@@ -85,6 +86,7 @@
<File Id="gitweb.filerevision.tmpl" Name="filerevision.tmpl" />
<File Id="gitweb.footer.tmpl" Name="footer.tmpl" />
<File Id="gitweb.graph.tmpl" Name="graph.tmpl" />
+ <File Id="gitweb.graphentry.tmpl" Name="graphentry.tmpl" />
<File Id="gitweb.header.tmpl" Name="header.tmpl" />
<File Id="gitweb.index.tmpl" Name="index.tmpl" />
<File Id="gitweb.manifest.tmpl" Name="manifest.tmpl" />
@@ -114,6 +116,7 @@
<File Id="monoblue.filerevision.tmpl" Name="filerevision.tmpl" />
<File Id="monoblue.footer.tmpl" Name="footer.tmpl" />
<File Id="monoblue.graph.tmpl" Name="graph.tmpl" />
+ <File Id="monoblue.graphentry.tmpl" Name="graphentry.tmpl" />
<File Id="monoblue.header.tmpl" Name="header.tmpl" />
<File Id="monoblue.index.tmpl" Name="index.tmpl" />
<File Id="monoblue.manifest.tmpl" Name="manifest.tmpl" />
@@ -143,6 +146,7 @@
<File Id="paper.filerevision.tmpl" Name="filerevision.tmpl" />
<File Id="paper.footer.tmpl" Name="footer.tmpl" />
<File Id="paper.graph.tmpl" Name="graph.tmpl" />
+ <File Id="paper.graphentry.tmpl" Name="graphentry.tmpl" />
<File Id="paper.header.tmpl" Name="header.tmpl" />
<File Id="paper.index.tmpl" Name="index.tmpl" />
<File Id="paper.manifest.tmpl" Name="manifest.tmpl" />
@@ -208,6 +212,7 @@
<File Id="spartan.filerevision.tmpl" Name="filerevision.tmpl" />
<File Id="spartan.footer.tmpl" Name="footer.tmpl" />
<File Id="spartan.graph.tmpl" Name="graph.tmpl" />
+ <File Id="spartan.graphentry.tmpl" Name="graphentry.tmpl" />
<File Id="spartan.header.tmpl" Name="header.tmpl" />
<File Id="spartan.index.tmpl" Name="index.tmpl" />
<File Id="spartan.manifest.tmpl" Name="manifest.tmpl" />
@@ -225,7 +230,6 @@
<File Id="static.background.png" Name="background.png" KeyPath="yes" />
<File Id="static.coal.file.png" Name="coal-file.png" />
<File Id="static.coal.folder.png" Name="coal-folder.png" />
- <File Id="static.excanvas.js" Name="excanvas.js" />
<File Id="static.followlines.js" Name="followlines.js" />
<File Id="static.mercurial.js" Name="mercurial.js" />
<File Id="static.hgicon.png" Name="hgicon.png" />
--- a/hgdemandimport/demandimportpy3.py Mon Jan 08 16:07:51 2018 -0800
+++ b/hgdemandimport/demandimportpy3.py Mon Jan 22 17:53:02 2018 -0500
@@ -46,7 +46,7 @@
super().exec_module(module)
# This is 3.6+ because with Python 3.5 it isn't possible to lazily load
-# extensions. See the discussion in https://python.org/sf/26186 for more.
+# extensions. See the discussion in https://bugs.python.org/issue26186 for more.
_extensions_loader = _lazyloaderex.factory(
importlib.machinery.ExtensionFileLoader)
_bytecode_loader = _lazyloaderex.factory(
--- a/hgext/amend.py Mon Jan 08 16:07:51 2018 -0800
+++ b/hgext/amend.py Mon Jan 22 17:53:02 2018 -0500
@@ -17,6 +17,7 @@
cmdutil,
commands,
error,
+ pycompat,
registrar,
)
@@ -46,10 +47,11 @@
See :hg:`help commit` for more details.
"""
+ opts = pycompat.byteskwargs(opts)
if len(opts['note']) > 255:
raise error.Abort(_("cannot store a note of more than 255 bytes"))
with repo.wlock(), repo.lock():
if not opts.get('logfile'):
opts['message'] = opts.get('message') or repo['.'].description()
opts['amend'] = True
- return commands._docommit(ui, repo, *pats, **opts)
+ return commands._docommit(ui, repo, *pats, **pycompat.strkwargs(opts))
--- a/hgext/automv.py Mon Jan 08 16:07:51 2018 -0800
+++ b/hgext/automv.py Mon Jan 22 17:53:02 2018 -0500
@@ -32,6 +32,7 @@
copies,
error,
extensions,
+ pycompat,
registrar,
scmutil,
similar
@@ -53,6 +54,7 @@
def mvcheck(orig, ui, repo, *pats, **opts):
"""Hook to check for moves at commit time"""
+ opts = pycompat.byteskwargs(opts)
renames = None
disabled = opts.pop('no_automv', False)
if not disabled:
@@ -68,7 +70,7 @@
with repo.wlock():
if renames is not None:
scmutil._markchanges(repo, (), (), renames)
- return orig(ui, repo, *pats, **opts)
+ return orig(ui, repo, *pats, **pycompat.strkwargs(opts))
def _interestingfiles(repo, matcher):
"""Find what files were added or removed in this commit.
--- a/hgext/blackbox.py Mon Jan 08 16:07:51 2018 -0800
+++ b/hgext/blackbox.py Mon Jan 22 17:53:02 2018 -0500
@@ -44,6 +44,7 @@
from mercurial.node import hex
from mercurial import (
+ encoding,
registrar,
ui as uimod,
util,
@@ -129,6 +130,11 @@
def track(self):
return self.configlist('blackbox', 'track')
+ def debug(self, *msg, **opts):
+ super(blackboxui, self).debug(*msg, **opts)
+ if self.debugflag:
+ self.log('debug', '%s', ''.join(msg))
+
def log(self, event, *msg, **opts):
global lastui
super(blackboxui, self).log(event, *msg, **opts)
@@ -182,7 +188,7 @@
fp.write(fmt % args)
except (IOError, OSError) as err:
self.debug('warning: cannot write to blackbox.log: %s\n' %
- err.strerror)
+ encoding.strtolocal(err.strerror))
# do not restore _bbinlog intentionally to avoid failed
# logging again
else:
@@ -226,7 +232,7 @@
if not repo.vfs.exists('blackbox.log'):
return
- limit = opts.get('limit')
+ limit = opts.get(r'limit')
fp = repo.vfs('blackbox.log', 'r')
lines = fp.read().split('\n')
--- a/hgext/bugzilla.py Mon Jan 08 16:07:51 2018 -0800
+++ b/hgext/bugzilla.py Mon Jan 22 17:53:02 2018 -0500
@@ -580,7 +580,7 @@
self.ui.warn(_("Bugzilla/MySQL cannot update bug state\n"))
(user, userid) = self.get_bugzilla_user(committer)
- now = time.strftime('%Y-%m-%d %H:%M:%S')
+ now = time.strftime(r'%Y-%m-%d %H:%M:%S')
self.run('''insert into longdescs
(bug_id, who, bug_when, thetext)
values (%s, %s, %s, %s)''',
--- a/hgext/children.py Mon Jan 08 16:07:51 2018 -0800
+++ b/hgext/children.py Mon Jan 22 17:53:02 2018 -0500
@@ -19,6 +19,7 @@
from mercurial.i18n import _
from mercurial import (
cmdutil,
+ pycompat,
registrar,
)
@@ -55,6 +56,7 @@
See :hg:`help log` and :hg:`help revsets.children`.
"""
+ opts = pycompat.byteskwargs(opts)
rev = opts.get('rev')
if file_:
fctx = repo.filectx(file_, changeid=rev)
--- a/hgext/churn.py Mon Jan 08 16:07:51 2018 -0800
+++ b/hgext/churn.py Mon Jan 22 17:53:02 2018 -0500
@@ -19,6 +19,7 @@
cmdutil,
encoding,
patch,
+ pycompat,
registrar,
scmutil,
util,
@@ -45,6 +46,7 @@
def countrate(ui, repo, amap, *pats, **opts):
"""Calculate stats"""
+ opts = pycompat.byteskwargs(opts)
if opts.get('dateformat'):
def getkey(ctx):
t, tz = ctx.date()
@@ -154,7 +156,7 @@
return s + " " * (l - encoding.colwidth(s))
amap = {}
- aliases = opts.get('aliases')
+ aliases = opts.get(r'aliases')
if not aliases and os.path.exists(repo.wjoin('.hgchurn')):
aliases = repo.wjoin('.hgchurn')
if aliases:
@@ -172,7 +174,7 @@
if not rate:
return
- if opts.get('sort'):
+ if opts.get(r'sort'):
rate.sort()
else:
rate.sort(key=lambda x: (-sum(x[1]), x))
@@ -185,7 +187,7 @@
ui.debug("assuming %i character terminal\n" % ttywidth)
width = ttywidth - maxname - 2 - 2 - 2
- if opts.get('diffstat'):
+ if opts.get(r'diffstat'):
width -= 15
def format(name, diffstat):
added, removed = diffstat
--- a/hgext/commitextras.py Mon Jan 08 16:07:51 2018 -0800
+++ b/hgext/commitextras.py Mon Jan 22 17:53:02 2018 -0500
@@ -46,7 +46,7 @@
origcommit = repo.commit
try:
def _wrappedcommit(*innerpats, **inneropts):
- extras = opts.get('extra')
+ extras = opts.get(r'extra')
if extras:
for raw in extras:
if '=' not in raw:
@@ -65,7 +65,7 @@
msg = _("key '%s' is used internally, can't be set "
"manually")
raise error.Abort(msg % k)
- inneropts['extra'][k] = v
+ inneropts[r'extra'][k] = v
return origcommit(*innerpats, **inneropts)
# This __dict__ logic is needed because the normal
--- a/hgext/convert/bzr.py Mon Jan 08 16:07:51 2018 -0800
+++ b/hgext/convert/bzr.py Mon Jan 22 17:53:02 2018 -0500
@@ -44,8 +44,8 @@
class bzr_source(common.converter_source):
"""Reads Bazaar repositories by using the Bazaar Python libraries"""
- def __init__(self, ui, path, revs=None):
- super(bzr_source, self).__init__(ui, path, revs=revs)
+ def __init__(self, ui, repotype, path, revs=None):
+ super(bzr_source, self).__init__(ui, repotype, path, revs=revs)
if not os.path.exists(os.path.join(path, '.bzr')):
raise common.NoRepo(_('%s does not look like a Bazaar repository')
--- a/hgext/convert/common.py Mon Jan 08 16:07:51 2018 -0800
+++ b/hgext/convert/common.py Mon Jan 22 17:53:02 2018 -0500
@@ -73,12 +73,13 @@
class converter_source(object):
"""Conversion source interface"""
- def __init__(self, ui, path=None, revs=None):
+ def __init__(self, ui, repotype, path=None, revs=None):
"""Initialize conversion source (or raise NoRepo("message")
exception if path is not a valid repository)"""
self.ui = ui
self.path = path
self.revs = revs
+ self.repotype = repotype
self.encoding = 'utf-8'
@@ -218,7 +219,7 @@
class converter_sink(object):
"""Conversion sink (target) interface"""
- def __init__(self, ui, path):
+ def __init__(self, ui, repotype, path):
"""Initialize conversion sink (or raise NoRepo("message")
exception if path is not a valid repository)
@@ -227,6 +228,7 @@
self.ui = ui
self.path = path
self.created = []
+ self.repotype = repotype
def revmapfile(self):
"""Path to a file that will contain lines
--- a/hgext/convert/convcmd.py Mon Jan 08 16:07:51 2018 -0800
+++ b/hgext/convert/convcmd.py Mon Jan 22 17:53:02 2018 -0500
@@ -6,6 +6,7 @@
# GNU General Public License version 2 or any later version.
from __future__ import absolute_import
+import collections
import os
import shlex
import shutil
@@ -15,6 +16,7 @@
encoding,
error,
hg,
+ scmutil,
util,
)
@@ -114,7 +116,7 @@
for name, source, sortmode in source_converters:
try:
if not type or name == type:
- return source(ui, path, revs), sortmode
+ return source(ui, name, path, revs), sortmode
except (NoRepo, MissingTool) as inst:
exceptions.append(inst)
if not ui.quiet:
@@ -128,7 +130,7 @@
for name, sink in sink_converters:
try:
if not type or name == type:
- return sink(ui, path)
+ return sink(ui, name, path)
except NoRepo as inst:
ui.note(_("convert: %s\n") % inst)
except MissingTool as inst:
@@ -289,13 +291,13 @@
revisions without parents. 'parents' must be a mapping of revision
identifier to its parents ones.
"""
- visit = sorted(parents)
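+ # a deque gives O(1) popleft(); list.pop(0) re-shifts the list on every pop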
+ visit = collections.deque(sorted(parents))
seen = set()
children = {}
roots = []
while visit:
- n = visit.pop(0)
+ n = visit.popleft()
if n in seen:
continue
seen.add(n)
@@ -449,7 +451,7 @@
commit = self.commitcache[rev]
full = self.opts.get('full')
changes = self.source.getchanges(rev, full)
- if isinstance(changes, basestring):
+ if isinstance(changes, bytes):
if changes == SKIPREV:
dest = SKIPREV
else:
@@ -575,6 +577,7 @@
ui.status(_("assuming destination %s\n") % dest)
destc = convertsink(ui, dest, opts.get('dest_type'))
+ destc = scmutil.wrapconvertsink(destc)
try:
srcc, defaultsort = convertsource(ui, src, opts.get('source_type'),
--- a/hgext/convert/cvs.py Mon Jan 08 16:07:51 2018 -0800
+++ b/hgext/convert/cvs.py Mon Jan 22 17:53:02 2018 -0500
@@ -32,8 +32,8 @@
NoRepo = common.NoRepo
class convert_cvs(converter_source):
- def __init__(self, ui, path, revs=None):
- super(convert_cvs, self).__init__(ui, path, revs=revs)
+ def __init__(self, ui, repotype, path, revs=None):
+ super(convert_cvs, self).__init__(ui, repotype, path, revs=revs)
cvs = os.path.join(path, "CVS")
if not os.path.exists(cvs):
--- a/hgext/convert/darcs.py Mon Jan 08 16:07:51 2018 -0800
+++ b/hgext/convert/darcs.py Mon Jan 22 17:53:02 2018 -0500
@@ -40,8 +40,8 @@
pass
class darcs_source(common.converter_source, common.commandline):
- def __init__(self, ui, path, revs=None):
- common.converter_source.__init__(self, ui, path, revs=revs)
+ def __init__(self, ui, repotype, path, revs=None):
+ common.converter_source.__init__(self, ui, repotype, path, revs=revs)
common.commandline.__init__(self, ui, 'darcs')
# check for _darcs, ElementTree so that we can easily skip
--- a/hgext/convert/filemap.py Mon Jan 08 16:07:51 2018 -0800
+++ b/hgext/convert/filemap.py Mon Jan 22 17:53:02 2018 -0500
@@ -172,7 +172,7 @@
class filemap_source(common.converter_source):
def __init__(self, ui, baseconverter, filemap):
- super(filemap_source, self).__init__(ui)
+ super(filemap_source, self).__init__(ui, baseconverter.repotype)
self.base = baseconverter
self.filemapper = filemapper(ui, filemap)
self.commits = {}
--- a/hgext/convert/git.py Mon Jan 08 16:07:51 2018 -0800
+++ b/hgext/convert/git.py Mon Jan 22 17:53:02 2018 -0500
@@ -66,8 +66,8 @@
def gitpipe(self, *args, **kwargs):
return self._gitcmd(self._run3, *args, **kwargs)
- def __init__(self, ui, path, revs=None):
- super(convert_git, self).__init__(ui, path, revs=revs)
+ def __init__(self, ui, repotype, path, revs=None):
+ super(convert_git, self).__init__(ui, repotype, path, revs=revs)
common.commandline.__init__(self, ui, 'git')
# Pass an absolute path to git to prevent from ever being interpreted
@@ -342,13 +342,15 @@
p = v.split()
tm, tz = p[-2:]
author = " ".join(p[:-2])
- if author[0] == "<": author = author[1:-1]
+ if author[0] == "<":
+ author = author[1:-1]
author = self.recode(author)
if n == "committer":
p = v.split()
tm, tz = p[-2:]
committer = " ".join(p[:-2])
- if committer[0] == "<": committer = committer[1:-1]
+ if committer[0] == "<":
+ committer = committer[1:-1]
committer = self.recode(committer)
if n == "parent":
parents.append(v)
--- a/hgext/convert/gnuarch.py Mon Jan 08 16:07:51 2018 -0800
+++ b/hgext/convert/gnuarch.py Mon Jan 22 17:53:02 2018 -0500
@@ -7,7 +7,7 @@
# GNU General Public License version 2 or any later version.
from __future__ import absolute_import
-import email
+import email.parser as emailparser
import os
import shutil
import stat
@@ -36,8 +36,8 @@
self.ren_files = {}
self.ren_dirs = {}
- def __init__(self, ui, path, revs=None):
- super(gnuarch_source, self).__init__(ui, path, revs=revs)
+ def __init__(self, ui, repotype, path, revs=None):
+ super(gnuarch_source, self).__init__(ui, repotype, path, revs=revs)
if not os.path.exists(os.path.join(path, '{arch}')):
raise common.NoRepo(_("%s does not look like a GNU Arch repository")
@@ -63,7 +63,7 @@
self.changes = {}
self.parents = {}
self.tags = {}
- self.catlogparser = email.Parser.Parser()
+ self.catlogparser = emailparser.Parser()
self.encoding = encoding.encoding
self.archives = []
--- a/hgext/convert/hg.py Mon Jan 08 16:07:51 2018 -0800
+++ b/hgext/convert/hg.py Mon Jan 22 17:53:02 2018 -0500
@@ -45,8 +45,8 @@
sha1re = re.compile(r'\b[0-9a-f]{12,40}\b')
class mercurial_sink(common.converter_sink):
- def __init__(self, ui, path):
- common.converter_sink.__init__(self, ui, path)
+ def __init__(self, ui, repotype, path):
+ common.converter_sink.__init__(self, ui, repotype, path)
self.branchnames = ui.configbool('convert', 'hg.usebranchnames')
self.clonebranches = ui.configbool('convert', 'hg.clonebranches')
self.tagsbranch = ui.config('convert', 'hg.tagsbranch')
@@ -253,7 +253,7 @@
data = self._rewritetags(source, revmap, data)
if f == '.hgsubstate':
data = self._rewritesubstate(source, data)
- return context.memfilectx(self.repo, f, data, 'l' in mode,
+ return context.memfilectx(self.repo, memctx, f, data, 'l' in mode,
'x' in mode, copies.get(f))
pl = []
@@ -401,7 +401,7 @@
data = "".join(newlines)
def getfilectx(repo, memctx, f):
- return context.memfilectx(repo, f, data, False, False, None)
+ return context.memfilectx(repo, memctx, f, data, False, False, None)
self.ui.status(_("updating tags\n"))
date = "%s 0" % int(time.mktime(time.gmtime()))
@@ -444,8 +444,8 @@
return rev in self.repo
class mercurial_source(common.converter_source):
- def __init__(self, ui, path, revs=None):
- common.converter_source.__init__(self, ui, path, revs)
+ def __init__(self, ui, repotype, path, revs=None):
+ common.converter_source.__init__(self, ui, repotype, path, revs)
self.ignoreerrors = ui.configbool('convert', 'hg.ignoreerrors')
self.ignored = set()
self.saverev = ui.configbool('convert', 'hg.saverev')
--- a/hgext/convert/monotone.py Mon Jan 08 16:07:51 2018 -0800
+++ b/hgext/convert/monotone.py Mon Jan 22 17:53:02 2018 -0500
@@ -19,8 +19,8 @@
from . import common
class monotone_source(common.converter_source, common.commandline):
- def __init__(self, ui, path=None, revs=None):
- common.converter_source.__init__(self, ui, path, revs)
+ def __init__(self, ui, repotype, path=None, revs=None):
+ common.converter_source.__init__(self, ui, repotype, path, revs)
if revs and len(revs) > 1:
raise error.Abort(_('monotone source does not support specifying '
'multiple revs'))
--- a/hgext/convert/p4.py Mon Jan 08 16:07:51 2018 -0800
+++ b/hgext/convert/p4.py Mon Jan 22 17:53:02 2018 -0500
@@ -43,11 +43,11 @@
return filename
class p4_source(common.converter_source):
- def __init__(self, ui, path, revs=None):
+ def __init__(self, ui, repotype, path, revs=None):
# avoid import cycle
from . import convcmd
- super(p4_source, self).__init__(ui, path, revs=revs)
+ super(p4_source, self).__init__(ui, repotype, path, revs=revs)
if "/" in path and not path.startswith('//'):
raise common.NoRepo(_('%s does not look like a P4 repository') %
--- a/hgext/convert/subversion.py Mon Jan 08 16:07:51 2018 -0800
+++ b/hgext/convert/subversion.py Mon Jan 22 17:53:02 2018 -0500
@@ -285,8 +285,8 @@
# the parent module. A revision has at most one parent.
#
class svn_source(converter_source):
- def __init__(self, ui, url, revs=None):
- super(svn_source, self).__init__(ui, url, revs=revs)
+ def __init__(self, ui, repotype, url, revs=None):
+ super(svn_source, self).__init__(ui, repotype, url, revs=revs)
if not (url.startswith('svn://') or url.startswith('svn+ssh://') or
(os.path.exists(url) and
@@ -1112,9 +1112,9 @@
def authorfile(self):
return self.join('hg-authormap')
- def __init__(self, ui, path):
+ def __init__(self, ui, repotype, path):
- converter_sink.__init__(self, ui, path)
+ converter_sink.__init__(self, ui, repotype, path)
commandline.__init__(self, ui, 'svn')
self.delete = []
self.setexec = []
--- a/hgext/extdiff.py Mon Jan 08 16:07:51 2018 -0800
+++ b/hgext/extdiff.py Mon Jan 22 17:53:02 2018 -0500
@@ -338,6 +338,7 @@
that revision is compared to the working directory, and, when no
revisions are specified, the working directory files are compared
to its parent.'''
+ opts = pycompat.byteskwargs(opts)
program = opts.get('program')
option = opts.get('option')
if not program:
@@ -369,6 +370,7 @@
self._cmdline = cmdline
def __call__(self, ui, repo, *pats, **opts):
+ opts = pycompat.byteskwargs(opts)
options = ' '.join(map(util.shellquote, opts['option']))
if options:
options = ' ' + options
--- a/hgext/fetch.py Mon Jan 08 16:07:51 2018 -0800
+++ b/hgext/fetch.py Mon Jan 22 17:53:02 2018 -0500
@@ -19,6 +19,7 @@
exchange,
hg,
lock,
+ pycompat,
registrar,
util,
)
@@ -60,6 +61,7 @@
Returns 0 on success.
'''
+ opts = pycompat.byteskwargs(opts)
date = opts.get('date')
if date:
opts['date'] = util.parsedate(date)
--- a/hgext/fsmonitor/__init__.py Mon Jan 08 16:07:51 2018 -0800
+++ b/hgext/fsmonitor/__init__.py Mon Jan 22 17:53:02 2018 -0500
@@ -117,7 +117,6 @@
from mercurial.i18n import _
from mercurial.node import (
hex,
- nullid,
)
from mercurial import (
@@ -165,9 +164,6 @@
configitem('experimental', 'fsmonitor.transaction_notify',
default=False,
)
-configitem('experimental', 'fsmonitor.wc_change_notify',
- default=False,
-)
# This extension is incompatible with the following blacklisted extensions
# and will disable itself when encountering one of these:
@@ -224,16 +220,21 @@
Whenever full is False, ignored is False, and the Watchman client is
available, use Watchman combined with saved state to possibly return only a
subset of files.'''
- def bail():
+ def bail(reason):
+ self._ui.debug('fsmonitor: fallback to core status, %s\n' % reason)
return orig(match, subrepos, unknown, ignored, full=True)
- if full or ignored or not self._watchmanclient.available():
- return bail()
+ if full:
+ return bail('full rewalk requested')
+ if ignored:
+ return bail('listing ignored files')
+ if not self._watchmanclient.available():
+ return bail('client unavailable')
state = self._fsmonitorstate
clock, ignorehash, notefiles = state.get()
if not clock:
if state.walk_on_invalidate:
- return bail()
+ return bail('no clock')
# Initial NULL clock value, see
# https://facebook.github.io/watchman/docs/clockspec.html
clock = 'c:0:0'
@@ -263,7 +264,7 @@
if _hashignore(ignore) != ignorehash and clock != 'c:0:0':
# ignore list changed -- can't rely on Watchman state any more
if state.walk_on_invalidate:
- return bail()
+ return bail('ignore rules changed')
notefiles = []
clock = 'c:0:0'
else:
@@ -273,7 +274,11 @@
matchfn = match.matchfn
matchalways = match.always()
- dmap = self._map._map
+ dmap = self._map
+ if util.safehasattr(dmap, '_map'):
+ # for better performance, directly access the inner dirstate map if the
+ # standard dirstate implementation is in use.
+ dmap = dmap._map
nonnormalset = self._map.nonnormalset
copymap = self._map.copymap
@@ -334,7 +339,7 @@
except Exception as ex:
_handleunavailable(self._ui, state, ex)
self._watchmanclient.clearconnection()
- return bail()
+ return bail('exception during run')
else:
# We need to propagate the last observed clock up so that we
# can use it for our next query
@@ -342,7 +347,7 @@
if result['is_fresh_instance']:
if state.walk_on_invalidate:
state.invalidate()
- return bail()
+ return bail('fresh instance')
fresh_instance = True
# Ignore any prior noteable files from the state info
notefiles = []
@@ -600,14 +605,6 @@
self._fsmonitorstate.invalidate()
return super(fsmonitordirstate, self).invalidate(*args, **kwargs)
- if dirstate._ui.configbool(
- "experimental", "fsmonitor.wc_change_notify"):
- def setparents(self, p1, p2=nullid):
- with state_update(self._repo, name="hg.wc_change",
- oldnode=self._pl[0], newnode=p1,
- partial=False):
- return super(fsmonitordirstate, self).setparents(p1, p2)
-
dirstate.__class__ = fsmonitordirstate
dirstate._fsmonitorinit(repo)
@@ -662,14 +659,18 @@
self.enter()
def enter(self):
- # We explicitly need to take a lock here, before we proceed to update
- # watchman about the update operation, so that we don't race with
- # some other actor. merge.update is going to take the wlock almost
- # immediately anyway, so this is effectively extending the lock
- # around a couple of short sanity checks.
+ # Make sure we have a wlock prior to sending notifications to watchman.
+ # We don't want to race with other actors. In the update case,
+ # merge.update is going to take the wlock almost immediately. We are
+ # effectively extending the lock around several short sanity checks.
if self.oldnode is None:
self.oldnode = self.repo['.'].node()
- self._lock = self.repo.wlock()
+
+ if self.repo.currentwlock() is None:
+ if util.safehasattr(self.repo, 'wlocknostateupdate'):
+ self._lock = self.repo.wlocknostateupdate()
+ else:
+ self._lock = self.repo.wlock()
self.need_leave = self._state(
'state-enter',
hex(self.oldnode))
@@ -790,32 +791,34 @@
orig = super(fsmonitorrepo, self).status
return overridestatus(orig, self, *args, **kwargs)
- if ui.configbool("experimental", "fsmonitor.transaction_notify"):
- def transaction(self, *args, **kwargs):
- tr = super(fsmonitorrepo, self).transaction(
- *args, **kwargs)
- if tr.count != 1:
- return tr
- stateupdate = state_update(self, name="hg.transaction")
- stateupdate.enter()
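+ # wlocknostateupdate lets state_update acquire the wlock without recursing
+ # into the state-notifying wlock override defined below.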
+ def wlocknostateupdate(self, *args, **kwargs):
+ return super(fsmonitorrepo, self).wlock(*args, **kwargs)
+
+ def wlock(self, *args, **kwargs):
+ l = super(fsmonitorrepo, self).wlock(*args, **kwargs)
+ if not ui.configbool(
+ "experimental", "fsmonitor.transaction_notify"):
+ return l
+ if l.held != 1:
+ return l
+ origrelease = l.releasefn
- class fsmonitortrans(tr.__class__):
- def _abort(self):
- try:
- result = super(fsmonitortrans, self)._abort()
- finally:
- stateupdate.exit(abort=True)
- return result
+ def staterelease():
+ if origrelease:
+ origrelease()
+ if l.stateupdate:
+ l.stateupdate.exit()
+ l.stateupdate = None
- def close(self):
- try:
- result = super(fsmonitortrans, self).close()
- finally:
- if self.count == 0:
- stateupdate.exit()
- return result
-
- tr.__class__ = fsmonitortrans
- return tr
+ try:
+ l.stateupdate = None
+ l.stateupdate = state_update(self, name="hg.transaction")
+ l.stateupdate.enter()
+ l.releasefn = staterelease
+ except Exception as e:
+ # Swallow any errors; fire and forget
+ self.ui.log(
+ 'watchman', 'Exception in state update %s\n', e)
+ return l
repo.__class__ = fsmonitorrepo
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/hgext/githelp.py Mon Jan 22 17:53:02 2018 -0500
@@ -0,0 +1,1073 @@
+# githelp.py - Try to map Git commands to Mercurial equivalents.
+#
+# Copyright 2013 Facebook, Inc.
+#
+# This software may be used and distributed according to the terms of the
+# GNU General Public License version 2 or any later version.
+"""try mapping git commands to Mercurial commands
+
+Tries to map a given git command to a Mercurial command:
+
+ $ hg githelp -- git checkout master
+ hg update master
+
+If an unknown command or parameter combination is detected, an error is
+produced.
+"""
+
+from __future__ import absolute_import
+
+import getopt
+import re
+
+from mercurial.i18n import _
+from mercurial import (
+ error,
+ fancyopts,
+ registrar,
+ util,
+)
+
+# Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
+# extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
+# be specifying the version(s) of Mercurial they are tested with, or
+# leave the attribute unspecified.
+testedwith = 'ships-with-hg-core'
+
+cmdtable = {}
+command = registrar.command(cmdtable)
+
+def convert(s):
+ if s.startswith("origin/"):
+ return s[7:]
+ if 'HEAD' in s:
+ s = s.replace('HEAD', '.')
+ # HEAD~ in git is .~1 in mercurial
+ s = re.sub('~$', '~1', s)
+ return s
+
+@command('^githelp|git', [
+ ], _('hg githelp'))
+def githelp(ui, repo, *args, **kwargs):
+ '''suggests the Mercurial equivalent of the given git command
+
+ Usage: hg githelp -- <git command>
+ '''
+
+ if len(args) == 0 or (len(args) == 1 and args[0] == 'git'):
+ raise error.Abort(_('missing git command - '
+ 'usage: hg githelp -- <git command>'))
+
+ if args[0] == 'git':
+ args = args[1:]
+
+ cmd = args[0]
+ if not cmd in gitcommands:
+ raise error.Abort("error: unknown git command %s" % (cmd))
+
+ ui.pager('githelp')
+ args = args[1:]
+ return gitcommands[cmd](ui, repo, *args, **kwargs)
+
+def parseoptions(ui, cmdoptions, args):
+ cmdoptions = list(cmdoptions)
+ opts = {}
+ args = list(args)
+ while True:
+ try:
+ args = fancyopts.fancyopts(list(args), cmdoptions, opts, True)
+ break
+ except getopt.GetoptError as ex:
+ flag = None
+ if "requires argument" in ex.msg:
+ raise
+ if ('--' + ex.opt) in ex.msg:
+ flag = '--' + ex.opt
+ elif ('-' + ex.opt) in ex.msg:
+ flag = '-' + ex.opt
+ else:
+ raise error.Abort("unknown option %s" % ex.opt)
+ try:
+ args.remove(flag)
+ except Exception:
+ raise error.Abort(
+ "unknown option {0} packed with other options\n"
+ "Please try passing the option as its own flag: -{0}" \
+ .format(ex.opt))
+
+ ui.warn(_("ignoring unknown option %s\n") % flag)
+
+ args = list([convert(x) for x in args])
+ opts = dict([(k, convert(v)) if isinstance(v, str) else (k, v)
+ for k, v in opts.iteritems()])
+
+ return args, opts
+
+class Command(object):
+ def __init__(self, name):
+ self.name = name
+ self.args = []
+ self.opts = {}
+
+ def __str__(self):
+ cmd = "hg " + self.name
+ if self.opts:
+ for k, values in sorted(self.opts.iteritems()):
+ for v in values:
+ if v:
+ cmd += " %s %s" % (k, v)
+ else:
+ cmd += " %s" % (k,)
+ if self.args:
+ cmd += " "
+ cmd += " ".join(self.args)
+ return cmd
+
+ def append(self, value):
+ self.args.append(value)
+
+ def extend(self, values):
+ self.args.extend(values)
+
+ def __setitem__(self, key, value):
+ values = self.opts.setdefault(key, [])
+ values.append(value)
+
+ def __and__(self, other):
+ return AndCommand(self, other)
+
+class AndCommand(object):
+ def __init__(self, left, right):
+ self.left = left
+ self.right = right
+
+ def __str__(self):
+ return "%s && %s" % (self.left, self.right)
+
+ def __and__(self, other):
+ return AndCommand(self, other)
+
+def add(ui, repo, *args, **kwargs):
+ cmdoptions = [
+ ('A', 'all', None, ''),
+ ('p', 'patch', None, ''),
+ ]
+ args, opts = parseoptions(ui, cmdoptions, args)
+
+ if (opts.get('patch')):
+ ui.status(_("note: Mercurial will commit when complete, "
+ "as there is no staging area in Mercurial\n\n"))
+ cmd = Command('commit --interactive')
+ else:
+ cmd = Command("add")
+
+ if not opts.get('all'):
+ cmd.extend(args)
+ else:
+ ui.status(_("note: use hg addremove to remove files that have "
+ "been deleted.\n\n"))
+
+ ui.status((str(cmd)), "\n")
+
+def am(ui, repo, *args, **kwargs):
+ cmdoptions=[
+ ]
+ args, opts = parseoptions(ui, cmdoptions, args)
+ cmd = Command('import')
+ ui.status(str(cmd), "\n")
+
+def apply(ui, repo, *args, **kwargs):
+ cmdoptions = [
+ ('p', 'p', int, ''),
+ ]
+ args, opts = parseoptions(ui, cmdoptions, args)
+
+ cmd = Command('import --no-commit')
+ if (opts.get('p')):
+ cmd['-p'] = opts.get('p')
+ cmd.extend(args)
+
+ ui.status((str(cmd)), "\n")
+
+def bisect(ui, repo, *args, **kwargs):
+ ui.status(_("See 'hg help bisect' for how to use bisect.\n\n"))
+
+def blame(ui, repo, *args, **kwargs):
+ cmdoptions = [
+ ]
+ args, opts = parseoptions(ui, cmdoptions, args)
+ cmd = Command('annotate -udl')
+ cmd.extend([convert(v) for v in args])
+ ui.status((str(cmd)), "\n")
+
+def branch(ui, repo, *args, **kwargs):
+ cmdoptions = [
+ ('', 'set-upstream', None, ''),
+ ('', 'set-upstream-to', '', ''),
+ ('d', 'delete', None, ''),
+ ('D', 'delete', None, ''),
+ ('m', 'move', None, ''),
+ ('M', 'move', None, ''),
+ ]
+ args, opts = parseoptions(ui, cmdoptions, args)
+
+ cmd = Command("bookmark")
+
+ if opts.get('set_upstream') or opts.get('set_upstream_to'):
+ ui.status(_("Mercurial has no concept of upstream branches\n"))
+ return
+ elif opts.get('delete'):
+ cmd = Command("strip")
+ for branch in args:
+ cmd['-B'] = branch
+ else:
+ cmd['-B'] = None
+ elif opts.get('move'):
+ if len(args) > 0:
+ if len(args) > 1:
+ old = args.pop(0)
+ else:
+ # shell command to output the active bookmark for the active
+ # revision
+ old = '`hg log -T"{activebookmark}" -r .`'
+ new = args[0]
+ cmd['-m'] = old
+ cmd.append(new)
+ else:
+ if len(args) > 1:
+ cmd['-r'] = args[1]
+ cmd.append(args[0])
+ elif len(args) == 1:
+ cmd.append(args[0])
+ ui.status((str(cmd)), "\n")
+
+def ispath(repo, string):
+ """
+ The first argument to git checkout can either be a revision or a path. Let's
+ generally assume it's a revision, unless it's obviously a path. There are
+ too many ways to spell revisions in git for us to reasonably catch all of
+ them, so let's be conservative.
+ """
+ if string in repo:
+ # if it's definitely a revision let's not even check if a file of the
+ # same name exists.
+ return False
+
+ cwd = repo.getcwd()
+ if cwd == '':
+ repopath = string
+ else:
+ repopath = cwd + '/' + string
+
+ exists = repo.wvfs.exists(repopath)
+ if exists:
+ return True
+
+ manifest = repo['.'].manifest()
+
+ didexist = (repopath in manifest) or manifest.hasdir(repopath)
+
+ return didexist
+
+def checkout(ui, repo, *args, **kwargs):
+ cmdoptions = [
+ ('b', 'branch', '', ''),
+ ('B', 'branch', '', ''),
+ ('f', 'force', None, ''),
+ ('p', 'patch', None, ''),
+ ]
+ paths = []
+ if '--' in args:
+ sepindex = args.index('--')
+ paths.extend(args[sepindex + 1:])
+ args = args[:sepindex]
+
+ args, opts = parseoptions(ui, cmdoptions, args)
+
+ rev = None
+ if args and ispath(repo, args[0]):
+ paths = args + paths
+ elif args:
+ rev = args[0]
+ paths = args[1:] + paths
+
+ cmd = Command('update')
+
+ if opts.get('force'):
+ if paths or rev:
+ cmd['-C'] = None
+
+ if opts.get('patch'):
+ cmd = Command('revert')
+ cmd['-i'] = None
+
+ if opts.get('branch'):
+ if len(args) == 0:
+ cmd = Command('bookmark')
+ cmd.append(opts.get('branch'))
+ else:
+ cmd.append(args[0])
+ bookcmd = Command('bookmark')
+ bookcmd.append(opts.get('branch'))
+ cmd = cmd & bookcmd
+ # if there is any path argument supplied, use revert instead of update
+ elif len(paths) > 0:
+ ui.status(_("note: use --no-backup to avoid creating .orig files\n\n"))
+ cmd = Command('revert')
+ if opts.get('patch'):
+ cmd['-i'] = None
+ if rev:
+ cmd['-r'] = rev
+ cmd.extend(paths)
+ elif rev:
+ if opts.get('patch'):
+ cmd['-r'] = rev
+ else:
+ cmd.append(rev)
+ elif opts.get('force'):
+ cmd = Command('revert')
+ cmd['--all'] = None
+ else:
+ raise error.Abort("a commit must be specified")
+
+ ui.status((str(cmd)), "\n")
+
+def cherrypick(ui, repo, *args, **kwargs):
+ cmdoptions = [
+ ('', 'continue', None, ''),
+ ('', 'abort', None, ''),
+ ('e', 'edit', None, ''),
+ ]
+ args, opts = parseoptions(ui, cmdoptions, args)
+
+ cmd = Command('graft')
+
+ if opts.get('edit'):
+ cmd['--edit'] = None
+ if opts.get('continue'):
+ cmd['--continue'] = None
+ elif opts.get('abort'):
+ ui.status(_("note: hg graft does not have --abort.\n\n"))
+ return
+ else:
+ cmd.extend(args)
+
+ ui.status((str(cmd)), "\n")
+
+def clean(ui, repo, *args, **kwargs):
+ cmdoptions = [
+ ('d', 'd', None, ''),
+ ('f', 'force', None, ''),
+ ('x', 'x', None, ''),
+ ]
+ args, opts = parseoptions(ui, cmdoptions, args)
+
+ cmd = Command('purge')
+ if opts.get('x'):
+ cmd['--all'] = None
+ cmd.extend(args)
+
+ ui.status((str(cmd)), "\n")
+
+def clone(ui, repo, *args, **kwargs):
+ cmdoptions = [
+ ('', 'bare', None, ''),
+ ('n', 'no-checkout', None, ''),
+ ('b', 'branch', '', ''),
+ ]
+ args, opts = parseoptions(ui, cmdoptions, args)
+
+ if len(args) == 0:
+ raise error.Abort("a repository to clone must be specified")
+
+ cmd = Command('clone')
+ cmd.append(args[0])
+ if len(args) > 1:
+ cmd.append(args[1])
+
+ if opts.get('bare'):
+ cmd['-U'] = None
+ ui.status(_("note: Mercurial does not have bare clones. " +
+ "-U will clone the repo without checking out a commit\n\n"))
+ elif opts.get('no_checkout'):
+ cmd['-U'] = None
+
+ if opts.get('branch'):
+ cocmd = Command("update")
+ cocmd.append(opts.get('branch'))
+ cmd = cmd & cocmd
+
+ ui.status((str(cmd)), "\n")
+
+def commit(ui, repo, *args, **kwargs):
+ cmdoptions = [
+ ('a', 'all', None, ''),
+ ('m', 'message', '', ''),
+ ('p', 'patch', None, ''),
+ ('C', 'reuse-message', '', ''),
+ ('F', 'file', '', ''),
+ ('', 'author', '', ''),
+ ('', 'date', '', ''),
+ ('', 'amend', None, ''),
+ ('', 'no-edit', None, ''),
+ ]
+ args, opts = parseoptions(ui, cmdoptions, args)
+
+ cmd = Command('commit')
+ if opts.get('patch'):
+ cmd = Command('commit --interactive')
+
+ if opts.get('amend'):
+ if opts.get('no_edit'):
+ cmd = Command('amend')
+ else:
+ cmd['--amend'] = None
+
+ if opts.get('reuse_message'):
+ cmd['-M'] = opts.get('reuse_message')
+
+ if opts.get('message'):
+ cmd['-m'] = "'%s'" % (opts.get('message'),)
+
+ if opts.get('all'):
+ ui.status(_("note: Mercurial doesn't have a staging area, " +
+ "so there is no --all. -A will add and remove files " +
+ "for you though.\n\n"))
+
+ if opts.get('file'):
+ cmd['-l'] = opts.get('file')
+
+ if opts.get('author'):
+ cmd['-u'] = opts.get('author')
+
+ if opts.get('date'):
+ cmd['-d'] = opts.get('date')
+
+ cmd.extend(args)
+
+ ui.status((str(cmd)), "\n")
+
+def deprecated(ui, repo, *args, **kwargs):
+ ui.warn(_('This command has been deprecated in the git project, ' +
+ 'and thus isn\'t supported by this tool.\n\n'))
+
+def diff(ui, repo, *args, **kwargs):
+ cmdoptions = [
+ ('a', 'all', None, ''),
+ ('', 'cached', None, ''),
+ ('R', 'reverse', None, ''),
+ ]
+ args, opts = parseoptions(ui, cmdoptions, args)
+
+ cmd = Command('diff')
+
+ if opts.get('cached'):
+ ui.status(_('note: Mercurial has no concept of a staging area, ' +
+ 'so --cached does nothing.\n\n'))
+
+ if opts.get('reverse'):
+ cmd['--reverse'] = None
+
+ for a in list(args):
+ args.remove(a)
+ try:
+ repo.revs(a)
+ cmd['-r'] = a
+ except Exception:
+ cmd.append(a)
+
+ ui.status((str(cmd)), "\n")
+
+def difftool(ui, repo, *args, **kwargs):
+ ui.status(_('Mercurial does not enable external difftool by default. You '
+ 'need to enable the extdiff extension in your .hgrc file by adding\n'
+ 'extdiff =\n'
+ 'to the [extensions] section and then running\n\n'
+ 'hg extdiff -p <program>\n\n'
+ 'See \'hg help extdiff\' and \'hg help -e extdiff\' for more '
+ 'information.\n'))
+
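+# Illustrative mapping: 'git fetch <remote>' -> 'hg pull <remote>'; any
+# further arguments become -B <bookmark> or -r <rev>, since Mercurial has
+# no refspecs.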
+def fetch(ui, repo, *args, **kwargs):
+ cmdoptions = [
+ ('', 'all', None, ''),
+ ('f', 'force', None, ''),
+ ]
+ args, opts = parseoptions(ui, cmdoptions, args)
+
+ cmd = Command('pull')
+
+ if len(args) > 0:
+ cmd.append(args[0])
+ if len(args) > 1:
+ ui.status(_("note: Mercurial doesn't have refspecs. " +
+ "-r can be used to specify which commits you want to pull. " +
+ "-B can be used to specify which bookmark you want to pull." +
+ "\n\n"))
+ for v in args[1:]:
+ if v in repo._bookmarks:
+ cmd['-B'] = v
+ else:
+ cmd['-r'] = v
+
+ ui.status((str(cmd)), "\n")
+
+def grep(ui, repo, *args, **kwargs):
+ cmdoptions = [
+ ]
+ args, opts = parseoptions(ui, cmdoptions, args)
+
+ cmd = Command('grep')
+
+ # For basic usage, git grep and hg grep are the same. They both have the
+ # pattern first, followed by paths.
+ cmd.extend(args)
+
+ ui.status((str(cmd)), "\n")
+
+def init(ui, repo, *args, **kwargs):
+ cmdoptions = [
+ ]
+ args, opts = parseoptions(ui, cmdoptions, args)
+
+ cmd = Command('init')
+
+ if len(args) > 0:
+ cmd.append(args[0])
+
+ ui.status((str(cmd)), "\n")
+
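+# Illustrative mappings (flag ordering in the suggestion may vary):
+#   'git log -n 3 --graph' -> 'hg log -v -l 3 -G'
+#   'git log A..B'         -> "hg log -v -r 'A::B'"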
+def log(ui, repo, *args, **kwargs):
+ cmdoptions = [
+ ('', 'follow', None, ''),
+ ('', 'decorate', None, ''),
+ ('n', 'number', '', ''),
+ ('1', '1', None, ''),
+ ('', 'pretty', '', ''),
+ ('', 'format', '', ''),
+ ('', 'oneline', None, ''),
+ ('', 'stat', None, ''),
+ ('', 'graph', None, ''),
+ ('p', 'patch', None, ''),
+ ]
+ args, opts = parseoptions(ui, cmdoptions, args)
+ ui.status(_('note: -v prints the entire commit message like Git does. To ' +
+ 'print just the first line, drop the -v.\n\n'))
+ ui.status(_("note: see hg help revset for information on how to filter " +
+ "log output.\n\n"))
+
+ cmd = Command('log')
+ cmd['-v'] = None
+
+ if opts.get('number'):
+ cmd['-l'] = opts.get('number')
+ if opts.get('1'):
+ cmd['-l'] = '1'
+ if opts.get('stat'):
+ cmd['--stat'] = None
+ if opts.get('graph'):
+ cmd['-G'] = None
+ if opts.get('patch'):
+ cmd['-p'] = None
+
+ if opts.get('pretty') or opts.get('format') or opts.get('oneline'):
+ format = opts.get('format', '')
+ if 'format:' in format:
+ ui.status(_("note: --format format:??? equates to Mercurial's " +
+ "--template. See hg help templates for more info.\n\n"))
+ cmd['--template'] = '???'
+ else:
+ ui.status(_("note: --pretty/format/oneline equate to Mercurial's " +
+ "--style or --template. See hg help templates for more info." +
+ "\n\n"))
+ cmd['--style'] = '???'
+
+ if len(args) > 0:
+ if '..' in args[0]:
+ since, until = args[0].split('..')
+ cmd['-r'] = "'%s::%s'" % (since, until)
+ del args[0]
+ cmd.extend(args)
+
+ ui.status((str(cmd)), "\n")
+
+def lsfiles(ui, repo, *args, **kwargs):
+ cmdoptions = [
+ ('c', 'cached', None, ''),
+ ('d', 'deleted', None, ''),
+ ('m', 'modified', None, ''),
+ ('o', 'others', None, ''),
+ ('i', 'ignored', None, ''),
+ ('s', 'stage', None, ''),
+ ('z', '_zero', None, ''),
+ ]
+ args, opts = parseoptions(ui, cmdoptions, args)
+
+ if (opts.get('modified') or opts.get('deleted')
+ or opts.get('others') or opts.get('ignored')):
+ cmd = Command('status')
+ if opts.get('deleted'):
+ cmd['-d'] = None
+ if opts.get('modified'):
+ cmd['-m'] = None
+ if opts.get('others'):
+ cmd['-o'] = None
+ if opts.get('ignored'):
+ cmd['-i'] = None
+ else:
+ cmd = Command('files')
+ if opts.get('stage'):
+ ui.status(_("note: Mercurial doesn't have a staging area, ignoring "
+ "--stage\n"))
+ if opts.get('_zero'):
+ cmd['-0'] = None
+ cmd.append('.')
+ for include in args:
+ cmd['-I'] = util.shellquote(include)
+
+ ui.status((str(cmd)), "\n")
+
+def merge(ui, repo, *args, **kwargs):
+ cmdoptions = [
+ ]
+ args, opts = parseoptions(ui, cmdoptions, args)
+
+ cmd = Command('merge')
+
+ if len(args) > 0:
+ cmd.append(args[len(args) - 1])
+
+ ui.status((str(cmd)), "\n")
+
+def mergebase(ui, repo, *args, **kwargs):
+ cmdoptions = []
+ args, opts = parseoptions(ui, cmdoptions, args)
+
+ if len(args) != 2:
+ args = ['A', 'B']
+
+ cmd = Command("log -T '{node}\\n' -r 'ancestor(%s,%s)'"
+ % (args[0], args[1]))
+
+ ui.status(_('NOTE: ancestor() is part of the revset language.\n'),
+ _("Learn more about revsets with 'hg help revsets'\n\n"))
+ ui.status((str(cmd)), "\n")
+
+def mergetool(ui, repo, *args, **kwargs):
+ cmdoptions = []
+ args, opts = parseoptions(ui, cmdoptions, args)
+
+ cmd = Command("resolve")
+
+ if len(args) == 0:
+ cmd['--all'] = None
+ cmd.extend(args)
+ ui.status((str(cmd)), "\n")
+
+def mv(ui, repo, *args, **kwargs):
+ cmdoptions = [
+ ('f', 'force', None, ''),
+ ]
+ args, opts = parseoptions(ui, cmdoptions, args)
+
+ cmd = Command('mv')
+ cmd.extend(args)
+
+ if opts.get('force'):
+ cmd['-f'] = None
+
+ ui.status((str(cmd)), "\n")
+
+def pull(ui, repo, *args, **kwargs):
+ cmdoptions = [
+ ('', 'all', None, ''),
+ ('f', 'force', None, ''),
+ ('r', 'rebase', None, ''),
+ ]
+ args, opts = parseoptions(ui, cmdoptions, args)
+
+ cmd = Command('pull')
+ cmd['--rebase'] = None
+
+ if len(args) > 0:
+ cmd.append(args[0])
+ if len(args) > 1:
+ ui.status(_("note: Mercurial doesn't have refspecs. " +
+ "-r can be used to specify which commits you want to pull. " +
+ "-B can be used to specify which bookmark you want to pull." +
+ "\n\n"))
+ for v in args[1:]:
+ if v in repo._bookmarks:
+ cmd['-B'] = v
+ else:
+ cmd['-r'] = v
+
+ ui.status((str(cmd)), "\n")
+
+def push(ui, repo, *args, **kwargs):
+ cmdoptions = [
+ ('', 'all', None, ''),
+ ('f', 'force', None, ''),
+ ]
+ args, opts = parseoptions(ui, cmdoptions, args)
+
+ cmd = Command('push')
+
+ if len(args) > 0:
+ cmd.append(args[0])
+ if len(args) > 1:
+ ui.status(_("note: Mercurial doesn't have refspecs. " +
+ "-r can be used to specify which commits you want to push. " +
+ "-B can be used to specify which bookmark you want to push." +
+ "\n\n"))
+ for v in args[1:]:
+ if v in repo._bookmarks:
+ cmd['-B'] = v
+ else:
+ cmd['-r'] = v
+
+ if opts.get('force'):
+ cmd['-f'] = None
+
+ ui.status((str(cmd)), "\n")
+
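+# Illustrative mappings:
+#   'git rebase -i'           -> 'hg histedit'
+#   'git rebase <upstream>'   -> 'hg rebase -d <upstream>'
+#   'git rebase --onto X Y Z' -> "hg rebase -d X -s '::Z - ::Y'"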
+def rebase(ui, repo, *args, **kwargs):
+ cmdoptions = [
+ ('', 'all', None, ''),
+ ('i', 'interactive', None, ''),
+ ('', 'onto', '', ''),
+ ('', 'abort', None, ''),
+ ('', 'continue', None, ''),
+ ('', 'skip', None, ''),
+ ]
+ args, opts = parseoptions(ui, cmdoptions, args)
+
+ if opts.get('interactive'):
+ ui.status(_("note: hg histedit does not perform a rebase. " +
+ "It just edits history.\n\n"))
+ cmd = Command('histedit')
+ if len(args) > 0:
+ ui.status(_("also note: 'hg histedit' will automatically detect"
+ " your stack, so no second argument is necessary.\n\n"))
+ ui.status((str(cmd)), "\n")
+ return
+
+ if opts.get('skip'):
+ cmd = Command('revert --all -r .')
+ ui.status((str(cmd)), "\n")
+
+ cmd = Command('rebase')
+
+ if opts.get('continue') or opts.get('skip'):
+ cmd['--continue'] = None
+ if opts.get('abort'):
+ cmd['--abort'] = None
+
+ if opts.get('onto'):
+ ui.status(_("note: if you're trying to lift a commit off one branch, " +
+ "try hg rebase -d <destination commit> -s <commit to be lifted>" +
+ "\n\n"))
+ cmd['-d'] = convert(opts.get('onto'))
+ if len(args) < 2:
+ raise error.Abort("Expected format: git rebase --onto X Y Z")
+ cmd['-s'] = "'::%s - ::%s'" % (convert(args[1]), convert(args[0]))
+ else:
+ if len(args) == 1:
+ cmd['-d'] = convert(args[0])
+ elif len(args) == 2:
+ cmd['-d'] = convert(args[0])
+ cmd['-b'] = convert(args[1])
+
+ ui.status((str(cmd)), "\n")
+
+def reflog(ui, repo, *args, **kwargs):
+ cmdoptions = [
+ ('', 'all', None, ''),
+ ]
+ args, opts = parseoptions(ui, cmdoptions, args)
+
+ cmd = Command('journal')
+ if opts.get('all'):
+ cmd['--all'] = None
+ if len(args) > 0:
+ cmd.append(args[0])
+
+ ui.status(str(cmd), "\n\n")
+ ui.status(_("note: in hg commits can be deleted from repo but we always"
+ " have backups.\n"))
+
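+# Illustrative mappings:
+#   'git reset --hard <commit>' -> 'hg update --clean <commit>'
+#   'git reset <commit>'        -> 'hg update <commit>'
+# (--soft and --mixed only trigger a note: there is no staging area.)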
+def reset(ui, repo, *args, **kwargs):
+ cmdoptions = [
+ ('', 'soft', None, ''),
+ ('', 'hard', None, ''),
+ ('', 'mixed', None, ''),
+ ]
+ args, opts = parseoptions(ui, cmdoptions, args)
+
+ commit = convert(args[0] if len(args) > 0 else '.')
+ hard = opts.get('hard')
+
+ if opts.get('mixed'):
+ ui.status(_('NOTE: --mixed has no meaning since Mercurial has no '
+ 'staging area\n\n'))
+ if opts.get('soft'):
+ ui.status(_('NOTE: --soft has no meaning since Mercurial has no '
+ 'staging area\n\n'))
+
+ cmd = Command('update')
+ if hard:
+ cmd.append('--clean')
+
+ cmd.append(commit)
+
+ ui.status((str(cmd)), "\n")
+
+def revert(ui, repo, *args, **kwargs):
+ cmdoptions = [
+ ]
+ args, opts = parseoptions(ui, cmdoptions, args)
+
+ if len(args) > 1:
+ ui.status(_("note: hg backout doesn't support multiple commits at " +
+ "once\n\n"))
+
+ cmd = Command('backout')
+ if args:
+ cmd.append(args[0])
+
+ ui.status((str(cmd)), "\n")
+
+def revparse(ui, repo, *args, **kwargs):
+ cmdoptions = [
+ ('', 'show-cdup', None, ''),
+ ('', 'show-toplevel', None, ''),
+ ]
+ args, opts = parseoptions(ui, cmdoptions, args)
+
+ if opts.get('show_cdup') or opts.get('show_toplevel'):
+ cmd = Command('root')
+ if opts.get('show_cdup'):
+ ui.status(_("note: hg root prints the root of the repository\n\n"))
+ ui.status((str(cmd)), "\n")
+ else:
+ ui.status(_("note: see hg help revset for how to refer to commits\n"))
+
+def rm(ui, repo, *args, **kwargs):
+ cmdoptions = [
+ ('f', 'force', None, ''),
+ ('n', 'dry-run', None, ''),
+ ]
+ args, opts = parseoptions(ui, cmdoptions, args)
+
+ cmd = Command('rm')
+ cmd.extend(args)
+
+ if opts.get('force'):
+ cmd['-f'] = None
+ if opts.get('dry_run'):
+ cmd['-n'] = None
+
+ ui.status((str(cmd)), "\n")
+
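+# Illustrative mappings:
+#   'git show <commit>'      -> 'hg export <commit>'
+#   'git show <path>'        -> 'hg cat <path>'
+#   'git show --name-status' -> 'hg log --style status -r .'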
+def show(ui, repo, *args, **kwargs):
+ cmdoptions = [
+ ('', 'name-status', None, ''),
+ ('', 'pretty', '', ''),
+ ('U', 'unified', int, ''),
+ ]
+ args, opts = parseoptions(ui, cmdoptions, args)
+
+ if opts.get('name_status'):
+ if opts.get('pretty') == 'format:':
+ cmd = Command('status')
+ cmd['--change'] = '.'
+ else:
+ cmd = Command('log')
+ cmd.append('--style status')
+ cmd.append('-r .')
+ elif len(args) > 0:
+ if ispath(repo, args[0]):
+ cmd = Command('cat')
+ else:
+ cmd = Command('export')
+ cmd.extend(args)
+ if opts.get('unified'):
+ cmd.append('--config diff.unified=%d' % (opts['unified'],))
+ elif opts.get('unified'):
+ cmd = Command('export')
+ cmd.append('--config diff.unified=%d' % (opts['unified'],))
+ else:
+ cmd = Command('export')
+
+ ui.status((str(cmd)), "\n")
+
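+# Illustrative mappings:
+#   'git stash'             -> 'hg shelve'
+#   'git stash pop'         -> 'hg unshelve'
+#   'git stash apply'       -> 'hg unshelve --keep'
+#   'git stash drop <name>' -> 'hg shelve -d <name>'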
+def stash(ui, repo, *args, **kwargs):
+ cmdoptions = [
+ ]
+ args, opts = parseoptions(ui, cmdoptions, args)
+
+ cmd = Command('shelve')
+ action = args[0] if len(args) > 0 else None
+
+ if action == 'list':
+ cmd['-l'] = None
+ elif action == 'drop':
+ cmd['-d'] = None
+ if len(args) > 1:
+ cmd.append(args[1])
+ else:
+ cmd.append('<shelve name>')
+ elif action == 'pop' or action == 'apply':
+ cmd = Command('unshelve')
+ if len(args) > 1:
+ cmd.append(args[1])
+ if action == 'apply':
+ cmd['--keep'] = None
+ elif (action == 'branch' or action == 'show' or action == 'clear'
+ or action == 'create'):
+ ui.status(_("note: Mercurial doesn't have equivalents to the " +
+ "git stash branch, show, clear, or create actions.\n\n"))
+ return
+ else:
+ if len(args) > 0:
+ if args[0] != 'save':
+ cmd['--name'] = args[0]
+ elif len(args) > 1:
+ cmd['--name'] = args[1]
+
+ ui.status((str(cmd)), "\n")
+
+def status(ui, repo, *args, **kwargs):
+ cmdoptions = [
+ ('', 'ignored', None, ''),
+ ]
+ args, opts = parseoptions(ui, cmdoptions, args)
+
+ cmd = Command('status')
+ cmd.extend(args)
+
+ if opts.get('ignored'):
+ cmd['-i'] = None
+
+ ui.status((str(cmd)), "\n")
+
+def svn(ui, repo, *args, **kwargs):
+ svncmd = args[0]
+ if svncmd not in gitsvncommands:
+ ui.warn(_("error: unknown git svn command %s\n") % (svncmd))
+ return
+
+ args = args[1:]
+ return gitsvncommands[svncmd](ui, repo, *args, **kwargs)
+
+def svndcommit(ui, repo, *args, **kwargs):
+ cmdoptions = [
+ ]
+ args, opts = parseoptions(ui, cmdoptions, args)
+
+ cmd = Command('push')
+
+ ui.status((str(cmd)), "\n")
+
+def svnfetch(ui, repo, *args, **kwargs):
+ cmdoptions = [
+ ]
+ args, opts = parseoptions(ui, cmdoptions, args)
+
+ cmd = Command('pull')
+ cmd.append('default-push')
+
+ ui.status((str(cmd)), "\n")
+
+def svnfindrev(ui, repo, *args, **kwargs):
+ cmdoptions = [
+ ]
+ args, opts = parseoptions(ui, cmdoptions, args)
+
+ cmd = Command('log')
+ cmd['-r'] = args[0]
+
+ ui.status((str(cmd)), "\n")
+
+def svnrebase(ui, repo, *args, **kwargs):
+ cmdoptions = [
+ ('l', 'local', None, ''),
+ ]
+ args, opts = parseoptions(ui, cmdoptions, args)
+
+ pullcmd = Command('pull')
+ pullcmd.append('default-push')
+ rebasecmd = Command('rebase')
+ rebasecmd.append('tip')
+
+ cmd = pullcmd & rebasecmd
+
+ ui.status((str(cmd)), "\n")
+
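+# Illustrative mappings:
+#   'git tag <name>'    -> 'hg tag <name>'
+#   'git tag -l'        -> 'hg tags'
+#   'git tag -d <name>' -> 'hg tag <name> --remove'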
+def tag(ui, repo, *args, **kwargs):
+ cmdoptions = [
+ ('f', 'force', None, ''),
+ ('l', 'list', None, ''),
+ ('d', 'delete', None, ''),
+ ]
+ args, opts = parseoptions(ui, cmdoptions, args)
+
+ if opts.get('list'):
+ cmd = Command('tags')
+ else:
+ cmd = Command('tag')
+ cmd.append(args[0])
+ if len(args) > 1:
+ cmd['-r'] = args[1]
+
+ if opts.get('delete'):
+ cmd['--remove'] = None
+
+ if opts.get('force'):
+ cmd['-f'] = None
+
+ ui.status((str(cmd)), "\n")
+
+gitcommands = {
+ 'add': add,
+ 'am': am,
+ 'apply': apply,
+ 'bisect': bisect,
+ 'blame': blame,
+ 'branch': branch,
+ 'checkout': checkout,
+ 'cherry-pick': cherrypick,
+ 'clean': clean,
+ 'clone': clone,
+ 'commit': commit,
+ 'diff': diff,
+ 'difftool': difftool,
+ 'fetch': fetch,
+ 'grep': grep,
+ 'init': init,
+ 'log': log,
+ 'ls-files': lsfiles,
+ 'merge': merge,
+ 'merge-base': mergebase,
+ 'mergetool': mergetool,
+ 'mv': mv,
+ 'pull': pull,
+ 'push': push,
+ 'rebase': rebase,
+ 'reflog': reflog,
+ 'reset': reset,
+ 'revert': revert,
+ 'rev-parse': revparse,
+ 'rm': rm,
+ 'show': show,
+ 'stash': stash,
+ 'status': status,
+ 'svn': svn,
+ 'tag': tag,
+ 'whatchanged': deprecated,
+}
+
+gitsvncommands = {
+ 'dcommit': svndcommit,
+ 'fetch': svnfetch,
+ 'find-rev': svnfindrev,
+ 'rebase': svnrebase,
+}
--- a/hgext/gpg.py Mon Jan 08 16:07:51 2018 -0800
+++ b/hgext/gpg.py Mon Jan 22 17:53:02 2018 -0500
@@ -106,7 +106,7 @@
def newgpg(ui, **opts):
"""create a new gpg instance"""
gpgpath = ui.config("gpg", "cmd")
- gpgkey = opts.get('key')
+ gpgkey = opts.get(r'key')
if not gpgkey:
gpgkey = ui.config("gpg", "key")
return gpg(gpgpath, gpgkey)
@@ -253,6 +253,7 @@
def _dosign(ui, repo, *revs, **opts):
mygpg = newgpg(ui, **opts)
+ opts = pycompat.byteskwargs(opts)
sigver = "0"
sigmessage = ""
@@ -312,7 +313,8 @@
% hgnode.short(n)
for n in nodes])
try:
- editor = cmdutil.getcommiteditor(editform='gpg.sign', **opts)
+ editor = cmdutil.getcommiteditor(editform='gpg.sign',
+ **pycompat.strkwargs(opts))
repo.commit(message, opts['user'], opts['date'], match=msigs,
editor=editor)
except ValueError as inst:
--- a/hgext/graphlog.py Mon Jan 08 16:07:51 2018 -0800
+++ b/hgext/graphlog.py Mon Jan 22 17:53:02 2018 -0500
@@ -66,5 +66,5 @@
This is an alias to :hg:`log -G`.
"""
- opts['graph'] = True
+ opts[r'graph'] = True
return commands.log(ui, repo, *pats, **opts)
--- a/hgext/hgk.py Mon Jan 08 16:07:51 2018 -0800
+++ b/hgext/hgk.py Mon Jan 22 17:53:02 2018 -0500
@@ -48,6 +48,7 @@
commands,
obsolete,
patch,
+ pycompat,
registrar,
scmutil,
util,
@@ -79,6 +80,7 @@
inferrepo=True)
def difftree(ui, repo, node1=None, node2=None, *files, **opts):
"""diff trees from two commits"""
+
def __difftree(repo, node1, node2, files=None):
assert node2 is not None
if files is None:
@@ -102,7 +104,7 @@
##
while True:
- if opts['stdin']:
+ if opts[r'stdin']:
try:
line = util.bytesinput(ui.fin, ui.fout).split(' ')
node1 = line[0]
@@ -118,8 +120,8 @@
else:
node2 = node1
node1 = repo.changelog.parents(node1)[0]
- if opts['patch']:
- if opts['pretty']:
+ if opts[r'patch']:
+ if opts[r'pretty']:
catcommit(ui, repo, node2, "")
m = scmutil.match(repo[node1], files)
diffopts = patch.difffeatureopts(ui)
@@ -130,7 +132,7 @@
ui.write(chunk)
else:
__difftree(repo, node1, node2, files=files)
- if not opts['stdin']:
+ if not opts[r'stdin']:
break
def catcommit(ui, repo, n, prefix, ctx=None):
@@ -183,7 +185,7 @@
# strings
#
prefix = ""
- if opts['stdin']:
+ if opts[r'stdin']:
try:
(type, r) = util.bytesinput(ui.fin, ui.fout).split(' ')
prefix = " "
@@ -201,7 +203,7 @@
return 1
n = repo.lookup(r)
catcommit(ui, repo, n, prefix)
- if opts['stdin']:
+ if opts[r'stdin']:
try:
(type, r) = util.bytesinput(ui.fin, ui.fout).split(' ')
except EOFError:
@@ -340,7 +342,7 @@
else:
full = None
copy = [x for x in revs]
- revtree(ui, copy, repo, full, opts['max_count'], opts['parents'])
+ revtree(ui, copy, repo, full, opts[r'max_count'], opts[r'parents'])
@command('view',
[('l', 'limit', '',
@@ -348,6 +350,7 @@
_('[-l LIMIT] [REVRANGE]'))
def view(ui, repo, *etc, **opts):
"start interactive history viewer"
+ opts = pycompat.byteskwargs(opts)
os.chdir(repo.root)
optstr = ' '.join(['--%s %s' % (k, v) for k, v in opts.iteritems() if v])
if repo.filtername is None:
--- a/hgext/highlight/highlight.py Mon Jan 08 16:07:51 2018 -0800
+++ b/hgext/highlight/highlight.py Mon Jan 22 17:53:02 2018 -0500
@@ -22,8 +22,12 @@
import pygments
import pygments.formatters
import pygments.lexers
+ import pygments.plugin
import pygments.util
+ for unused in pygments.plugin.find_plugin_lexers():
+ pass
+
highlight = pygments.highlight
ClassNotFound = pygments.util.ClassNotFound
guess_lexer = pygments.lexers.guess_lexer
--- a/hgext/histedit.py Mon Jan 08 16:07:51 2018 -0800
+++ b/hgext/histedit.py Mon Jan 22 17:53:02 2018 -0500
@@ -203,6 +203,7 @@
mergeutil,
node,
obsolete,
+ pycompat,
registrar,
repair,
scmutil,
@@ -542,9 +543,9 @@
def commitfunc(**kwargs):
overrides = {('phases', 'new-commit'): phasemin}
with repo.ui.configoverride(overrides, 'histedit'):
- extra = kwargs.get('extra', {}).copy()
+ extra = kwargs.get(r'extra', {}).copy()
extra['histedit_source'] = src.hex()
- kwargs['extra'] = extra
+ kwargs[r'extra'] = extra
return repo.commit(**kwargs)
return commitfunc
@@ -602,7 +603,7 @@
if path in headmf:
fctx = last[path]
flags = fctx.flags()
- mctx = context.memfilectx(repo,
+ mctx = context.memfilectx(repo, ctx,
fctx.path(), fctx.data(),
islink='l' in flags,
isexec='x' in flags,
@@ -917,7 +918,8 @@
('o', 'outgoing', False, _('changesets not found in destination')),
('f', 'force', False,
_('force outgoing even for unrelated repositories')),
- ('r', 'rev', [], _('first revision to be edited'), _('REV'))],
+ ('r', 'rev', [], _('first revision to be edited'), _('REV'))] +
+ cmdutil.formatteropts,
_("[OPTIONS] ([ANCESTOR] | --outgoing [URL])"))
def histedit(ui, repo, *freeargs, **opts):
"""interactively edit changeset history
@@ -1094,6 +1096,9 @@
_('histedit requires exactly one ancestor revision'))
def _histedit(ui, repo, state, *freeargs, **opts):
+ opts = pycompat.byteskwargs(opts)
+ fm = ui.formatter('histedit', opts)
+ fm.startitem()
goal = _getgoal(opts)
revs = opts.get('rev', [])
rules = opts.get('commands', '')
@@ -1116,7 +1121,8 @@
_newhistedit(ui, repo, state, revs, freeargs, opts)
_continuehistedit(ui, repo, state)
- _finishhistedit(ui, repo, state)
+ _finishhistedit(ui, repo, state, fm)
+ fm.end()
def _continuehistedit(ui, repo, state):
"""This function runs after either:
@@ -1163,7 +1169,7 @@
state.write()
ui.progress(_("editing"), None)
-def _finishhistedit(ui, repo, state):
+def _finishhistedit(ui, repo, state, fm):
"""This action runs when histedit is finishing its session"""
repo.ui.pushbuffer()
hg.update(repo, state.parentctxnode, quietempty=True)
@@ -1197,6 +1203,13 @@
mapping = {k: v for k, v in mapping.items()
if k in nodemap and all(n in nodemap for n in v)}
scmutil.cleanupnodes(repo, mapping, 'histedit')
+ hf = fm.hexfunc
+ fl = fm.formatlist
+ fd = fm.formatdict
+ nodechanges = fd({hf(oldn): fl([hf(n) for n in newn], name='node')
+ for oldn, newn in mapping.iteritems()},
+ key="oldnode", value="newnodes")
+ fm.data(nodechanges=nodechanges)
state.clear()
if os.path.exists(repo.sjoin('undo')):
@@ -1297,6 +1310,9 @@
state.topmost = topmost
state.replacements = []
+ ui.log("histedit", "%d actions to histedit", len(actions),
+ histedit_num_actions=len(actions))
+
# Create a backup so we can always abort completely.
backupfile = None
if not obsolete.isenabled(repo, obsolete.createmarkersopt):
--- a/hgext/journal.py Mon Jan 08 16:07:51 2018 -0800
+++ b/hgext/journal.py Mon Jan 22 17:53:02 2018 -0500
@@ -30,6 +30,7 @@
localrepo,
lock,
node,
+ pycompat,
registrar,
util,
)
@@ -133,7 +134,7 @@
Note that by default entries go from most recent to oldest.
"""
- order = kwargs.pop('order', max)
+ order = kwargs.pop(r'order', max)
iterables = [iter(it) for it in iterables]
# this tracks still active iterables; iterables are deleted as they are
# exhausted, which is why this is a dictionary and why each entry also
@@ -303,7 +304,7 @@
# default to 600 seconds timeout
l = lock.lock(
vfs, 'namejournal.lock',
- int(self.ui.config("ui", "timeout")), desc=desc)
+ self.ui.configint("ui", "timeout"), desc=desc)
self.ui.warn(_("got lock after %s seconds\n") % l.delay)
self._lockref = weakref.ref(l)
return l
@@ -458,6 +459,7 @@
`hg journal -T json` can be used to produce machine readable output.
"""
+ opts = pycompat.byteskwargs(opts)
name = '.'
if opts.get('all'):
if args:
@@ -478,6 +480,7 @@
limit = cmdutil.loglimit(opts)
entry = None
+ ui.pager('journal')
for count, entry in enumerate(repo.journal.filtered(name=name)):
if count == limit:
break
--- a/hgext/keyword.py Mon Jan 08 16:07:51 2018 -0800
+++ b/hgext/keyword.py Mon Jan 22 17:53:02 2018 -0500
@@ -104,6 +104,7 @@
match,
patch,
pathutil,
+ pycompat,
registrar,
scmutil,
templatefilters,
@@ -380,6 +381,7 @@
'''Bails out if [keyword] configuration is not active.
Returns status of working directory.'''
if kwt:
+ opts = pycompat.byteskwargs(opts)
return repo.status(match=scmutil.match(wctx, pats, opts), clean=True,
unknown=opts.get('unknown') or opts.get('all'))
if ui.configitems('keyword'):
@@ -436,16 +438,16 @@
ui.setconfig('keywordset', 'svn', svn, 'keyword')
uikwmaps = ui.configitems('keywordmaps')
- if args or opts.get('rcfile'):
+ if args or opts.get(r'rcfile'):
ui.status(_('\n\tconfiguration using custom keyword template maps\n'))
if uikwmaps:
ui.status(_('\textending current template maps\n'))
- if opts.get('default') or not uikwmaps:
+ if opts.get(r'default') or not uikwmaps:
if svn:
ui.status(_('\toverriding default svn keywordset\n'))
else:
ui.status(_('\toverriding default cvs keywordset\n'))
- if opts.get('rcfile'):
+ if opts.get(r'rcfile'):
ui.readconfig(opts.get('rcfile'))
if args:
# simulate hgrc parsing
@@ -453,7 +455,7 @@
repo.vfs.write('hgrc', rcmaps)
ui.readconfig(repo.vfs.join('hgrc'))
kwmaps = dict(ui.configitems('keywordmaps'))
- elif opts.get('default'):
+ elif opts.get(r'default'):
if svn:
ui.status(_('\n\tconfiguration using default svn keywordset\n'))
else:
@@ -543,6 +545,7 @@
else:
cwd = ''
files = []
+ opts = pycompat.byteskwargs(opts)
if not opts.get('unknown') or opts.get('all'):
files = sorted(status.modified + status.added + status.clean)
kwfiles = kwt.iskwfile(files, wctx)
--- a/hgext/largefiles/lfcommands.py Mon Jan 08 16:07:51 2018 -0800
+++ b/hgext/largefiles/lfcommands.py Mon Jan 22 17:53:02 2018 -0500
@@ -24,6 +24,7 @@
lock,
match as matchmod,
node,
+ pycompat,
registrar,
scmutil,
util,
@@ -74,6 +75,7 @@
Use --to-normal to convert largefiles back to normal files; after
this, the DEST repository can be used without largefiles at all.'''
+ opts = pycompat.byteskwargs(opts)
if opts['to_normal']:
tolfile = False
else:
@@ -177,7 +179,7 @@
convcmd.converter = converter
try:
- convcmd.convert(ui, src, dest)
+ convcmd.convert(ui, src, dest, source_type='hg', dest_type='hg')
finally:
convcmd.converter = orig
success = True
@@ -259,7 +261,8 @@
# doesn't change after rename or copy
renamed = lfutil.standin(renamed[0])
- return context.memfilectx(repo, f, lfiletohash[srcfname] + '\n',
+ return context.memfilectx(repo, memctx, f,
+ lfiletohash[srcfname] + '\n',
'l' in fctx.flags(), 'x' in fctx.flags(),
renamed)
else:
@@ -311,7 +314,7 @@
data = fctx.data()
if f == '.hgtags':
data = _converttags (repo.ui, revmap, data)
- return context.memfilectx(repo, f, data, 'l' in fctx.flags(),
+ return context.memfilectx(repo, ctx, f, data, 'l' in fctx.flags(),
'x' in fctx.flags(), renamed)
# Remap tag data using a revision map
@@ -579,7 +582,7 @@
"""
repo.lfpullsource = source
- revs = opts.get('rev', [])
+ revs = opts.get(r'rev', [])
if not revs:
raise error.Abort(_('no revisions specified'))
revs = scmutil.revrange(repo, revs)
@@ -590,3 +593,12 @@
(cached, missing) = cachelfiles(ui, repo, rev)
numcached += len(cached)
ui.status(_("%d largefiles cached\n") % numcached)
+
+@command('debuglfput',
+ [] + cmdutil.remoteopts,
+ _('FILE'))
+def debuglfput(ui, repo, filepath, **kwargs):
+ hash = lfutil.hashfile(filepath)
+ storefactory.openstore(repo).put(filepath, hash)
+ ui.write('%s\n' % hash)
+ return 0
--- a/hgext/largefiles/lfutil.py Mon Jan 08 16:07:51 2018 -0800
+++ b/hgext/largefiles/lfutil.py Mon Jan 22 17:53:02 2018 -0500
@@ -69,31 +69,31 @@
to preserve download bandwidth and storage space.'''
return os.path.join(_usercachedir(ui), hash)
-def _usercachedir(ui):
+def _usercachedir(ui, name=longname):
'''Return the location of the "global" largefiles cache.'''
- path = ui.configpath(longname, 'usercache')
+ path = ui.configpath(name, 'usercache')
if path:
return path
if pycompat.iswindows:
appdata = encoding.environ.get('LOCALAPPDATA',\
encoding.environ.get('APPDATA'))
if appdata:
- return os.path.join(appdata, longname)
+ return os.path.join(appdata, name)
elif pycompat.isdarwin:
home = encoding.environ.get('HOME')
if home:
- return os.path.join(home, 'Library', 'Caches', longname)
+ return os.path.join(home, 'Library', 'Caches', name)
elif pycompat.isposix:
path = encoding.environ.get('XDG_CACHE_HOME')
if path:
- return os.path.join(path, longname)
+ return os.path.join(path, name)
home = encoding.environ.get('HOME')
if home:
- return os.path.join(home, '.cache', longname)
+ return os.path.join(home, '.cache', name)
else:
raise error.Abort(_('unknown operating system: %s\n')
% pycompat.osname)
- raise error.Abort(_('unknown %s usercache location') % longname)
+ raise error.Abort(_('unknown %s usercache location') % name)
def inusercache(ui, hash):
path = usercachepath(ui, hash)
--- a/hgext/largefiles/overrides.py Mon Jan 08 16:07:51 2018 -0800
+++ b/hgext/largefiles/overrides.py Mon Jan 22 17:53:02 2018 -0500
@@ -21,6 +21,7 @@
hg,
match as matchmod,
pathutil,
+ pycompat,
registrar,
scmutil,
smartset,
@@ -156,7 +157,7 @@
# Need to lock, otherwise there could be a race condition between
# when standins are created and added to the repo.
with repo.wlock():
- if not opts.get('dry_run'):
+ if not opts.get(r'dry_run'):
standins = []
lfdirstate = lfutil.openlfdirstate(ui, repo)
for f in lfnames:
@@ -177,7 +178,7 @@
return added, bad
def removelargefiles(ui, repo, isaddremove, matcher, **opts):
- after = opts.get('after')
+ after = opts.get(r'after')
m = composelargefilematcher(matcher, repo[None].manifest())
try:
repo.lfstatus = True
@@ -221,11 +222,11 @@
name = m.rel(f)
ui.status(_('removing %s\n') % name)
- if not opts.get('dry_run'):
+ if not opts.get(r'dry_run'):
if not after:
repo.wvfs.unlinkpath(f, ignoremissing=True)
- if opts.get('dry_run'):
+ if opts.get(r'dry_run'):
return result
remove = [lfutil.standin(f) for f in remove]
@@ -252,7 +253,7 @@
# -- Wrappers: modify existing commands --------------------------------
def overrideadd(orig, ui, repo, *pats, **opts):
- if opts.get('normal') and opts.get('large'):
+ if opts.get(r'normal') and opts.get(r'large'):
raise error.Abort(_('--normal cannot be used with --large'))
return orig(ui, repo, *pats, **opts)
@@ -403,9 +404,9 @@
setattr(cmdutil, '_makenofollowlogfilematcher', oldmakelogfilematcher)
def overrideverify(orig, ui, repo, *pats, **opts):
- large = opts.pop('large', False)
- all = opts.pop('lfa', False)
- contents = opts.pop('lfc', False)
+ large = opts.pop(r'large', False)
+ all = opts.pop(r'lfa', False)
+ contents = opts.pop(r'lfc', False)
result = orig(ui, repo, *pats, **opts)
if large or all or contents:
@@ -413,7 +414,7 @@
return result
def overridedebugstate(orig, ui, repo, *pats, **opts):
- large = opts.pop('large', False)
+ large = opts.pop(r'large', False)
if large:
class fakerepo(object):
dirstate = lfutil.openlfdirstate(ui, repo)
@@ -802,8 +803,8 @@
repo.lfpullsource = source
result = orig(ui, repo, source, **opts)
revspostpull = len(repo)
- lfrevs = opts.get('lfrev', [])
- if opts.get('all_largefiles'):
+ lfrevs = opts.get(r'lfrev', [])
+ if opts.get(r'all_largefiles'):
lfrevs.append('pulled()')
if lfrevs and revspostpull > revsprepull:
numcached = 0
@@ -820,7 +821,7 @@
def overridepush(orig, ui, repo, *args, **kwargs):
"""Override push command and store --lfrev parameters in opargs"""
- lfrevs = kwargs.pop('lfrev', None)
+ lfrevs = kwargs.pop(r'lfrev', None)
if lfrevs:
opargs = kwargs.setdefault('opargs', {})
opargs['lfrevs'] = scmutil.revrange(repo, lfrevs)
@@ -828,7 +829,7 @@
def exchangepushoperation(orig, *args, **kwargs):
"""Override pushoperation constructor and store lfrevs parameter"""
- lfrevs = kwargs.pop('lfrevs', None)
+ lfrevs = kwargs.pop(r'lfrevs', None)
pushop = orig(*args, **kwargs)
pushop.lfrevs = lfrevs
return pushop
@@ -865,7 +866,7 @@
d = dest
if d is None:
d = hg.defaultdest(source)
- if opts.get('all_largefiles') and not hg.islocal(d):
+ if opts.get(r'all_largefiles') and not hg.islocal(d):
raise error.Abort(_(
'--all-largefiles is incompatible with non-local destination %s') %
d)
@@ -887,13 +888,13 @@
# If largefiles is required for this repo, permanently enable it locally
if 'largefiles' in repo.requirements:
- with repo.vfs('hgrc', 'a', text=True) as fp:
- fp.write('\n[extensions]\nlargefiles=\n')
+ repo.vfs.append('hgrc',
+ util.tonativeeol('\n[extensions]\nlargefiles=\n'))
# Caching is implicitly limited to 'rev' option, since the dest repo was
# truncated at that point. The user may expect a download count with
# this option, so attempt whether or not this is a largefile repo.
- if opts.get('all_largefiles'):
+ if opts.get(r'all_largefiles'):
success, missing = lfcommands.downloadlfiles(ui, repo, None)
if missing != 0:
@@ -906,14 +907,14 @@
# If largefiles is required for this repo, permanently enable it locally
if 'largefiles' in destrepo.requirements:
- with destrepo.vfs('hgrc', 'a+', text=True) as fp:
- fp.write('\n[extensions]\nlargefiles=\n')
+ destrepo.vfs.append('hgrc',
+ util.tonativeeol('\n[extensions]\nlargefiles=\n'))
def overriderebase(orig, ui, repo, **opts):
if not util.safehasattr(repo, '_largefilesenabled'):
return orig(ui, repo, **opts)
- resuming = opts.get('continue')
+ resuming = opts.get(r'continue')
repo._lfcommithooks.append(lfutil.automatedcommithook(resuming))
repo._lfstatuswriters.append(lambda *msg, **opts: None)
try:
@@ -1272,6 +1273,7 @@
repo.status = overridestatus
orig(ui, repo, *dirs, **opts)
repo.status = oldstatus
+
def overriderollback(orig, ui, repo, **opts):
with repo.wlock():
before = repo.dirstate.parents()
@@ -1310,7 +1312,7 @@
return result
def overridetransplant(orig, ui, repo, *revs, **opts):
- resuming = opts.get('continue')
+ resuming = opts.get(r'continue')
repo._lfcommithooks.append(lfutil.automatedcommithook(resuming))
repo._lfstatuswriters.append(lambda *msg, **opts: None)
try:
@@ -1321,6 +1323,7 @@
return result
def overridecat(orig, ui, repo, file1, *pats, **opts):
+ opts = pycompat.byteskwargs(opts)
ctx = scmutil.revsingle(repo, opts.get('rev'))
err = 1
notbad = set()
@@ -1382,7 +1385,7 @@
def mergeupdate(orig, repo, node, branchmerge, force,
*args, **kwargs):
- matcher = kwargs.get('matcher', None)
+ matcher = kwargs.get(r'matcher', None)
# note if this is a partial update
partial = matcher and not matcher.always()
with repo.wlock():
@@ -1437,7 +1440,7 @@
# Make sure the merge runs on disk, not in-memory. largefiles is not a
# good candidate for in-memory merge (large files, custom dirstate,
# matcher usage).
- kwargs['wc'] = repo[None]
+ kwargs[r'wc'] = repo[None]
result = orig(repo, node, branchmerge, force, *args, **kwargs)
newstandins = lfutil.getstandinsstate(repo)
@@ -1470,3 +1473,20 @@
printmessage=False, normallookup=True)
return result
+
+def upgraderequirements(orig, repo):
+ reqs = orig(repo)
+ if 'largefiles' in repo.requirements:
+ reqs.add('largefiles')
+ return reqs
+
+_lfscheme = 'largefile://'
+def openlargefile(orig, ui, url_, data=None):
+ if url_.startswith(_lfscheme):
+ if data:
+ msg = "cannot use data on a 'largefile://' url"
+ raise error.ProgrammingError(msg)
+ lfid = url_[len(_lfscheme):]
+ return storefactory.getlfile(ui, lfid)
+ else:
+ return orig(ui, url_, data=data)
--- a/hgext/largefiles/proto.py Mon Jan 08 16:07:51 2018 -0800
+++ b/hgext/largefiles/proto.py Mon Jan 22 17:53:02 2018 -0500
@@ -28,7 +28,6 @@
'file.\n')
# these will all be replaced by largefiles.uisetup
-capabilitiesorig = None
ssholdcallstream = None
httpoldcallstream = None
@@ -76,7 +75,7 @@
yield '%d\n' % length
for chunk in util.filechunkiter(f):
yield chunk
- return wireproto.streamres(gen=generator())
+ return wireproto.streamres_legacy(gen=generator())
def statlfile(repo, proto, sha):
'''Server command for checking if a largefile is present - returns '2\n' if
@@ -161,9 +160,11 @@
repo.__class__ = lfileswirerepository
# advertise the largefiles=serve capability
-def capabilities(repo, proto):
- '''Wrap server command to announce largefile server capability'''
- return capabilitiesorig(repo, proto) + ' largefiles=serve'
+def _capabilities(orig, repo, proto):
+ '''announce largefile server capability'''
+ caps = orig(repo, proto)
+ caps.append('largefiles=serve')
+ return caps
def heads(repo, proto):
'''Wrap server command - largefile capable clients will know to call
@@ -176,7 +177,7 @@
if cmd == 'heads' and self.capable('largefiles'):
cmd = 'lheads'
if cmd == 'batch' and self.capable('largefiles'):
- args['cmds'] = args['cmds'].replace('heads ', 'lheads ')
+ args[r'cmds'] = args[r'cmds'].replace('heads ', 'lheads ')
return ssholdcallstream(self, cmd, **args)
headsre = re.compile(r'(^|;)heads\b')
@@ -185,5 +186,5 @@
if cmd == 'heads' and self.capable('largefiles'):
cmd = 'lheads'
if cmd == 'batch' and self.capable('largefiles'):
- args['cmds'] = headsre.sub('lheads', args['cmds'])
+ args[r'cmds'] = headsre.sub('lheads', args[r'cmds'])
return httpoldcallstream(self, cmd, **args)
--- a/hgext/largefiles/remotestore.py Mon Jan 08 16:07:51 2018 -0800
+++ b/hgext/largefiles/remotestore.py Mon Jan 22 17:53:02 2018 -0500
@@ -27,7 +27,9 @@
'''a largefile store accessed over a network'''
def __init__(self, ui, repo, url):
super(remotestore, self).__init__(ui, repo, url)
- self._lstore = localstore.localstore(self.ui, self.repo, self.repo)
+ self._lstore = None
+ if repo is not None:
+ self._lstore = localstore.localstore(self.ui, self.repo, self.repo)
def put(self, source, hash):
if self.sendfile(source, hash):
--- a/hgext/largefiles/reposetup.py Mon Jan 08 16:07:51 2018 -0800
+++ b/hgext/largefiles/reposetup.py Mon Jan 22 17:53:02 2018 -0500
@@ -138,7 +138,7 @@
sf = lfutil.standin(f)
if sf in dirstate:
newfiles.append(sf)
- elif sf in dirstate.dirs():
+ elif dirstate.hasdir(sf):
# Directory entries could be regular or
# standin, check both
newfiles.extend((f, sf))
@@ -156,7 +156,7 @@
def sfindirstate(f):
sf = lfutil.standin(f)
dirstate = self.dirstate
- return sf in dirstate or sf in dirstate.dirs()
+ return sf in dirstate or dirstate.hasdir(sf)
match._files = [f for f in match._files
if sfindirstate(f)]
--- a/hgext/largefiles/storefactory.py Mon Jan 08 16:07:51 2018 -0800
+++ b/hgext/largefiles/storefactory.py Mon Jan 22 17:53:02 2018 -0500
@@ -22,8 +22,9 @@
# During clone this function is passed the src's ui object
# but it needs the dest's ui object so it can read out of
# the config file. Use repo.ui instead.
-def openstore(repo, remote=None, put=False):
- ui = repo.ui
+def openstore(repo=None, remote=None, put=False, ui=None):
+ if ui is None:
+ ui = repo.ui
if not remote:
lfpullsource = getattr(repo, 'lfpullsource', None)
@@ -37,12 +38,16 @@
# ui.expandpath() leaves 'default-push' and 'default' alone if
# they cannot be expanded: fallback to the empty string,
# meaning the current directory.
- if path == 'default-push' or path == 'default':
+ if repo is None:
+ path = ui.expandpath('default')
+ path, _branches = hg.parseurl(path)
+ remote = hg.peer(repo or ui, {}, path)
+ elif path == 'default-push' or path == 'default':
path = ''
remote = repo
else:
path, _branches = hg.parseurl(path)
- remote = hg.peer(repo, {}, path)
+ remote = hg.peer(repo or ui, {}, path)
# The path could be a scheme so use Mercurial's normal functionality
# to resolve the scheme to a repository and use its path
@@ -76,3 +81,6 @@
}
_scheme_re = re.compile(r'^([a-zA-Z0-9+-.]+)://')
+
+def getlfile(ui, hash):
+ return util.chunkbuffer(openstore(ui=ui)._get(hash))
--- a/hgext/largefiles/uisetup.py Mon Jan 08 16:07:51 2018 -0800
+++ b/hgext/largefiles/uisetup.py Mon Jan 22 17:53:02 2018 -0500
@@ -30,6 +30,8 @@
scmutil,
sshpeer,
subrepo,
+ upgrade,
+ url,
wireproto,
)
@@ -60,6 +62,12 @@
extensions.wrapfunction(copies, 'pathcopies', overrides.copiespathcopies)
+ extensions.wrapfunction(upgrade, 'preservedrequirements',
+ overrides.upgraderequirements)
+
+ extensions.wrapfunction(upgrade, 'supporteddestrequirements',
+ overrides.upgraderequirements)
+
# Subrepos call status function
entry = extensions.wrapcommand(commands.table, 'status',
overrides.overridestatus)
@@ -153,13 +161,15 @@
extensions.wrapfunction(scmutil, 'marktouched',
overrides.scmutilmarktouched)
+ extensions.wrapfunction(url, 'open',
+ overrides.openlargefile)
+
# create the new wireproto commands ...
wireproto.commands['putlfile'] = (proto.putlfile, 'sha')
wireproto.commands['getlfile'] = (proto.getlfile, 'sha')
wireproto.commands['statlfile'] = (proto.statlfile, 'sha')
# ... and wrap some existing ones
- wireproto.commands['capabilities'] = (proto.capabilities, '')
wireproto.commands['heads'] = (proto.heads, '')
wireproto.commands['lheads'] = (wireproto.heads, '')
@@ -171,10 +181,7 @@
extensions.wrapfunction(webcommands, 'decodepath', overrides.decodepath)
- # the hello wireproto command uses wireproto.capabilities, so it won't see
- # our largefiles capability unless we replace the actual function as well.
- proto.capabilitiesorig = wireproto.capabilities
- wireproto.capabilities = proto.capabilities
+ extensions.wrapfunction(wireproto, '_capabilities', proto._capabilities)
# can't do this in reposetup because it needs to have happened before
# wirerepo.__init__ is called
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/hgext/lfs/__init__.py Mon Jan 22 17:53:02 2018 -0500
@@ -0,0 +1,387 @@
+# lfs - hash-preserving large file support using Git-LFS protocol
+#
+# Copyright 2017 Facebook, Inc.
+#
+# This software may be used and distributed according to the terms of the
+# GNU General Public License version 2 or any later version.
+
+"""lfs - large file support (EXPERIMENTAL)
+
+This extension allows large files to be tracked outside of the normal
+repository storage and stored on a centralized server, similar to the
+``largefiles`` extension. The ``git-lfs`` protocol is used when
+communicating with the server, so existing git infrastructure can be
+harnessed. Even though the files are stored outside of the repository,
+they are still integrity checked in the same manner as normal files.
+
+The files stored outside of the repository are downloaded on demand,
+which reduces the time to clone, and possibly the local disk usage.
+This changes fundamental workflows in a DVCS, so careful thought
+should be given before deploying it. :hg:`convert` can be used to
+convert LFS repositories to normal repositories that no longer
+require this extension, and do so without changing the commit hashes.
+This allows the extension to be disabled if the centralized workflow
+becomes burdensome. However, the pre- and post-convert clones will
+not be able to communicate with each other unless the extension is
+enabled on both.
+
+To start a new repository, or add new LFS files, just create and add
+an ``.hglfs`` file as described below. Because the file is tracked in
+the repository, all clones will use the same selection policy. During
+subsequent commits, Mercurial will consult this file to determine if
+an added or modified file should be stored externally. The type of
+storage depends on the characteristics of the file at each commit. A
+file that is near a size threshold may switch back and forth between
+LFS and normal storage, as needed.
+
+Alternatively, both normal repositories and largefile-controlled
+repositories can be converted to LFS by using :hg:`convert` and the
+``lfs.track`` config option described below. The ``.hglfs`` file
+should then be created and added, to control subsequent LFS selection.
+The hashes are also unchanged in this case. The LFS and non-LFS
+repositories can be distinguished because the LFS repository will
+abort any command if this extension is disabled.
+
+Committed LFS files are held locally, until the repository is pushed.
+Prior to pushing the normal repository data, the LFS files that are
+tracked by the outgoing commits are automatically uploaded to the
+configured central server. No LFS files are transferred on
+:hg:`pull` or :hg:`clone`. Instead, the files are downloaded on
+demand as they need to be read, if a cached copy cannot be found
+locally. Both committing and downloading an LFS file will link the
+file to a usercache, to speed up future access. See the `usercache`
+config setting described below.
+
+.hglfs::
+
+ The extension reads its configuration from a versioned ``.hglfs``
+ configuration file found in the root of the working directory. The
+ ``.hglfs`` file uses the same syntax as all other Mercurial
+ configuration files. It uses a single section, ``[track]``.
+
+ The ``[track]`` section specifies which files are stored as LFS (or
+ not). Each line is keyed by a file pattern, with a predicate value.
+ The first file pattern match is used, so put more specific patterns
+ first. The available predicates are ``all()``, ``none()``, and
+ ``size()``. See "hg help filesets.size" for the latter.
+
+ Example versioned ``.hglfs`` file::
+
+ [track]
+ # No Makefile or python file, anywhere, will be LFS
+ **Makefile = none()
+ **.py = none()
+
+ **.zip = all()
+ **.exe = size(">1MB")
+
+ # Catchall for everything not matched above
+ ** = size(">10MB")
+
+Configs::
+
+ [lfs]
+ # Remote endpoint. Multiple protocols are supported:
+ # - http(s)://user:pass@example.com/path
+ # git-lfs endpoint
+ # - file:///tmp/path
+ # local filesystem, usually for testing
+ # if unset, lfs will prompt for this setting when it needs to use it.
+ # (default: unset)
+ url = https://example.com/repo.git/info/lfs
+
+ # Which files to track in LFS. Path tests are "**.extname" for file
+ # extensions, and "path:under/some/directory" for path prefix. Both
+ # are relative to the repository root.
+ # File size can be tested with the "size()" fileset, and tests can be
+ # joined with fileset operators. (See "hg help filesets.operators".)
+ #
+ # Some examples:
+ # - all() # everything
+ # - none() # nothing
+ # - size(">20MB") # larger than 20MB
+ # - !**.txt # anything not a *.txt file
+ # - **.zip | **.tar.gz | **.7z # some types of compressed files
+ # - path:bin # files under "bin" in the project root
+ # - (**.php & size(">2MB")) | (**.js & size(">5MB")) | **.tar.gz
+ # | (path:bin & !path:/bin/README) | size(">1GB")
+ # (default: none())
+ #
+ # This is ignored if there is a tracked '.hglfs' file, and this setting
+ # will eventually be deprecated and removed.
+ track = size(">10M")
+
+ # how many times to retry before giving up on transferring an object
+ retry = 5
+
+ # the local directory to store lfs files for sharing across local clones.
+ # If not set, the cache is located in an OS specific cache location.
+ usercache = /path/to/global/cache
+"""
+
+from __future__ import absolute_import
+
+from mercurial.i18n import _
+
+from mercurial import (
+ bundle2,
+ changegroup,
+ cmdutil,
+ config,
+ context,
+ error,
+ exchange,
+ extensions,
+ filelog,
+ fileset,
+ hg,
+ localrepo,
+ minifileset,
+ node,
+ pycompat,
+ registrar,
+ revlog,
+ scmutil,
+ templatekw,
+ upgrade,
+ util,
+ vfs as vfsmod,
+ wireproto,
+)
+
+from . import (
+ blobstore,
+ wrapper,
+)
+
+# Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
+# extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
+# be specifying the version(s) of Mercurial they are tested with, or
+# leave the attribute unspecified.
+testedwith = 'ships-with-hg-core'
+
+configtable = {}
+configitem = registrar.configitem(configtable)
+
+configitem('experimental', 'lfs.user-agent',
+ default=None,
+)
+configitem('experimental', 'lfs.worker-enable',
+ default=False,
+)
+
+configitem('lfs', 'url',
+ default=None,
+)
+configitem('lfs', 'usercache',
+ default=None,
+)
+# Deprecated
+configitem('lfs', 'threshold',
+ default=None,
+)
+configitem('lfs', 'track',
+ default='none()',
+)
+configitem('lfs', 'retry',
+ default=5,
+)
+
+cmdtable = {}
+command = registrar.command(cmdtable)
+
+templatekeyword = registrar.templatekeyword()
+
+def featuresetup(ui, supported):
+ # don't die on seeing a repo with the lfs requirement
+ supported |= {'lfs'}
+
+def uisetup(ui):
+ localrepo.localrepository.featuresetupfuncs.add(featuresetup)
+
+def reposetup(ui, repo):
+ # Nothing to do with a remote repo
+ if not repo.local():
+ return
+
+ repo.svfs.lfslocalblobstore = blobstore.local(repo)
+ repo.svfs.lfsremoteblobstore = blobstore.remote(repo)
+
+ class lfsrepo(repo.__class__):
+ @localrepo.unfilteredmethod
+ def commitctx(self, ctx, error=False):
+ repo.svfs.options['lfstrack'] = _trackedmatcher(self, ctx)
+ return super(lfsrepo, self).commitctx(ctx, error)
+
+ repo.__class__ = lfsrepo
+
+ if 'lfs' not in repo.requirements:
+ def checkrequireslfs(ui, repo, **kwargs):
+ if 'lfs' not in repo.requirements:
+ last = kwargs.get('node_last')
+ _bin = node.bin
+ if last:
+ s = repo.set('%n:%n', _bin(kwargs['node']), _bin(last))
+ else:
+ s = repo.set('%n', _bin(kwargs['node']))
+ for ctx in s:
+ # TODO: is there a way to just walk the files in the commit?
+ if any(ctx[f].islfs() for f in ctx.files() if f in ctx):
+ repo.requirements.add('lfs')
+ repo._writerequirements()
+ repo.prepushoutgoinghooks.add('lfs', wrapper.prepush)
+ break
+
+ ui.setconfig('hooks', 'commit.lfs', checkrequireslfs, 'lfs')
+ ui.setconfig('hooks', 'pretxnchangegroup.lfs', checkrequireslfs, 'lfs')
+ else:
+ repo.prepushoutgoinghooks.add('lfs', wrapper.prepush)
+
+def _trackedmatcher(repo, ctx):
+ """Return a function (path, size) -> bool indicating whether or not to
+ track a given file with lfs."""
+ data = ''
+
+ if '.hglfs' in ctx.added() or '.hglfs' in ctx.modified():
+ data = ctx['.hglfs'].data()
+ elif '.hglfs' not in ctx.removed():
+ p1 = repo['.']
+
+ if '.hglfs' not in p1:
+ # No '.hglfs' in wdir or in parent. Fallback to config
+ # for now.
+ trackspec = repo.ui.config('lfs', 'track')
+
+ # deprecated config: lfs.threshold
+ threshold = repo.ui.configbytes('lfs', 'threshold')
+ if threshold:
+ fileset.parse(trackspec) # make sure syntax errors are confined
+ trackspec = "(%s) | size('>%d')" % (trackspec, threshold)
+
+ return minifileset.compile(trackspec)
+
+ data = p1['.hglfs'].data()
+
+ # In removed, or not in parent
+ if not data:
+ return lambda p, s: False
+
+ # Parse errors here will abort with a message that points to the .hglfs file
+ # and line number.
+ cfg = config.config()
+ cfg.parse('.hglfs', data)
+
+ try:
+ rules = [(minifileset.compile(pattern), minifileset.compile(rule))
+ for pattern, rule in cfg.items('track')]
+ except error.ParseError as e:
+ # The original exception gives no indicator that the error is in the
+ # .hglfs file, so add that.
+
+ # TODO: See if the line number of the file can be made available.
+ raise error.Abort(_('parse error in .hglfs: %s') % e)
+
+ def _match(path, size):
+ for pat, rule in rules:
+ if pat(path, size):
+ return rule(path, size)
+
+ return False
+
+ return _match
+
+def wrapfilelog(filelog):
+ wrapfunction = extensions.wrapfunction
+
+ wrapfunction(filelog, 'addrevision', wrapper.filelogaddrevision)
+ wrapfunction(filelog, 'renamed', wrapper.filelogrenamed)
+ wrapfunction(filelog, 'size', wrapper.filelogsize)
+
+def extsetup(ui):
+ wrapfilelog(filelog.filelog)
+
+ wrapfunction = extensions.wrapfunction
+
+ wrapfunction(cmdutil, '_updatecatformatter', wrapper._updatecatformatter)
+ wrapfunction(scmutil, 'wrapconvertsink', wrapper.convertsink)
+
+ wrapfunction(upgrade, '_finishdatamigration',
+ wrapper.upgradefinishdatamigration)
+
+ wrapfunction(upgrade, 'preservedrequirements',
+ wrapper.upgraderequirements)
+
+ wrapfunction(upgrade, 'supporteddestrequirements',
+ wrapper.upgraderequirements)
+
+ wrapfunction(changegroup,
+ 'supportedoutgoingversions',
+ wrapper.supportedoutgoingversions)
+ wrapfunction(changegroup,
+ 'allsupportedversions',
+ wrapper.allsupportedversions)
+
+ wrapfunction(exchange, 'push', wrapper.push)
+ wrapfunction(wireproto, '_capabilities', wrapper._capabilities)
+
+ wrapfunction(context.basefilectx, 'cmp', wrapper.filectxcmp)
+ wrapfunction(context.basefilectx, 'isbinary', wrapper.filectxisbinary)
+ context.basefilectx.islfs = wrapper.filectxislfs
+
+ revlog.addflagprocessor(
+ revlog.REVIDX_EXTSTORED,
+ (
+ wrapper.readfromstore,
+ wrapper.writetostore,
+ wrapper.bypasscheckhash,
+ ),
+ )
+
+ wrapfunction(hg, 'clone', wrapper.hgclone)
+ wrapfunction(hg, 'postshare', wrapper.hgpostshare)
+
+ # Make bundle choose changegroup3 instead of changegroup2. This affects
+ # "hg bundle" command. Note: it does not cover all bundle formats like
+ # "packed1". Using "packed1" with lfs will likely cause trouble.
+ names = [k for k, v in exchange._bundlespeccgversions.items() if v == '02']
+ for k in names:
+ exchange._bundlespeccgversions[k] = '03'
+
+ # bundlerepo uses "vfsmod.readonlyvfs(othervfs)", so we need to make sure lfs
+ # options and blob stores are passed from othervfs to the new readonlyvfs.
+ wrapfunction(vfsmod.readonlyvfs, '__init__', wrapper.vfsinit)
+
+ # when writing a bundle via "hg bundle" command, upload related LFS blobs
+ wrapfunction(bundle2, 'writenewbundle', wrapper.writenewbundle)
+
+@templatekeyword('lfs_files')
+def lfsfiles(repo, ctx, **args):
+ """List of strings. LFS files added or modified by the changeset."""
+ args = pycompat.byteskwargs(args)
+
+ pointers = wrapper.pointersfromctx(ctx) # {path: pointer}
+ files = sorted(pointers.keys())
+
+ def pointer(v):
+ # In the file spec, version is first and the other keys are sorted.
+ sortkeyfunc = lambda x: (x[0] != 'version', x)
+ items = sorted(pointers[v].iteritems(), key=sortkeyfunc)
+ return util.sortdict(items)
+
+ makemap = lambda v: {
+ 'file': v,
+ 'oid': pointers[v].oid(),
+ 'pointer': templatekw.hybriddict(pointer(v)),
+ }
+
+ # TODO: make the separator ', '?
+ f = templatekw._showlist('lfs_file', files, args)
+ return templatekw._hybrid(f, files, makemap, pycompat.identity)
+
+@command('debuglfsupload',
+ [('r', 'rev', [], _('upload large files introduced by REV'))])
+def debuglfsupload(ui, repo, **opts):
+ """upload lfs blobs added by the working copy parent or given revisions"""
+ revs = opts.get('rev', [])
+ pointers = wrapper.extractpointers(repo, scmutil.revrange(repo, revs))
+ wrapper.uploadblobs(repo, pointers)
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/hgext/lfs/blobstore.py Mon Jan 22 17:53:02 2018 -0500
@@ -0,0 +1,463 @@
+# blobstore.py - local and remote (speaking Git-LFS protocol) blob storages
+#
+# Copyright 2017 Facebook, Inc.
+#
+# This software may be used and distributed according to the terms of the
+# GNU General Public License version 2 or any later version.
+
+from __future__ import absolute_import
+
+import hashlib
+import json
+import os
+import re
+import socket
+
+from mercurial.i18n import _
+
+from mercurial import (
+ error,
+ pathutil,
+ url as urlmod,
+ util,
+ vfs as vfsmod,
+ worker,
+)
+
+from ..largefiles import lfutil
+
+# 64 bytes for SHA256
+_lfsre = re.compile(r'\A[a-f0-9]{64}\Z')
+
+class lfsvfs(vfsmod.vfs):
+ def join(self, path):
+ """split the path at first two characters, like: XX/XXXXX..."""
+ if not _lfsre.match(path):
+ raise error.ProgrammingError('unexpected lfs path: %s' % path)
+ return super(lfsvfs, self).join(path[0:2], path[2:])
+
+ def walk(self, path=None, onerror=None):
+ """Yield (dirpath, [], oids) tuple for blobs under path
+
+ Oids only exist in the root of this vfs, so dirpath is always ''.
+ """
+ root = os.path.normpath(self.base)
+ # when dirpath == root, dirpath[prefixlen:] becomes empty
+ # because len(dirpath) < prefixlen.
+ prefixlen = len(pathutil.normasprefix(root))
+ oids = []
+
+ for dirpath, dirs, files in os.walk(self.reljoin(self.base, path or ''),
+ onerror=onerror):
+ dirpath = dirpath[prefixlen:]
+
+ # Silently skip unexpected files and directories
+ if len(dirpath) == 2:
+ oids.extend([dirpath + f for f in files
+ if _lfsre.match(dirpath + f)])
+
+ yield ('', [], oids)
+
+class filewithprogress(object):
+ """a file-like object that supports __len__ and read.
+
+ Useful to provide progress information for how many bytes are read.
+ """
+
+ def __init__(self, fp, callback):
+ self._fp = fp
+ self._callback = callback # func(readsize)
+ fp.seek(0, os.SEEK_END)
+ self._len = fp.tell()
+ fp.seek(0)
+
+ def __len__(self):
+ return self._len
+
+ def read(self, size):
+ if self._fp is None:
+ return b''
+ data = self._fp.read(size)
+ if data:
+ if self._callback:
+ self._callback(len(data))
+ else:
+ self._fp.close()
+ self._fp = None
+ return data
+
+class local(object):
+ """Local blobstore for large file contents.
+
+ This blobstore is used both as a cache and as a staging area for large blobs
+ to be uploaded to the remote blobstore.
+ """
+
+ def __init__(self, repo):
+ fullpath = repo.svfs.join('lfs/objects')
+ self.vfs = lfsvfs(fullpath)
+ usercache = lfutil._usercachedir(repo.ui, 'lfs')
+ self.cachevfs = lfsvfs(usercache)
+ self.ui = repo.ui
+
+ def open(self, oid):
+ """Open a read-only file descriptor to the named blob, in either the
+ usercache or the local store."""
+ # The usercache is the most likely place to hold the file. Commit will
+ # write to both it and the local store, as will anything that downloads
+ # the blobs. However, things like clone without an update won't
+ # populate the local store. For an init + push of a local clone,
+ # the usercache is the only place it _could_ be. If not present, the
+ # missing-file message here will refer to the local repo, not the usercache.
+ if self.cachevfs.exists(oid):
+ return self.cachevfs(oid, 'rb')
+
+ return self.vfs(oid, 'rb')
+
+ def download(self, oid, src):
+ """Read the blob from the remote source in chunks, verify the content,
+ and write to this local blobstore."""
+ sha256 = hashlib.sha256()
+
+ with self.vfs(oid, 'wb', atomictemp=True) as fp:
+ for chunk in util.filechunkiter(src, size=1048576):
+ fp.write(chunk)
+ sha256.update(chunk)
+
+ realoid = sha256.hexdigest()
+ if realoid != oid:
+ raise error.Abort(_('corrupt remote lfs object: %s') % oid)
+
+ # XXX: should we verify the content of the cache, and hardlink back to
+ # the local store on success, but truncate, write and link on failure?
+ if not self.cachevfs.exists(oid):
+ self.ui.note(_('lfs: adding %s to the usercache\n') % oid)
+ lfutil.link(self.vfs.join(oid), self.cachevfs.join(oid))
+
+ def write(self, oid, data):
+ """Write blob to local blobstore.
+
+ This should only be called from the filelog during a commit or similar.
+ As such, there is no need to verify the data. Imports from a remote
+ store must use ``download()`` instead."""
+ with self.vfs(oid, 'wb', atomictemp=True) as fp:
+ fp.write(data)
+
+ # XXX: should we verify the content of the cache, and hardlink back to
+ # the local store on success, but truncate, write and link on failure?
+ if not self.cachevfs.exists(oid):
+ self.ui.note(_('lfs: adding %s to the usercache\n') % oid)
+ lfutil.link(self.vfs.join(oid), self.cachevfs.join(oid))
+
+ def read(self, oid, verify=True):
+ """Read blob from local blobstore."""
+ if not self.vfs.exists(oid):
+ blob = self._read(self.cachevfs, oid, verify)
+
+ # Even if the revlog will verify the content, it needs to be verified
+ # now, before making the hardlink, to avoid propagating corrupt blobs.
+ # Don't abort if corruption is detected, because `hg verify` will
+ # give more useful info about the corruption; simply don't add the
+ # hardlink.
+ if verify or hashlib.sha256(blob).hexdigest() == oid:
+ self.ui.note(_('lfs: found %s in the usercache\n') % oid)
+ lfutil.link(self.cachevfs.join(oid), self.vfs.join(oid))
+ else:
+ self.ui.note(_('lfs: found %s in the local lfs store\n') % oid)
+ blob = self._read(self.vfs, oid, verify)
+ return blob
+
+ def _read(self, vfs, oid, verify):
+ """Read blob (after verifying) from the given store"""
+ blob = vfs.read(oid)
+ if verify:
+ _verify(oid, blob)
+ return blob
+
+ def has(self, oid):
+ """Returns True if the local blobstore contains the requested blob,
+ False otherwise."""
+ return self.cachevfs.exists(oid) or self.vfs.exists(oid)
+
+class _gitlfsremote(object):
+
+ def __init__(self, repo, url):
+ ui = repo.ui
+ self.ui = ui
+ baseurl, authinfo = url.authinfo()
+ self.baseurl = baseurl.rstrip('/')
+ useragent = repo.ui.config('experimental', 'lfs.user-agent')
+ if not useragent:
+ useragent = 'git-lfs/2.3.4 (Mercurial %s)' % util.version()
+ self.urlopener = urlmod.opener(ui, authinfo, useragent)
+ self.retry = ui.configint('lfs', 'retry')
+
+ def writebatch(self, pointers, fromstore):
+ """Batch upload from local to remote blobstore."""
+ self._batch(pointers, fromstore, 'upload')
+
+ def readbatch(self, pointers, tostore):
+ """Batch download from remote to local blostore."""
+ self._batch(pointers, tostore, 'download')
+
+ def _batchrequest(self, pointers, action):
+ """Get metadata about objects pointed by pointers for given action
+
+ Return decoded JSON object like {'objects': [{'oid': '', 'size': 1}]}
+ See https://github.com/git-lfs/git-lfs/blob/master/docs/api/batch.md
+ """
+ objects = [{'oid': p.oid(), 'size': p.size()} for p in pointers]
+ requestdata = json.dumps({
+ 'objects': objects,
+ 'operation': action,
+ })
+ batchreq = util.urlreq.request('%s/objects/batch' % self.baseurl,
+ data=requestdata)
+ batchreq.add_header('Accept', 'application/vnd.git-lfs+json')
+ batchreq.add_header('Content-Type', 'application/vnd.git-lfs+json')
+ try:
+ rawjson = self.urlopener.open(batchreq).read()
+ except util.urlerr.httperror as ex:
+ raise LfsRemoteError(_('LFS HTTP error: %s (action=%s)')
+ % (ex, action))
+ try:
+ response = json.loads(rawjson)
+ except ValueError:
+ raise LfsRemoteError(_('LFS server returns invalid JSON: %s')
+ % rawjson)
+ return response
+
+ def _checkforservererror(self, pointers, responses, action):
+ """Scans errors from objects
+
+ Raises LfsRemoteError if any objects have an error"""
+ for response in responses:
+ # The server should return 404 when objects cannot be found. Some
+ # server implementations (e.g. lfs-test-server) do not set "error"
+ # but just remove "download" from "actions". Treat that case the
+ # same as a 404 error.
+ notfound = (response.get('error', {}).get('code') == 404
+ or (action == 'download'
+ and action not in response.get('actions', [])))
+ if notfound:
+ ptrmap = {p.oid(): p for p in pointers}
+ p = ptrmap.get(response['oid'], None)
+ if p:
+ filename = getattr(p, 'filename', 'unknown')
+ raise LfsRemoteError(
+ _(('LFS server error. Remote object '
+ 'for "%s" not found: %r')) % (filename, response))
+ else:
+ raise LfsRemoteError(
+ _('LFS server error. Unsolicited response for oid %s')
+ % response['oid'])
+ if 'error' in response:
+ raise LfsRemoteError(_('LFS server error: %r') % response)
+
+ def _extractobjects(self, response, pointers, action):
+ """extract objects from response of the batch API
+
+ response: parsed JSON object returned by batch API
+ return response['objects'] filtered by action
+ raise if any object has an error
+ """
+ # Scan errors from objects - fail early
+ objects = response.get('objects', [])
+ self._checkforservererror(pointers, objects, action)
+
+ # Filter objects with given action. Practically, this skips uploading
+ # objects which exist in the server.
+ filteredobjects = [o for o in objects if action in o.get('actions', [])]
+
+ return filteredobjects
+
+ def _basictransfer(self, obj, action, localstore):
+ """Download or upload a single object using basic transfer protocol
+
+ obj: dict, an object description returned by batch API
+ action: string, one of ['upload', 'download']
+ localstore: blobstore.local
+
+ See https://github.com/git-lfs/git-lfs/blob/master/docs/api/\
+ basic-transfers.md
+ """
+ oid = str(obj['oid'])
+
+ href = str(obj['actions'][action].get('href'))
+ headers = obj['actions'][action].get('header', {}).items()
+
+ request = util.urlreq.request(href)
+ if action == 'upload':
+ # If uploading blobs, read data from local blobstore.
+ with localstore.open(oid) as fp:
+ _verifyfile(oid, fp)
+ request.data = filewithprogress(localstore.open(oid), None)
+ request.get_method = lambda: 'PUT'
+
+ for k, v in headers:
+ request.add_header(k, v)
+
+ response = b''
+ try:
+ req = self.urlopener.open(request)
+ if action == 'download':
+ # If downloading blobs, store downloaded data to local blobstore
+ localstore.download(oid, req)
+ else:
+ while True:
+ data = req.read(1048576)
+ if not data:
+ break
+ response += data
+ if response:
+ self.ui.debug('lfs %s response: %s' % (action, response))
+ except util.urlerr.httperror as ex:
+ if self.ui.debugflag:
+ self.ui.debug('%s: %s\n' % (oid, ex.read()))
+ raise LfsRemoteError(_('HTTP error: %s (oid=%s, action=%s)')
+ % (ex, oid, action))
+
+ def _batch(self, pointers, localstore, action):
+ if action not in ['upload', 'download']:
+ raise error.ProgrammingError('invalid Git-LFS action: %s' % action)
+
+ response = self._batchrequest(pointers, action)
+ objects = self._extractobjects(response, pointers, action)
+ total = sum(x.get('size', 0) for x in objects)
+ sizes = {}
+ for obj in objects:
+ sizes[obj.get('oid')] = obj.get('size', 0)
+ topic = {'upload': _('lfs uploading'),
+ 'download': _('lfs downloading')}[action]
+ if len(objects) > 1:
+ self.ui.note(_('lfs: need to transfer %d objects (%s)\n')
+ % (len(objects), util.bytecount(total)))
+ self.ui.progress(topic, 0, total=total)
+ def transfer(chunk):
+ for obj in chunk:
+ objsize = obj.get('size', 0)
+ if self.ui.verbose:
+ if action == 'download':
+ msg = _('lfs: downloading %s (%s)\n')
+ elif action == 'upload':
+ msg = _('lfs: uploading %s (%s)\n')
+ self.ui.note(msg % (obj.get('oid'),
+ util.bytecount(objsize)))
+ retry = self.retry
+ while True:
+ try:
+ self._basictransfer(obj, action, localstore)
+ yield 1, obj.get('oid')
+ break
+ except socket.error as ex:
+ if retry > 0:
+ self.ui.note(
+ _('lfs: failed: %r (remaining retry %d)\n')
+ % (ex, retry))
+ retry -= 1
+ continue
+ raise
+
+ # Until https multiplexing gets sorted out
+ if self.ui.configbool('experimental', 'lfs.worker-enable'):
+ oids = worker.worker(self.ui, 0.1, transfer, (),
+ sorted(objects, key=lambda o: o.get('oid')))
+ else:
+ oids = transfer(sorted(objects, key=lambda o: o.get('oid')))
+
+ processed = 0
+ for _one, oid in oids:
+ processed += sizes[oid]
+ self.ui.progress(topic, processed, total=total)
+ self.ui.note(_('lfs: processed: %s\n') % oid)
+ self.ui.progress(topic, pos=None, total=total)
+
+ def __del__(self):
+ # copied from mercurial/httppeer.py
+ urlopener = getattr(self, 'urlopener', None)
+ if urlopener:
+ for h in urlopener.handlers:
+ h.close()
+ getattr(h, "close_all", lambda : None)()
+
+class _dummyremote(object):
+ """Dummy store storing blobs to temp directory."""
+
+ def __init__(self, repo, url):
+ fullpath = repo.vfs.join('lfs', url.path)
+ self.vfs = lfsvfs(fullpath)
+
+ def writebatch(self, pointers, fromstore):
+ for p in pointers:
+ content = fromstore.read(p.oid(), verify=True)
+ with self.vfs(p.oid(), 'wb', atomictemp=True) as fp:
+ fp.write(content)
+
+ def readbatch(self, pointers, tostore):
+ for p in pointers:
+ with self.vfs(p.oid(), 'rb') as fp:
+ tostore.download(p.oid(), fp)
+
+class _nullremote(object):
+ """Null store storing blobs to /dev/null."""
+
+ def __init__(self, repo, url):
+ pass
+
+ def writebatch(self, pointers, fromstore):
+ pass
+
+ def readbatch(self, pointers, tostore):
+ pass
+
+class _promptremote(object):
+ """Prompt user to set lfs.url when accessed."""
+
+ def __init__(self, repo, url):
+ pass
+
+ def writebatch(self, pointers, fromstore, ui=None):
+ self._prompt()
+
+ def readbatch(self, pointers, tostore, ui=None):
+ self._prompt()
+
+ def _prompt(self):
+ raise error.Abort(_('lfs.url needs to be configured'))
+
+_storemap = {
+ 'https': _gitlfsremote,
+ 'http': _gitlfsremote,
+ 'file': _dummyremote,
+ 'null': _nullremote,
+ None: _promptremote,
+}
+
+def _verify(oid, content):
+ realoid = hashlib.sha256(content).hexdigest()
+ if realoid != oid:
+ raise error.Abort(_('detected corrupt lfs object: %s') % oid,
+ hint=_('run hg verify'))
+
+def _verifyfile(oid, fp):
+ sha256 = hashlib.sha256()
+ while True:
+ data = fp.read(1024 * 1024)
+ if not data:
+ break
+ sha256.update(data)
+ realoid = sha256.hexdigest()
+ if realoid != oid:
+ raise error.Abort(_('detected corrupt lfs object: %s') % oid,
+ hint=_('run hg verify'))
+
+def remote(repo):
+ """remotestore factory. return a store in _storemap depending on config"""
+ url = util.url(repo.ui.config('lfs', 'url') or '')
+ scheme = url.scheme
+ if scheme not in _storemap:
+ raise error.Abort(_('lfs: unknown url scheme: %s') % scheme)
+ return _storemap[scheme](repo, url)
+
+class LfsRemoteError(error.RevlogError):
+ pass
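
For reference, the standalone Python sketch below illustrates the JSON shapes that _batchrequest() and _extractobjects() above exchange with a Git-LFS server over the batch API. It is only an illustration under stated assumptions: the oid, size, and URL values are hypothetical, and nothing here touches Mercurial itself.

import json

SAMPLE_OID = 'ab' * 32          # hypothetical sha256 hex digest
SAMPLE_SIZE = 12

# Request body POSTed to <lfs.url>/objects/batch by _batchrequest():
request_body = json.dumps({
    'objects': [{'oid': SAMPLE_OID, 'size': SAMPLE_SIZE}],
    'operation': 'download',
})

# A plausible response. Objects whose 'error' code is 404, or which lack the
# requested action under 'actions', are treated as missing by
# _checkforservererror().
response = {
    'objects': [{
        'oid': SAMPLE_OID,
        'size': SAMPLE_SIZE,
        'actions': {
            'download': {
                'href': 'https://lfs.example.com/objects/' + SAMPLE_OID,
                'header': {'Authorization': 'Basic ...'},
            },
        },
    }],
}

# _extractobjects() keeps only the objects offering the requested action:
filtered = [o for o in response['objects'] if 'download' in o.get('actions', {})]
assert filtered[0]['oid'] == SAMPLE_OID
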
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/hgext/lfs/pointer.py Mon Jan 22 17:53:02 2018 -0500
@@ -0,0 +1,73 @@
+# pointer.py - Git-LFS pointer serialization
+#
+# Copyright 2017 Facebook, Inc.
+#
+# This software may be used and distributed according to the terms of the
+# GNU General Public License version 2 or any later version.
+
+from __future__ import absolute_import
+
+import re
+
+from mercurial.i18n import _
+
+from mercurial import (
+ error,
+)
+
+class InvalidPointer(error.RevlogError):
+ pass
+
+class gitlfspointer(dict):
+ VERSION = 'https://git-lfs.github.com/spec/v1'
+
+ def __init__(self, *args, **kwargs):
+ self['version'] = self.VERSION
+ super(gitlfspointer, self).__init__(*args, **kwargs)
+
+ @classmethod
+ def deserialize(cls, text):
+ try:
+ return cls(l.split(' ', 1) for l in text.splitlines()).validate()
+ except ValueError: # l.split returns 1 item instead of 2
+ raise InvalidPointer(_('cannot parse git-lfs text: %r') % text)
+
+ def serialize(self):
+ sortkeyfunc = lambda x: (x[0] != 'version', x)
+ items = sorted(self.validate().iteritems(), key=sortkeyfunc)
+ return ''.join('%s %s\n' % (k, v) for k, v in items)
+
+ def oid(self):
+ return self['oid'].split(':')[-1]
+
+ def size(self):
+ return int(self['size'])
+
+ # regular expressions used by _validate
+ # see https://github.com/git-lfs/git-lfs/blob/master/docs/spec.md
+ _keyre = re.compile(r'\A[a-z0-9.-]+\Z')
+ _valuere = re.compile(r'\A[^\n]*\Z')
+ _requiredre = {
+ 'size': re.compile(r'\A[0-9]+\Z'),
+ 'oid': re.compile(r'\Asha256:[0-9a-f]{64}\Z'),
+ 'version': re.compile(r'\A%s\Z' % re.escape(VERSION)),
+ }
+
+ def validate(self):
+ """raise InvalidPointer on error. return self if there is no error"""
+ requiredcount = 0
+ for k, v in self.iteritems():
+ if k in self._requiredre:
+ if not self._requiredre[k].match(v):
+ raise InvalidPointer(_('unexpected value: %s=%r') % (k, v))
+ requiredcount += 1
+ elif not self._keyre.match(k):
+ raise InvalidPointer(_('unexpected key: %s') % k)
+ if not self._valuere.match(v):
+ raise InvalidPointer(_('unexpected value: %s=%r') % (k, v))
+ if len(self._requiredre) != requiredcount:
+ miss = sorted(set(self._requiredre.keys()).difference(self.keys()))
+ raise InvalidPointer(_('missed keys: %s') % ', '.join(miss))
+ return self
+
+deserialize = gitlfspointer.deserialize
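
As a rough illustration of the pointer format handled above (a sketch only: the oid and size are made up and no Mercurial modules are imported), a serialized pointer places 'version' first with the remaining keys in sorted order, and deserialization splits each line once on the first space:

oid = 'sha256:' + 'ab' * 32   # hypothetical content hash
pointer_text = (
    'version https://git-lfs.github.com/spec/v1\n'
    'oid %s\n'
    'size 12\n'
) % oid

# deserialize() builds the dict the same way before calling validate():
parsed = dict(l.split(' ', 1) for l in pointer_text.splitlines())
assert parsed['oid'].split(':')[-1] == 'ab' * 32   # what oid() returns
assert int(parsed['size']) == 12                   # what size() returns
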
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/hgext/lfs/wrapper.py Mon Jan 22 17:53:02 2018 -0500
@@ -0,0 +1,350 @@
+# wrapper.py - methods wrapping core mercurial logic
+#
+# Copyright 2017 Facebook, Inc.
+#
+# This software may be used and distributed according to the terms of the
+# GNU General Public License version 2 or any later version.
+
+from __future__ import absolute_import
+
+import hashlib
+
+from mercurial.i18n import _
+from mercurial.node import bin, nullid, short
+
+from mercurial import (
+ error,
+ filelog,
+ revlog,
+ util,
+)
+
+from ..largefiles import lfutil
+
+from . import (
+ blobstore,
+ pointer,
+)
+
+def supportedoutgoingversions(orig, repo):
+ versions = orig(repo)
+ if 'lfs' in repo.requirements:
+ versions.discard('01')
+ versions.discard('02')
+ versions.add('03')
+ return versions
+
+def allsupportedversions(orig, ui):
+ versions = orig(ui)
+ versions.add('03')
+ return versions
+
+def _capabilities(orig, repo, proto):
+ '''Wrap server command to announce lfs server capability'''
+ caps = orig(repo, proto)
+ # XXX: change to 'lfs=serve' when separate git server isn't required?
+ caps.append('lfs')
+ return caps
+
+def bypasscheckhash(self, text):
+ return False
+
+def readfromstore(self, text):
+ """Read filelog content from local blobstore transform for flagprocessor.
+
+ Default tranform for flagprocessor, returning contents from blobstore.
+ Returns a 2-typle (text, validatehash) where validatehash is True as the
+ contents of the blobstore should be checked using checkhash.
+ """
+ p = pointer.deserialize(text)
+ oid = p.oid()
+ store = self.opener.lfslocalblobstore
+ if not store.has(oid):
+ p.filename = self.filename
+ self.opener.lfsremoteblobstore.readbatch([p], store)
+
+ # The caller will validate the content
+ text = store.read(oid, verify=False)
+
+ # pack hg filelog metadata
+ hgmeta = {}
+ for k in p.keys():
+ if k.startswith('x-hg-'):
+ name = k[len('x-hg-'):]
+ hgmeta[name] = p[k]
+ if hgmeta or text.startswith('\1\n'):
+ text = filelog.packmeta(hgmeta, text)
+
+ return (text, True)
+
+def writetostore(self, text):
+ # hg filelog metadata (includes rename, etc)
+ hgmeta, offset = filelog.parsemeta(text)
+ if offset and offset > 0:
+ # lfs blob does not contain hg filelog metadata
+ text = text[offset:]
+
+ # git-lfs only supports sha256
+ oid = hashlib.sha256(text).hexdigest()
+ self.opener.lfslocalblobstore.write(oid, text)
+
+ # replace contents with metadata
+ longoid = 'sha256:%s' % oid
+ metadata = pointer.gitlfspointer(oid=longoid, size=str(len(text)))
+
+ # by default, we expect the content to be binary. however, LFS could also
+ # be used for non-binary content. add a special entry for non-binary data.
+ # this will be used by filectx.isbinary().
+ if not util.binary(text):
+ # not hg filelog metadata (affecting commit hash), no "x-hg-" prefix
+ metadata['x-is-binary'] = '0'
+
+ # translate hg filelog metadata to lfs metadata with "x-hg-" prefix
+ if hgmeta is not None:
+ for k, v in hgmeta.iteritems():
+ metadata['x-hg-%s' % k] = v
+
+ rawtext = metadata.serialize()
+ return (rawtext, False)
+
+def _islfs(rlog, node=None, rev=None):
+ if rev is None:
+ if node is None:
+ # both None - likely working copy content where node is not ready
+ return False
+ rev = rlog.rev(node)
+ else:
+ node = rlog.node(rev)
+ if node == nullid:
+ return False
+ flags = rlog.flags(rev)
+ return bool(flags & revlog.REVIDX_EXTSTORED)
+
+def filelogaddrevision(orig, self, text, transaction, link, p1, p2,
+ cachedelta=None, node=None,
+ flags=revlog.REVIDX_DEFAULT_FLAGS, **kwds):
+ textlen = len(text)
+ # exclude hg rename meta from file size
+ meta, offset = filelog.parsemeta(text)
+ if offset:
+ textlen -= offset
+
+ lfstrack = self.opener.options['lfstrack']
+
+ # Always exclude hg owned files
+ if not self.filename.startswith('.hg') and lfstrack(self.filename, textlen):
+ flags |= revlog.REVIDX_EXTSTORED
+
+ return orig(self, text, transaction, link, p1, p2, cachedelta=cachedelta,
+ node=node, flags=flags, **kwds)
+
+def filelogrenamed(orig, self, node):
+ if _islfs(self, node):
+ rawtext = self.revision(node, raw=True)
+ if not rawtext:
+ return False
+ metadata = pointer.deserialize(rawtext)
+ if 'x-hg-copy' in metadata and 'x-hg-copyrev' in metadata:
+ return metadata['x-hg-copy'], bin(metadata['x-hg-copyrev'])
+ else:
+ return False
+ return orig(self, node)
+
+def filelogsize(orig, self, rev):
+ if _islfs(self, rev=rev):
+ # fast path: use lfs metadata to answer size
+ rawtext = self.revision(rev, raw=True)
+ metadata = pointer.deserialize(rawtext)
+ return int(metadata['size'])
+ return orig(self, rev)
+
+def filectxcmp(orig, self, fctx):
+ """returns True if text is different than fctx"""
+ # some fctx (ex. hg-git) is not based on basefilectx and do not have islfs
+ if self.islfs() and getattr(fctx, 'islfs', lambda: False)():
+ # fast path: check LFS oid
+ p1 = pointer.deserialize(self.rawdata())
+ p2 = pointer.deserialize(fctx.rawdata())
+ return p1.oid() != p2.oid()
+ return orig(self, fctx)
+
+def filectxisbinary(orig, self):
+ if self.islfs():
+ # fast path: use lfs metadata to answer isbinary
+ metadata = pointer.deserialize(self.rawdata())
+ # if lfs metadata says nothing, assume it's binary by default
+ return bool(int(metadata.get('x-is-binary', 1)))
+ return orig(self)
+
+def filectxislfs(self):
+ return _islfs(self.filelog(), self.filenode())
+
+def _updatecatformatter(orig, fm, ctx, matcher, path, decode):
+ orig(fm, ctx, matcher, path, decode)
+ fm.data(rawdata=ctx[path].rawdata())
+
+def convertsink(orig, sink):
+ sink = orig(sink)
+ if sink.repotype == 'hg':
+ class lfssink(sink.__class__):
+ def putcommit(self, files, copies, parents, commit, source, revmap,
+ full, cleanp2):
+ pc = super(lfssink, self).putcommit
+ node = pc(files, copies, parents, commit, source, revmap, full,
+ cleanp2)
+
+ if 'lfs' not in self.repo.requirements:
+ ctx = self.repo[node]
+
+ # The file list may contain removed files, so check for
+ # membership before assuming it is in the context.
+ if any(f in ctx and ctx[f].islfs() for f, n in files):
+ self.repo.requirements.add('lfs')
+ self.repo._writerequirements()
+
+ # Permanently enable lfs locally
+ self.repo.vfs.append(
+ 'hgrc', util.tonativeeol('\n[extensions]\nlfs=\n'))
+
+ return node
+
+ sink.__class__ = lfssink
+
+ return sink
+
+def vfsinit(orig, self, othervfs):
+ orig(self, othervfs)
+ # copy lfs related options
+ for k, v in othervfs.options.items():
+ if k.startswith('lfs'):
+ self.options[k] = v
+ # also copy lfs blobstores. note: this can run before reposetup, so lfs
+ # blobstore attributes are not always ready at this time.
+ for name in ['lfslocalblobstore', 'lfsremoteblobstore']:
+ if util.safehasattr(othervfs, name):
+ setattr(self, name, getattr(othervfs, name))
+
+def hgclone(orig, ui, opts, *args, **kwargs):
+ result = orig(ui, opts, *args, **kwargs)
+
+ if result is not None:
+ sourcerepo, destrepo = result
+ repo = destrepo.local()
+
+ # When cloning to a remote repo (like through SSH), no repo is available
+ # from the peer. Therefore the hgrc can't be updated.
+ if not repo:
+ return result
+
+ # If lfs is required for this repo, permanently enable it locally
+ if 'lfs' in repo.requirements:
+ repo.vfs.append('hgrc',
+ util.tonativeeol('\n[extensions]\nlfs=\n'))
+
+ return result
+
+def hgpostshare(orig, sourcerepo, destrepo, bookmarks=True, defaultpath=None):
+ orig(sourcerepo, destrepo, bookmarks, defaultpath)
+
+ # If lfs is required for this repo, permanently enable it locally
+ if 'lfs' in destrepo.requirements:
+ destrepo.vfs.append('hgrc', util.tonativeeol('\n[extensions]\nlfs=\n'))
+
+def _canskipupload(repo):
+ # if remotestore is a null store, upload is a no-op and can be skipped
+ return isinstance(repo.svfs.lfsremoteblobstore, blobstore._nullremote)
+
+def candownload(repo):
+ # if remotestore is a null store, downloads will lead to nothing
+ return not isinstance(repo.svfs.lfsremoteblobstore, blobstore._nullremote)
+
+def uploadblobsfromrevs(repo, revs):
+ '''upload lfs blobs introduced by revs
+
+ Note: also used by other extensions, e.g. infinitepush; avoid renaming.
+ '''
+ if _canskipupload(repo):
+ return
+ pointers = extractpointers(repo, revs)
+ uploadblobs(repo, pointers)
+
+def prepush(pushop):
+ """Prepush hook.
+
+ Read through the revisions to push, looking for filelog entries that can be
+ deserialized into metadata so that we can block the push on their upload to
+ the remote blobstore.
+ """
+ return uploadblobsfromrevs(pushop.repo, pushop.outgoing.missing)
+
+def push(orig, repo, remote, *args, **kwargs):
+ """bail on push if the extension isn't enabled on remote when needed"""
+ if 'lfs' in repo.requirements:
+ # If the remote peer is for a local repo, the requirement tests in the
+ # base class method enforce lfs support. Otherwise, some revisions in
+ # this repo use lfs, and the remote repo needs the extension loaded.
+ if not remote.local() and not remote.capable('lfs'):
+ # This is a copy of the message in exchange.push() when requirements
+ # are missing between local repos.
+ m = _("required features are not supported in the destination: %s")
+ raise error.Abort(m % 'lfs',
+ hint=_('enable the lfs extension on the server'))
+ return orig(repo, remote, *args, **kwargs)
+
+def writenewbundle(orig, ui, repo, source, filename, bundletype, outgoing,
+ *args, **kwargs):
+ """upload LFS blobs added by outgoing revisions on 'hg bundle'"""
+ uploadblobsfromrevs(repo, outgoing.missing)
+ return orig(ui, repo, source, filename, bundletype, outgoing, *args,
+ **kwargs)
+
+def extractpointers(repo, revs):
+ """return a list of lfs pointers added by given revs"""
+ repo.ui.debug('lfs: computing set of blobs to upload\n')
+ pointers = {}
+ for r in revs:
+ ctx = repo[r]
+ for p in pointersfromctx(ctx).values():
+ pointers[p.oid()] = p
+ return sorted(pointers.values())
+
+def pointersfromctx(ctx):
+ """return a dict {path: pointer} for given single changectx"""
+ result = {}
+ for f in ctx.files():
+ if f not in ctx:
+ continue
+ fctx = ctx[f]
+ if not _islfs(fctx.filelog(), fctx.filenode()):
+ continue
+ try:
+ result[f] = pointer.deserialize(fctx.rawdata())
+ except pointer.InvalidPointer as ex:
+ raise error.Abort(_('lfs: corrupted pointer (%s@%s): %s\n')
+ % (f, short(ctx.node()), ex))
+ return result
+
+def uploadblobs(repo, pointers):
+ """upload given pointers from local blobstore"""
+ if not pointers:
+ return
+
+ remoteblob = repo.svfs.lfsremoteblobstore
+ remoteblob.writebatch(pointers, repo.svfs.lfslocalblobstore)
+
+def upgradefinishdatamigration(orig, ui, srcrepo, dstrepo, requirements):
+ orig(ui, srcrepo, dstrepo, requirements)
+
+ srclfsvfs = srcrepo.svfs.lfslocalblobstore.vfs
+ dstlfsvfs = dstrepo.svfs.lfslocalblobstore.vfs
+
+ for dirpath, dirs, files in srclfsvfs.walk():
+ for oid in files:
+ ui.write(_('copying lfs blob %s\n') % oid)
+ lfutil.link(srclfsvfs.join(oid), dstlfsvfs.join(oid))
+
+def upgraderequirements(orig, repo):
+ reqs = orig(repo)
+ if 'lfs' in repo.requirements:
+ reqs.add('lfs')
+ return reqs
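
The 'x-hg-' key handling in writetostore()/readfromstore() above can be summarized by this small standalone sketch (hypothetical copy metadata, no Mercurial imports): hg filelog metadata is stored in the pointer under an 'x-hg-' prefix on write, and stripped back out on read.

hgmeta = {'copy': 'old/name.bin', 'copyrev': 'f' * 40}   # hypothetical rename info

# writetostore(): hg filelog metadata -> prefixed pointer keys
pointer_keys = {'x-hg-%s' % k: v for k, v in hgmeta.items()}

# readfromstore(): prefixed pointer keys -> hg filelog metadata
recovered = {k[len('x-hg-'):]: v for k, v in pointer_keys.items()
             if k.startswith('x-hg-')}
assert recovered == hgmeta
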
--- a/hgext/logtoprocess.py Mon Jan 08 16:07:51 2018 -0800
+++ b/hgext/logtoprocess.py Mon Jan 22 17:53:02 2018 -0500
@@ -124,8 +124,6 @@
env = dict(itertools.chain(encoding.environ.items(),
msgpairs, optpairs),
EVENT=event, HGPID=str(os.getpid()))
- # Connect stdin to /dev/null to prevent child processes messing
- # with mercurial's stdin.
runshellcommand(script, env)
return super(logtoprocessui, self).log(event, *msg, **opts)
--- a/hgext/mq.py Mon Jan 08 16:07:51 2018 -0800
+++ b/hgext/mq.py Mon Jan 22 17:53:02 2018 -0500
@@ -565,7 +565,7 @@
return index
return None
- guard_re = re.compile(r'\s?#([-+][^-+# \t\r\n\f][^# \t\r\n\f]*)')
+ guard_re = re.compile(br'\s?#([-+][^-+# \t\r\n\f][^# \t\r\n\f]*)')
def parseseries(self):
self.series = []
--- a/hgext/notify.py Mon Jan 08 16:07:51 2018 -0800
+++ b/hgext/notify.py Mon Jan 22 17:53:02 2018 -0500
@@ -135,6 +135,7 @@
from __future__ import absolute_import
import email
+import email.parser as emailparser
import fnmatch
import socket
import time
@@ -339,7 +340,7 @@
'and revset\n')
return
- p = email.Parser.Parser()
+ p = emailparser.Parser()
try:
msg = p.parsestr(data)
except email.Errors.MessageParseError as inst:
--- a/hgext/patchbomb.py Mon Jan 08 16:07:51 2018 -0800
+++ b/hgext/patchbomb.py Mon Jan 22 17:53:02 2018 -0500
@@ -89,6 +89,7 @@
mail,
node as nodemod,
patch,
+ pycompat,
registrar,
repair,
scmutil,
@@ -318,7 +319,7 @@
tmpfn = os.path.join(tmpdir, 'bundle')
btype = ui.config('patchbomb', 'bundletype')
if btype:
- opts['type'] = btype
+ opts[r'type'] = btype
try:
commands.bundle(ui, repo, tmpfn, dest, **opts)
return util.readfile(tmpfn)
@@ -338,8 +339,8 @@
the user through the editor.
"""
ui = repo.ui
- if opts.get('desc'):
- body = open(opts.get('desc')).read()
+ if opts.get(r'desc'):
+ body = open(opts.get(r'desc')).read()
else:
ui.write(_('\nWrite the introductory message for the '
'patch series.\n\n'))
@@ -359,21 +360,21 @@
"""
ui = repo.ui
_charsets = mail._charsets(ui)
- subj = (opts.get('subject')
+ subj = (opts.get(r'subject')
or prompt(ui, 'Subject:', 'A bundle for your repository'))
body = _getdescription(repo, '', sender, **opts)
msg = emailmod.MIMEMultipart.MIMEMultipart()
if body:
- msg.attach(mail.mimeencode(ui, body, _charsets, opts.get('test')))
+ msg.attach(mail.mimeencode(ui, body, _charsets, opts.get(r'test')))
datapart = emailmod.MIMEBase.MIMEBase('application', 'x-mercurial-bundle')
datapart.set_payload(bundle)
- bundlename = '%s.hg' % opts.get('bundlename', 'bundle')
+ bundlename = '%s.hg' % opts.get(r'bundlename', 'bundle')
datapart.add_header('Content-Disposition', 'attachment',
filename=bundlename)
emailmod.Encoders.encode_base64(datapart)
msg.attach(datapart)
- msg['Subject'] = mail.headencode(ui, subj, _charsets, opts.get('test'))
+ msg['Subject'] = mail.headencode(ui, subj, _charsets, opts.get(r'test'))
return [(msg, subj, None)]
def _makeintro(repo, sender, revs, patches, **opts):
@@ -384,9 +385,9 @@
_charsets = mail._charsets(ui)
# use the last revision which is likely to be a bookmarked head
- prefix = _formatprefix(ui, repo, revs.last(), opts.get('flag'),
+ prefix = _formatprefix(ui, repo, revs.last(), opts.get(r'flag'),
0, len(patches), numbered=True)
- subj = (opts.get('subject') or
+ subj = (opts.get(r'subject') or
prompt(ui, '(optional) Subject: ', rest=prefix, default=''))
if not subj:
return None # skip intro if the user doesn't bother
@@ -394,7 +395,7 @@
subj = prefix + ' ' + subj
body = ''
- if opts.get('diffstat'):
+ if opts.get(r'diffstat'):
# generate a cumulative diffstat of the whole patch series
diffstat = patch.diffstat(sum(patches, []))
body = '\n' + diffstat
@@ -402,9 +403,9 @@
diffstat = None
body = _getdescription(repo, body, sender, **opts)
- msg = mail.mimeencode(ui, body, _charsets, opts.get('test'))
+ msg = mail.mimeencode(ui, body, _charsets, opts.get(r'test'))
msg['Subject'] = mail.headencode(ui, subj, _charsets,
- opts.get('test'))
+ opts.get(r'test'))
return (msg, subj, diffstat)
def _getpatchmsgs(repo, sender, revs, patchnames=None, **opts):
@@ -414,6 +415,7 @@
This function returns a list of "email" tuples (subject, content, None).
"""
+ bytesopts = pycompat.byteskwargs(opts)
ui = repo.ui
_charsets = mail._charsets(ui)
patches = list(_getpatches(repo, revs, **opts))
@@ -423,7 +425,7 @@
% len(patches))
# build the intro message, or skip it if the user declines
- if introwanted(ui, opts, len(patches)):
+ if introwanted(ui, bytesopts, len(patches)):
msg = _makeintro(repo, sender, revs, patches, **opts)
if msg:
msgs.append(msg)
@@ -437,8 +439,8 @@
for i, (r, p) in enumerate(zip(revs, patches)):
if patchnames:
name = patchnames[i]
- msg = makepatch(ui, repo, r, p, opts, _charsets, i + 1,
- len(patches), numbered, name)
+ msg = makepatch(ui, repo, r, p, bytesopts, _charsets,
+ i + 1, len(patches), numbered, name)
msgs.append(msg)
return msgs
@@ -452,7 +454,7 @@
revs = [r for r in revs if r >= 0]
if not revs:
- revs = [len(repo) - 1]
+ revs = [repo.changelog.tiprev()]
revs = repo.revs('outgoing(%s) and ::%ld', dest or '', revs)
if not revs:
ui.status(_("no changes found\n"))
@@ -579,6 +581,7 @@
Before using this command, you will need to enable email in your
hgrc. See the [email] section in hgrc(5) for details.
'''
+ opts = pycompat.byteskwargs(opts)
_charsets = mail._charsets(ui)
@@ -629,7 +632,7 @@
# check if revision exist on the public destination
publicurl = repo.ui.config('patchbomb', 'publicurl')
if publicurl:
- repo.ui.debug('checking that revision exist in the public repo')
+ repo.ui.debug('checking that revision exist in the public repo\n')
try:
publicpeer = hg.peer(repo, {}, publicurl)
except error.RepoError:
@@ -637,7 +640,7 @@
% publicurl)
raise
if not publicpeer.capable('known'):
- repo.ui.debug('skipping existence checks: public repo too old')
+ repo.ui.debug('skipping existence checks: public repo too old\n')
else:
out = [repo[r] for r in revs]
known = publicpeer.known(h.node() for h in out)
@@ -672,12 +675,13 @@
prompt(ui, 'From', ui.username()))
if bundle:
- bundledata = _getbundle(repo, dest, **opts)
- bundleopts = opts.copy()
- bundleopts.pop('bundle', None) # already processed
+ stropts = pycompat.strkwargs(opts)
+ bundledata = _getbundle(repo, dest, **stropts)
+ bundleopts = stropts.copy()
+ bundleopts.pop(r'bundle', None) # already processed
msgs = _getbundlemsgs(repo, sender, bundledata, **bundleopts)
else:
- msgs = _getpatchmsgs(repo, sender, revs, **opts)
+ msgs = _getpatchmsgs(repo, sender, revs, **pycompat.strkwargs(opts))
showaddrs = []
--- a/hgext/rebase.py Mon Jan 08 16:07:51 2018 -0800
+++ b/hgext/rebase.py Mon Jan 22 17:53:02 2018 -0500
@@ -21,7 +21,6 @@
from mercurial.i18n import _
from mercurial.node import (
- hex,
nullid,
nullrev,
short,
@@ -43,6 +42,7 @@
obsutil,
patch,
phases,
+ pycompat,
registrar,
repair,
revset,
@@ -53,7 +53,6 @@
)
release = lock.release
-templateopts = cmdutil.templateopts
# The following constants are used throughout the rebase module. The ordering of
# their values must be maintained.
@@ -137,7 +136,7 @@
class rebaseruntime(object):
"""This class is a container for rebase runtime state"""
- def __init__(self, repo, ui, opts=None):
+ def __init__(self, repo, ui, inmemory=False, opts=None):
if opts is None:
opts = {}
@@ -179,6 +178,8 @@
# other extensions
self.keepopen = opts.get('keepopen', False)
self.obsoletenotrebased = {}
+ self.obsoletewithoutsuccessorindestination = set()
+ self.inmemory = inmemory
@property
def repo(self):
@@ -311,9 +312,10 @@
if not self.ui.configbool('experimental', 'rebaseskipobsolete'):
return
obsoleteset = set(obsoleterevs)
- self.obsoletenotrebased = _computeobsoletenotrebased(self.repo,
- obsoleteset, destmap)
+ self.obsoletenotrebased, self.obsoletewithoutsuccessorindestination = \
+ _computeobsoletenotrebased(self.repo, obsoleteset, destmap)
skippedset = set(self.obsoletenotrebased)
+ skippedset.update(self.obsoletewithoutsuccessorindestination)
_checkobsrebase(self.repo, self.ui, obsoleteset, skippedset)
def _prepareabortorcontinue(self, isabort):
@@ -380,7 +382,18 @@
self.prepared = True
+ def _assignworkingcopy(self):
+ if self.inmemory:
+ from mercurial.context import overlayworkingctx
+ self.wctx = overlayworkingctx(self.repo)
+ self.repo.ui.debug("rebasing in-memory\n")
+ else:
+ self.wctx = self.repo[None]
+ self.repo.ui.debug("rebasing on disk\n")
+ self.repo.ui.log("rebase", "", rebase_imm_used=self.wctx.isinmemory())
+
def _performrebase(self, tr):
+ self._assignworkingcopy()
repo, ui = self.repo, self.ui
if self.keepbranchesf:
# insert _savebranch at the start of extrafns so if
@@ -419,12 +432,26 @@
def _performrebasesubset(self, tr, subset, pos, total):
repo, ui, opts = self.repo, self.ui, self.opts
sortedrevs = repo.revs('sort(%ld, -topo)', subset)
+ allowdivergence = self.ui.configbool(
+ 'experimental', 'evolution.allowdivergence')
+ if not allowdivergence:
+ sortedrevs -= repo.revs(
+ 'descendants(%ld) and not %ld',
+ self.obsoletewithoutsuccessorindestination,
+ self.obsoletewithoutsuccessorindestination,
+ )
for rev in sortedrevs:
dest = self.destmap[rev]
ctx = repo[rev]
desc = _ctxdesc(ctx)
if self.state[rev] == rev:
ui.status(_('already rebased %s\n') % desc)
+ elif (not allowdivergence
+ and rev in self.obsoletewithoutsuccessorindestination):
+ msg = _('note: not rebasing %s and its descendants as '
+ 'this would cause divergence\n') % desc
+ repo.ui.status(msg)
+ self.skipped.add(rev)
elif rev in self.obsoletenotrebased:
succ = self.obsoletenotrebased[rev]
if succ is None:
@@ -459,22 +486,35 @@
ui.setconfig('ui', 'forcemerge', opts.get('tool', ''),
'rebase')
stats = rebasenode(repo, rev, p1, base, self.state,
- self.collapsef, dest)
+ self.collapsef, dest, wctx=self.wctx)
if stats and stats[3] > 0:
- raise error.InterventionRequired(
- _('unresolved conflicts (see hg '
- 'resolve, then hg rebase --continue)'))
+ if self.wctx.isinmemory():
+ raise error.InMemoryMergeConflictsError()
+ else:
+ raise error.InterventionRequired(
+ _('unresolved conflicts (see hg '
+ 'resolve, then hg rebase --continue)'))
finally:
ui.setconfig('ui', 'forcemerge', '', 'rebase')
if not self.collapsef:
merging = p2 != nullrev
editform = cmdutil.mergeeditform(merging, 'rebase')
editor = cmdutil.getcommiteditor(editform=editform, **opts)
- newnode = concludenode(repo, rev, p1, p2,
- extrafn=_makeextrafn(self.extrafns),
- editor=editor,
- keepbranches=self.keepbranchesf,
- date=self.date)
+ if self.wctx.isinmemory():
+ newnode = concludememorynode(repo, rev, p1, p2,
+ wctx=self.wctx,
+ extrafn=_makeextrafn(self.extrafns),
+ editor=editor,
+ keepbranches=self.keepbranchesf,
+ date=self.date)
+ mergemod.mergestate.clean(repo)
+ else:
+ newnode = concludenode(repo, rev, p1, p2,
+ extrafn=_makeextrafn(self.extrafns),
+ editor=editor,
+ keepbranches=self.keepbranchesf,
+ date=self.date)
+
if newnode is None:
# If it ended up being a no-op commit, then the normal
# merge state clean-up path doesn't happen, so do it
@@ -482,7 +522,10 @@
mergemod.mergestate.clean(repo)
else:
# Skip commit if we are collapsing
- repo.setparents(repo[p1].node())
+ if self.wctx.isinmemory():
+ self.wctx.setbase(repo[p1])
+ else:
+ repo.setparents(repo[p1].node())
newnode = None
# Update the state
if newnode is not None:
@@ -522,15 +565,24 @@
revtoreuse = max(self.state)
dsguard = None
- if ui.configbool('rebase', 'singletransaction'):
- dsguard = dirstateguard.dirstateguard(repo, 'rebase')
- with util.acceptintervention(dsguard):
- newnode = concludenode(repo, revtoreuse, p1, self.external,
- commitmsg=commitmsg,
- extrafn=_makeextrafn(self.extrafns),
- editor=editor,
- keepbranches=self.keepbranchesf,
- date=self.date)
+ if self.inmemory:
+ newnode = concludememorynode(repo, revtoreuse, p1,
+ self.external,
+ commitmsg=commitmsg,
+ extrafn=_makeextrafn(self.extrafns),
+ editor=editor,
+ keepbranches=self.keepbranchesf,
+ date=self.date, wctx=self.wctx)
+ else:
+ if ui.configbool('rebase', 'singletransaction'):
+ dsguard = dirstateguard.dirstateguard(repo, 'rebase')
+ with util.acceptintervention(dsguard):
+ newnode = concludenode(repo, revtoreuse, p1, self.external,
+ commitmsg=commitmsg,
+ extrafn=_makeextrafn(self.extrafns),
+ editor=editor,
+ keepbranches=self.keepbranchesf,
+ date=self.date)
if newnode is not None:
newrev = repo[newnode].rev()
for oldrev in self.state.iterkeys():
@@ -545,7 +597,8 @@
if newwd < 0:
# original directory is a parent of rebase set root or ignored
newwd = self.originalwd
- if newwd not in [c.rev() for c in repo[None].parents()]:
+ if (newwd not in [c.rev() for c in repo[None].parents()] and
+ not self.inmemory):
ui.note(_("update back to initial working directory parent\n"))
hg.updaterepo(repo, newwd, False)
@@ -594,7 +647,7 @@
('t', 'tool', '', _('specify merge tool')),
('c', 'continue', False, _('continue an interrupted rebase')),
('a', 'abort', False, _('abort an interrupted rebase'))] +
- templateopts,
+ cmdutil.formatteropts,
_('[-s REV | -b REV] [-d REV] [OPTION]'))
def rebase(ui, repo, **opts):
"""move changeset (and descendants) to a different branch
@@ -628,6 +681,11 @@
4. If you do not specify any of ``--rev``, ``source``, or ``--base``,
rebase will use ``--base .`` as above.
+ If ``--source`` or ``--rev`` is used, special names ``SRC`` and ``ALLSRC``
+ can be used in ``--dest``. The destination is then calculated per source
+ revision, with ``SRC`` substituted by that single source revision and
+ ``ALLSRC`` substituted by all source revisions.
+
Rebase will destroy original changesets unless you use ``--keep``.
It will also move your bookmarks (even if you do).
@@ -676,6 +734,12 @@
hg rebase -r "branch(featureX)" -d 1.3 --keepbranches
+ - stabilize orphaned changesets so history looks linear::
+
+ hg rebase -r 'orphan()-obsolete()'\
+ -d 'first(max((successors(max(roots(ALLSRC) & ::SRC)^)-obsolete())::) +\
+ max(::((roots(ALLSRC) & ::SRC)^)-obsolete()))'
+
Configuration Options:
You can make rebase require a destination if you set the following config
@@ -693,13 +757,43 @@
[rebase]
singletransaction = True
+ By default, rebase writes to the working copy, but you can configure it to
+ run in-memory for better performance, and to allow it to run if the
+ working copy is dirty::
+
+ [rebase]
+ experimental.inmemory = True
+
Return Values:
Returns 0 on success, 1 if nothing to rebase or there are
unresolved conflicts.
"""
- rbsrt = rebaseruntime(repo, ui, opts)
+ inmemory = ui.configbool('rebase', 'experimental.inmemory')
+ if (opts.get('continue') or opts.get('abort') or
+ repo.currenttransaction() is not None):
+ # in-memory rebase is not compatible with resuming rebases.
+ # (Or if it is run within a transaction, since the restart logic can
+ # fail the entire transaction.)
+ inmemory = False
+
+ if inmemory:
+ try:
+ # in-memory merge doesn't support conflicts, so if we hit any, abort
+ # and re-run as an on-disk merge.
+ return _origrebase(ui, repo, inmemory=inmemory, **opts)
+ except error.InMemoryMergeConflictsError:
+ ui.warn(_('hit merge conflicts; re-running rebase without in-memory'
+ ' merge\n'))
+ _origrebase(ui, repo, **{'abort': True})
+ return _origrebase(ui, repo, inmemory=False, **opts)
+ else:
+ return _origrebase(ui, repo, **opts)
+
+def _origrebase(ui, repo, inmemory=False, **opts):
+ opts = pycompat.byteskwargs(opts)
+ rbsrt = rebaseruntime(repo, ui, inmemory, opts)
with repo.wlock(), repo.lock():
# Validate input and define rebasing points
@@ -746,7 +840,7 @@
if retcode is not None:
return retcode
else:
- destmap = _definedestmap(ui, repo, destf, srcf, basef, revf,
+ destmap = _definedestmap(ui, repo, rbsrt, destf, srcf, basef, revf,
destspace=destspace)
retcode = rbsrt._preparenewrebase(destmap)
if retcode is not None:
@@ -758,16 +852,22 @@
singletr = ui.configbool('rebase', 'singletransaction')
if singletr:
tr = repo.transaction('rebase')
+
+ # If `rebase.singletransaction` is enabled, wrap the entire operation in
+ # one transaction here. Otherwise, transactions are obtained when
+ # committing each node, which is slower but allows partial success.
with util.acceptintervention(tr):
- if singletr:
+ # Same logic for the dirstate guard, except we don't create one when
+ # rebasing in-memory (it's not needed).
+ if singletr and not inmemory:
dsguard = dirstateguard.dirstateguard(repo, 'rebase')
with util.acceptintervention(dsguard):
rbsrt._performrebase(tr)
rbsrt._finishrebase()
-def _definedestmap(ui, repo, destf=None, srcf=None, basef=None, revf=None,
- destspace=None):
+def _definedestmap(ui, repo, rbsrt, destf=None, srcf=None, basef=None,
+ revf=None, destspace=None):
"""use revisions argument to define destmap {srcrev: destrev}"""
if revf is None:
revf = []
@@ -781,8 +881,9 @@
if revf and srcf:
raise error.Abort(_('cannot specify both a revision and a source'))
- cmdutil.checkunfinished(repo)
- cmdutil.bailifchanged(repo)
+ if not rbsrt.inmemory:
+ cmdutil.checkunfinished(repo)
+ cmdutil.bailifchanged(repo)
if ui.configbool('commands', 'rebase.requiredest') and not destf:
raise error.Abort(_('you must specify a destination'),
@@ -855,6 +956,23 @@
ui.status(_('nothing to rebase from %s to %s\n') %
('+'.join(str(repo[r]) for r in base), dest))
return None
+ # If rebasing the working copy parent, force in-memory merge to be off.
+ #
+ # This is because the extra work of checking out the newly rebased commit
+ # outweighs the benefits of rebasing in-memory, and executing an extra
+ # update command adds a bit of overhead, so it is better to just do it on disk. In
+ # all other cases leave it on.
+ #
+ # Note that there are cases where this isn't true -- e.g., rebasing large
+ # stacks that include the WCP. However, I'm not yet sure where the cutoff
+ # is.
+ rebasingwcp = repo['.'].rev() in rebaseset
+ ui.log("rebase", "", rebase_rebasing_wcp=rebasingwcp)
+ if rbsrt.inmemory and rebasingwcp:
+ rbsrt.inmemory = False
+ # Check these since we did not before.
+ cmdutil.checkunfinished(repo)
+ cmdutil.bailifchanged(repo)
if not destf:
dest = repo[_destrebase(repo, rebaseset, destspace=destspace)]
@@ -868,8 +986,6 @@
# fast path: try to resolve dest without SRC alias
dest = scmutil.revsingle(repo, destf, localalias=alias)
except error.RepoLookupError:
- if not ui.configbool('experimental', 'rebase.multidest'):
- raise
# multi-dest path: resolve dest for each SRC separately
destmap = {}
for r in rebaseset:
@@ -920,6 +1036,44 @@
(max(destancestors),
', '.join(str(p) for p in sorted(parents))))
+def concludememorynode(repo, rev, p1, p2, wctx=None,
+ commitmsg=None, editor=None, extrafn=None,
+ keepbranches=False, date=None):
+ '''Commit the memory changes with parents p1 and p2. Reuse commit info from
+ rev but also store useful information in extra.
+ Return node of committed revision.'''
+ ctx = repo[rev]
+ if commitmsg is None:
+ commitmsg = ctx.description()
+ keepbranch = keepbranches and repo[p1].branch() != ctx.branch()
+ extra = {'rebase_source': ctx.hex()}
+ if extrafn:
+ extrafn(ctx, extra)
+
+ destphase = max(ctx.phase(), phases.draft)
+ overrides = {('phases', 'new-commit'): destphase}
+ with repo.ui.configoverride(overrides, 'rebase'):
+ if keepbranch:
+ repo.ui.setconfig('ui', 'allowemptycommit', True)
+ # Replicates the empty check in ``repo.commit``.
+ if wctx.isempty() and not repo.ui.configbool('ui', 'allowemptycommit'):
+ return None
+
+ if date is None:
+ date = ctx.date()
+
+ # By convention, ``extra['branch']`` (set by extrafn) clobbers
+ # ``branch`` (used when passing ``--keepbranches``).
+ branch = repo[p1].branch()
+ if 'branch' in extra:
+ branch = extra['branch']
+
+ memctx = wctx.tomemctx(commitmsg, parents=(p1, p2), date=date,
+ extra=extra, user=ctx.user(), branch=branch, editor=editor)
+ commitres = repo.commitctx(memctx)
+ wctx.clean() # Might be reused
+ return commitres
+
def concludenode(repo, rev, p1, p2, commitmsg=None, editor=None, extrafn=None,
keepbranches=False, date=None):
'''Commit the wd changes with parents p1 and p2. Reuse commit info from rev
@@ -952,24 +1106,29 @@
repo.dirstate.setbranch(repo[newnode].branch())
return newnode
-def rebasenode(repo, rev, p1, base, state, collapse, dest):
+def rebasenode(repo, rev, p1, base, state, collapse, dest, wctx):
'Rebase a single revision rev on top of p1 using base as merge ancestor'
# Merge phase
# Update to destination and merge it with local
- if repo['.'].rev() != p1:
- repo.ui.debug(" update to %d:%s\n" % (p1, repo[p1]))
- mergemod.update(repo, p1, False, True)
+ if wctx.isinmemory():
+ wctx.setbase(repo[p1])
else:
- repo.ui.debug(" already in destination\n")
- repo.dirstate.write(repo.currenttransaction())
+ if repo['.'].rev() != p1:
+ repo.ui.debug(" update to %d:%s\n" % (p1, repo[p1]))
+ mergemod.update(repo, p1, False, True)
+ else:
+ repo.ui.debug(" already in destination\n")
+ # This is, alas, necessary to invalidate workingctx's manifest cache,
+ # as well as other data we litter on it in other places.
+ wctx = repo[None]
+ repo.dirstate.write(repo.currenttransaction())
repo.ui.debug(" merge against %d:%s\n" % (rev, repo[rev]))
if base is not None:
repo.ui.debug(" detach base %d:%s\n" % (base, repo[base]))
# When collapsing in-place, the parent is the common ancestor, we
# have to allow merging with it.
- wctx = repo[None]
stats = mergemod.update(repo, rev, True, True, base, collapse,
- labels=['dest', 'source'])
+ labels=['dest', 'source'], wc=wctx)
if collapse:
copies.duplicatecopies(repo, wctx, rev, dest)
else:
@@ -1546,22 +1705,26 @@
replacements[oldnode] = succs
scmutil.cleanupnodes(repo, replacements, 'rebase', moves)
if fm:
- nodechanges = {hex(oldn): [hex(n) for n in newn]
- for oldn, newn in replacements.iteritems()}
+ hf = fm.hexfunc
+ fl = fm.formatlist
+ fd = fm.formatdict
+ nodechanges = fd({hf(oldn): fl([hf(n) for n in newn], name='node')
+ for oldn, newn in replacements.iteritems()},
+ key="oldnode", value="newnodes")
fm.data(nodechanges=nodechanges)
def pullrebase(orig, ui, repo, *args, **opts):
'Call rebase after pull if the latter has been invoked with --rebase'
ret = None
- if opts.get('rebase'):
+ if opts.get(r'rebase'):
if ui.configbool('commands', 'rebase.requiredest'):
msg = _('rebase destination required by configuration')
hint = _('use hg pull followed by hg rebase -d DEST')
raise error.Abort(msg, hint=hint)
with repo.wlock(), repo.lock():
- if opts.get('update'):
- del opts['update']
+ if opts.get(r'update'):
+ del opts[r'update']
ui.debug('--update and --rebase are not compatible, ignoring '
'the update flag\n')
@@ -1582,15 +1745,15 @@
if revspostpull > revsprepull:
# --rev option from pull conflict with rebase own --rev
# dropping it
- if 'rev' in opts:
- del opts['rev']
+ if r'rev' in opts:
+ del opts[r'rev']
# positional argument from pull conflicts with rebase's own
# --source.
- if 'source' in opts:
- del opts['source']
+ if r'source' in opts:
+ del opts[r'source']
# revsprepull is the len of the repo, not revnum of tip.
destspace = list(repo.changelog.revs(start=revsprepull))
- opts['_destspace'] = destspace
+ opts[r'_destspace'] = destspace
try:
rebase(ui, repo, **opts)
except error.NoMergeDestAbort:
@@ -1604,7 +1767,7 @@
# with warning and trumpets
commands.update(ui, repo)
else:
- if opts.get('tool'):
+ if opts.get(r'tool'):
raise error.Abort(_('--tool can only be used with --rebase'))
ret = orig(ui, repo, *args, **opts)
@@ -1615,11 +1778,16 @@
return set(r for r in revs if repo[r].obsolete())
def _computeobsoletenotrebased(repo, rebaseobsrevs, destmap):
- """return a mapping obsolete => successor for all obsolete nodes to be
- rebased that have a successors in the destination
+ """Return (obsoletenotrebased, obsoletewithoutsuccessorindestination).
+
+ `obsoletenotrebased` is a mapping obsolete => successor for all
+ obsolete nodes to be rebased given in `rebaseobsrevs`.
- obsolete => None entries in the mapping indicate nodes with no successor"""
+ `obsoletewithoutsuccessorindestination` is the set of obsolete revisions
+ without a successor in the destination.
+ """
obsoletenotrebased = {}
+ obsoletewithoutsuccessorindestination = set([])
assert repo.filtername is None
cl = repo.changelog
@@ -1640,8 +1808,15 @@
if cl.isancestor(succnode, destnode):
obsoletenotrebased[srcrev] = nodemap[succnode]
break
+ else:
+ # If 'srcrev' has a successor in the rebase set but none in the
+ # destination (which would have been caught above), skip it
+ # and its descendants to avoid divergence.
+ if any(nodemap[s] in destmap
+ for s in successors if s != srcnode):
+ obsoletewithoutsuccessorindestination.add(srcrev)
- return obsoletenotrebased
+ return obsoletenotrebased, obsoletewithoutsuccessorindestination
def summaryhook(ui, repo):
if not repo.vfs.exists('rebasestate'):
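
The in-memory rebase fallback added in rebase()/_origrebase() above boils down to a try/except pattern. The sketch below shows the shape of that control flow only; the function and exception names are hypothetical stand-ins, not Mercurial APIs: attempt the in-memory path first, and redo the whole rebase on disk if a conflict is hit.

class InMemoryConflict(Exception):
    """Stand-in for error.InMemoryMergeConflictsError."""

def rebase_with_fallback(run_in_memory, run_on_disk, abort):
    # Try the fast, working-copy-free path first.
    try:
        return run_in_memory()
    except InMemoryConflict:
        # Conflicts need a real working copy: clean up the in-memory
        # attempt, then rerun everything on disk.
        abort()
        return run_on_disk()
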
--- a/hgext/record.py Mon Jan 08 16:07:51 2018 -0800
+++ b/hgext/record.py Mon Jan 22 17:53:02 2018 -0500
@@ -68,13 +68,13 @@
raise error.Abort(_('running non-interactively, use %s instead') %
'commit')
- opts["interactive"] = True
+ opts[r"interactive"] = True
overrides = {('experimental', 'crecord'): False}
with ui.configoverride(overrides, 'record'):
return commands.commit(ui, repo, *pats, **opts)
def qrefresh(origfn, ui, repo, *pats, **opts):
- if not opts['interactive']:
+ if not opts[r'interactive']:
return origfn(ui, repo, *pats, **opts)
mq = extensions.find('mq')
@@ -112,7 +112,7 @@
repo.mq.checkpatchname(patch)
def committomq(ui, repo, *pats, **opts):
- opts['checkname'] = False
+ opts[r'checkname'] = False
mq.new(ui, repo, patch, *pats, **opts)
overrides = {('experimental', 'crecord'): False}
@@ -121,7 +121,7 @@
cmdutil.recordfilter, *pats, **opts)
def qnew(origfn, ui, repo, patch, *args, **opts):
- if opts['interactive']:
+ if opts[r'interactive']:
return _qrecord(None, ui, repo, patch, *args, **opts)
return origfn(ui, repo, patch, *args, **opts)
--- a/hgext/releasenotes.py Mon Jan 08 16:07:51 2018 -0800
+++ b/hgext/releasenotes.py Mon Jan 22 17:53:02 2018 -0500
@@ -25,6 +25,7 @@
error,
minirst,
node,
+ pycompat,
registrar,
scmutil,
util,
@@ -570,6 +571,8 @@
admonitions along with their title. This also includes the custom
admonitions (if any).
"""
+
+ opts = pycompat.byteskwargs(opts)
sections = releasenotessections(ui, repo)
listflag = opts.get('list')
--- a/hgext/shelve.py Mon Jan 08 16:07:51 2018 -0800
+++ b/hgext/shelve.py Mon Jan 22 17:53:02 2018 -0500
@@ -43,6 +43,7 @@
node as nodemod,
patch,
phases,
+ pycompat,
registrar,
repair,
scmutil,
@@ -380,7 +381,7 @@
editor_ = False
if editor:
editor_ = cmdutil.getcommiteditor(editform='shelve.shelve',
- **opts)
+ **pycompat.strkwargs(opts))
with repo.ui.configoverride(overrides):
return repo.commit(message, shelveuser, opts.get('date'),
match, editor=editor_, extra=extra)
@@ -389,6 +390,7 @@
repo.mq.checkapplied = saved
def interactivecommitfunc(ui, repo, *pats, **opts):
+ opts = pycompat.byteskwargs(opts)
match = scmutil.match(repo['.'], pats, {})
message = opts['message']
return commitfunc(ui, repo, message, match, opts)
@@ -465,7 +467,7 @@
else:
node = cmdutil.dorecord(ui, repo, commitfunc, None,
False, cmdutil.recordfilter, *pats,
- **opts)
+ **pycompat.strkwargs(opts))
if not node:
_nothingtoshelvemessaging(ui, repo, pats, opts)
return 1
@@ -852,6 +854,7 @@
return _dounshelve(ui, repo, *shelved, **opts)
def _dounshelve(ui, repo, *shelved, **opts):
+ opts = pycompat.byteskwargs(opts)
abortf = opts.get('abort')
continuef = opts.get('continue')
if not abortf and not continuef:
@@ -1010,6 +1013,7 @@
To delete specific shelved changes, use ``--delete``. To delete
all shelved changes, use ``--cleanup``.
'''
+ opts = pycompat.byteskwargs(opts)
allowables = [
('addremove', {'create'}), # 'create' is pseudo action
('unknown', {'create'}),
--- a/hgext/show.py Mon Jan 08 16:07:51 2018 -0800
+++ b/hgext/show.py Mon Jan 22 17:53:02 2018 -0500
@@ -28,7 +28,10 @@
from __future__ import absolute_import
from mercurial.i18n import _
-from mercurial.node import nullrev
+from mercurial.node import (
+ hex,
+ nullrev,
+)
from mercurial import (
cmdutil,
commands,
@@ -252,7 +255,9 @@
# our simplicity and the customizations required.
# TODO use proper graph symbols from graphmod
- shortesttmpl = formatter.maketemplater(ui, '{shortest(node, %d)}' % nodelen)
+ tres = formatter.templateresources(ui, repo)
+ shortesttmpl = formatter.maketemplater(ui, '{shortest(node, %d)}' % nodelen,
+ resources=tres)
def shortest(ctx):
return shortesttmpl.render({'ctx': ctx, 'node': ctx.hex()})
@@ -438,14 +443,11 @@
If we fail to do this, a value of e.g. ``10023`` could mean either
revision 10023 or node ``10023abc...``.
"""
- tmpl = formatter.maketemplater(repo.ui, '{shortest(node, %d)}' % minlen)
- lens = [minlen]
- for rev in revs:
- ctx = repo[rev]
- shortest = tmpl.render({'ctx': ctx, 'node': ctx.hex()})
- lens.append(len(shortest))
-
- return max(lens)
+ if not revs:
+ return minlen
+ # don't use filtered repo because it's slow. see templater.shortest().
+ cl = repo.unfiltered().changelog
+ return max(len(cl.shortest(hex(cl.node(r)), minlen)) for r in revs)
# Adjust the docstring of the show command so it shows all registered views.
# This is a bit hacky because it runs at the end of module load. When moved
--- a/hgext/sparse.py Mon Jan 08 16:07:51 2018 -0800
+++ b/hgext/sparse.py Mon Jan 22 17:53:02 2018 -0500
@@ -82,6 +82,7 @@
extensions,
hg,
match as matchmod,
+ pycompat,
registrar,
sparse,
util,
@@ -286,6 +287,7 @@
Returns 0 if editing the sparse checkout succeeds.
"""
+ opts = pycompat.byteskwargs(opts)
include = opts.get('include')
exclude = opts.get('exclude')
force = opts.get('force')
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/hgext/split.py Mon Jan 22 17:53:02 2018 -0500
@@ -0,0 +1,177 @@
+# split.py - split a changeset into smaller ones
+#
+# Copyright 2015 Laurent Charignon <lcharignon@fb.com>
+# Copyright 2017 Facebook, Inc.
+#
+# This software may be used and distributed according to the terms of the
+# GNU General Public License version 2 or any later version.
+"""command to split a changeset into smaller ones (EXPERIMENTAL)"""
+
+from __future__ import absolute_import
+
+from mercurial.i18n import _
+
+from mercurial.node import (
+ nullid,
+ short,
+)
+
+from mercurial import (
+ bookmarks,
+ cmdutil,
+ commands,
+ error,
+ hg,
+ obsolete,
+ phases,
+ registrar,
+ revsetlang,
+ scmutil,
+)
+
+# allow people to use split without explicitly enabling rebase extension
+from . import (
+ rebase,
+)
+
+cmdtable = {}
+command = registrar.command(cmdtable)
+
+# Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
+# extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
+# be specifying the version(s) of Mercurial they are tested with, or
+# leave the attribute unspecified.
+testedwith = 'ships-with-hg-core'
+
+@command('^split',
+ [('r', 'rev', '', _("revision to split"), _('REV')),
+ ('', 'rebase', True, _('rebase descendants after split')),
+ ] + cmdutil.commitopts2,
+ _('hg split [--no-rebase] [[-r] REV]'))
+def split(ui, repo, *revs, **opts):
+ """split a changeset into smaller ones
+
+ Repeatedly prompt for changes and a commit message for new changesets until
+ there is nothing left in the original changeset.
+
+ If --rev was not given, split the working directory parent.
+
+ By default, rebase connected non-obsoleted descendants onto the new
+ changeset. Use --no-rebase to avoid the rebase.
+ """
+ revlist = []
+ if opts.get('rev'):
+ revlist.append(opts.get('rev'))
+ revlist.extend(revs)
+ with repo.wlock(), repo.lock(), repo.transaction('split') as tr:
+ revs = scmutil.revrange(repo, revlist or ['.'])
+ if len(revs) > 1:
+ raise error.Abort(_('cannot split multiple revisions'))
+
+ rev = revs.first()
+ ctx = repo[rev]
+ if rev is None or ctx.node() == nullid:
+ ui.status(_('nothing to split\n'))
+ return 1
+ if ctx.node() is None:
+ raise error.Abort(_('cannot split working directory'))
+
+ # rewriteutil.precheck is not very useful here because:
+ # 1. null check is done above and it's more friendly to return 1
+ # instead of abort
+ # 2. mergestate check is done below by cmdutil.bailifchanged
+ # 3. unstable check is more complex here because of --rebase
+ #
+ # So only "public" check is useful and it's checked directly here.
+ if ctx.phase() == phases.public:
+ raise error.Abort(_('cannot split public changeset'),
+ hint=_("see 'hg help phases' for details"))
+
+ descendants = list(repo.revs('(%d::) - (%d)', rev, rev))
+ alloworphaned = obsolete.isenabled(repo, obsolete.allowunstableopt)
+ if opts.get('rebase'):
+ # Skip obsoleted descendants and their descendants so the rebase
+ # is guaranteed not to cause conflicts.
+ torebase = list(repo.revs('%ld - (%ld & obsolete())::',
+ descendants, descendants))
+ if not alloworphaned and len(torebase) != len(descendants):
+ raise error.Abort(_('split would leave orphaned changesets '
+ 'behind'))
+ else:
+ if not alloworphaned and descendants:
+ raise error.Abort(
+ _('cannot split changeset with children without rebase'))
+ torebase = ()
+
+ if len(ctx.parents()) > 1:
+ raise error.Abort(_('cannot split a merge changeset'))
+
+ cmdutil.bailifchanged(repo)
+
+ # Deactivate bookmark temporarily so it won't get moved unintentionally
+ bname = repo._activebookmark
+ if bname and repo._bookmarks[bname] != ctx.node():
+ bookmarks.deactivate(repo)
+
+ wnode = repo['.'].node()
+ top = None
+ try:
+ top = dosplit(ui, repo, tr, ctx, opts)
+ finally:
+ # top is None: split failed, need update --clean recovery.
+ # wnode == ctx.node(): wnode split, no need to update.
+ if top is None or wnode != ctx.node():
+ hg.clean(repo, wnode, show_stats=False)
+ if bname:
+ bookmarks.activate(repo, bname)
+ if torebase and top:
+ dorebase(ui, repo, torebase, top)
+
+def dosplit(ui, repo, tr, ctx, opts):
+ committed = [] # [ctx]
+
+ # Set working parent to ctx.p1(), and keep working copy as ctx's content
+ # NOTE: if we can have "update without touching working copy" API, the
+ # revert step could be cheaper.
+ hg.clean(repo, ctx.p1().node(), show_stats=False)
+ parents = repo.changelog.parents(ctx.node())
+ ui.pushbuffer()
+ cmdutil.revert(ui, repo, ctx, parents)
+ ui.popbuffer() # discard "reverting ..." messages
+
+ # Any modified, added, removed, deleted result means split is incomplete
+ incomplete = lambda repo: any(repo.status()[:4])
+
+ # Main split loop
+ while incomplete(repo):
+ if committed:
+ header = (_('HG: Splitting %s. So far it has been split into:\n')
+ % short(ctx.node()))
+ for c in committed:
+ firstline = c.description().split('\n', 1)[0]
+ header += _('HG: - %s: %s\n') % (short(c.node()), firstline)
+ header += _('HG: Write commit message for the next split '
+ 'changeset.\n')
+ else:
+ header = _('HG: Splitting %s. Write commit message for the '
+ 'first split changeset.\n') % short(ctx.node())
+ opts.update({
+ 'edit': True,
+ 'interactive': True,
+ 'message': header + ctx.description(),
+ })
+ commands.commit(ui, repo, **opts)
+ newctx = repo['.']
+ committed.append(newctx)
+
+ if not committed:
+ raise error.Abort(_('cannot split an empty revision'))
+
+ scmutil.cleanupnodes(repo, {ctx.node(): [c.node() for c in committed]},
+ operation='split')
+
+ return committed[-1]
+
+def dorebase(ui, repo, src, dest):
+ rebase.rebase(ui, repo, rev=[revsetlang.formatspec('%ld', src)],
+ dest=revsetlang.formatspec('%d', dest))
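
The split loop above builds an ever-growing "HG:" header for the commit editor, listing what has already been committed. A minimal standalone sketch of that header construction; the buildheader helper and the sample hashes are illustrative only, not Mercurial API:

def buildheader(shortnode, committed):
    # committed is a list of (short hash, first line of description) pairs
    if not committed:
        return ('HG: Splitting %s. Write commit message for the '
                'first split changeset.\n' % shortnode)
    header = 'HG: Splitting %s. So far it has been split into:\n' % shortnode
    for node, firstline in committed:
        header += 'HG: - %s: %s\n' % (node, firstline)
    header += 'HG: Write commit message for the next split changeset.\n'
    return header

print(buildheader('1df0d5c5a3ab', []))
print(buildheader('1df0d5c5a3ab', [('f7fdcd16d4e7', 'first half')]))
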
--- a/hgext/uncommit.py Mon Jan 08 16:07:51 2018 -0800
+++ b/hgext/uncommit.py Mon Jan 22 17:53:02 2018 -0500
@@ -28,8 +28,10 @@
copies,
error,
node,
- obsolete,
+ obsutil,
+ pycompat,
registrar,
+ rewriteutil,
scmutil,
)
@@ -75,7 +77,7 @@
if path not in contentctx:
return None
fctx = contentctx[path]
- mctx = context.memfilectx(repo, fctx.path(), fctx.data(),
+ mctx = context.memfilectx(repo, memctx, fctx.path(), fctx.data(),
fctx.islink(),
fctx.isexec(),
copied=copied.get(path))
@@ -96,15 +98,13 @@
newid = repo.commitctx(new)
return newid
-def _uncommitdirstate(repo, oldctx, match):
- """Fix the dirstate after switching the working directory from
- oldctx to a copy of oldctx not containing changed files matched by
- match.
+def _fixdirstate(repo, oldctx, newctx, status):
+    """fix the dirstate after switching the working directory from oldctx to
+    newctx, which can be the result of either unamend or uncommit.
"""
- ctx = repo['.']
ds = repo.dirstate
copies = dict(ds.copies())
- s = repo.status(oldctx.p1(), oldctx, match=match)
+ s = status
for f in s.modified:
if ds[f] == 'r':
# modified + removed -> removed
@@ -136,7 +136,7 @@
for dst, src in oldcopies.iteritems())
# Adjust the dirstate copies
for dst, src in copies.iteritems():
- if (src not in ctx or dst in ctx or ds[dst] != 'a'):
+ if (src not in newctx or dst in newctx or ds[dst] != 'a'):
src = None
ds.copy(src, dst)
@@ -152,25 +152,17 @@
deleted in the changeset will be left unchanged, and so will remain
modified in the working directory.
"""
+ opts = pycompat.byteskwargs(opts)
with repo.wlock(), repo.lock():
- wctx = repo[None]
if not pats and not repo.ui.configbool('experimental',
'uncommitondirtywdir'):
cmdutil.bailifchanged(repo)
- if wctx.parents()[0].node() == node.nullid:
- raise error.Abort(_("cannot uncommit null changeset"))
- if len(wctx.parents()) > 1:
- raise error.Abort(_("cannot uncommit while merging"))
old = repo['.']
- if not old.mutable():
- raise error.Abort(_('cannot uncommit public changesets'))
+ rewriteutil.precheck(repo, [old.rev()], 'uncommit')
if len(old.parents()) > 1:
raise error.Abort(_("cannot uncommit merge changeset"))
- allowunstable = obsolete.isenabled(repo, obsolete.allowunstableopt)
- if not allowunstable and old.children():
- raise error.Abort(_('cannot uncommit changeset with children'))
with repo.transaction('uncommit'):
match = scmutil.match(old, pats, opts)
@@ -191,4 +183,75 @@
with repo.dirstate.parentchange():
repo.dirstate.setparents(newid, node.nullid)
- _uncommitdirstate(repo, old, match)
+ s = repo.status(old.p1(), old, match=match)
+ _fixdirstate(repo, old, repo[newid], s)
+
+def predecessormarkers(ctx):
+ """yields the obsolete markers marking the given changeset as a successor"""
+ for data in ctx.repo().obsstore.predecessors.get(ctx.node(), ()):
+ yield obsutil.marker(ctx.repo(), data)
+
+@command('^unamend', [])
+def unamend(ui, repo, **opts):
+ """
+    undo the most recent amend operation on the current changeset
+
+    This command will roll back to the previous version of a changeset,
+    leaving the working directory in the state it was in before running
+    `hg amend` (e.g. files modified as part of an amend will be
+    reported as modified by `hg status`).
+ """
+
+ unfi = repo.unfiltered()
+ with repo.wlock(), repo.lock(), repo.transaction('unamend'):
+
+ # identify the commit from which to unamend
+ curctx = repo['.']
+
+ rewriteutil.precheck(repo, [curctx.rev()], 'unamend')
+
+ # identify the commit to which to unamend
+ markers = list(predecessormarkers(curctx))
+ if len(markers) != 1:
+ e = _("changeset must have one predecessor, found %i predecessors")
+ raise error.Abort(e % len(markers))
+
+ prednode = markers[0].prednode()
+ predctx = unfi[prednode]
+
+ # add an extra so that we get a new hash
+ # note: allowing unamend to undo an unamend is an intentional feature
+ extras = predctx.extra()
+ extras['unamend_source'] = curctx.hex()
+
+ def filectxfn(repo, ctx_, path):
+ try:
+ return predctx.filectx(path)
+ except KeyError:
+ return None
+
+ # Make a new commit same as predctx
+ newctx = context.memctx(repo,
+ parents=(predctx.p1(), predctx.p2()),
+ text=predctx.description(),
+ files=predctx.files(),
+ filectxfn=filectxfn,
+ user=predctx.user(),
+ date=predctx.date(),
+ extra=extras)
+ # phase handling
+ commitphase = curctx.phase()
+ overrides = {('phases', 'new-commit'): commitphase}
+ with repo.ui.configoverride(overrides, 'uncommit'):
+ newprednode = repo.commitctx(newctx)
+
+ newpredctx = repo[newprednode]
+ dirstate = repo.dirstate
+
+ with dirstate.parentchange():
+ dirstate.setparents(newprednode, node.nullid)
+ s = repo.status(predctx, curctx)
+ _fixdirstate(repo, curctx, newpredctx, s)
+
+ mapping = {curctx.node(): (newprednode,)}
+ scmutil.cleanupnodes(repo, mapping, 'unamend')
--- a/hgext/win32text.py Mon Jan 08 16:07:51 2018 -0800
+++ b/hgext/win32text.py Mon Jan 22 17:53:02 2018 -0500
@@ -139,7 +139,7 @@
# changegroup that contains an unacceptable commit followed later
# by a commit that fixes the problem.
tip = repo['tip']
- for rev in xrange(len(repo) - 1, repo[node].rev() - 1, -1):
+ for rev in xrange(repo.changelog.tiprev(), repo[node].rev() - 1, -1):
c = repo[rev]
for f in c.files():
if f in seen or f not in tip or f not in c:
--- a/hgext/zeroconf/Zeroconf.py Mon Jan 08 16:07:51 2018 -0800
+++ b/hgext/zeroconf/Zeroconf.py Mon Jan 22 17:53:02 2018 -0500
@@ -1613,7 +1613,8 @@
_DNS_TTL, service.address))
service = self.services.get(question.name.lower(), None)
- if not service: continue
+ if not service:
+ continue
if (question.type == _TYPE_SRV or
question.type == _TYPE_ANY):
--- a/i18n/de.po Mon Jan 08 16:07:51 2018 -0800
+++ b/i18n/de.po Mon Jan 22 17:53:02 2018 -0500
@@ -9744,86 +9744,9 @@
msgid "child process failed to start"
msgstr ""
-#. i18n: column positioning for "hg log"
-#, python-format
-msgid "changeset: %s\n"
-msgstr "Änderung: %s\n"
-
-#. i18n: column positioning for "hg log"
-#, python-format
-msgid "branch: %s\n"
-msgstr "Zweig: %s\n"
-
-#. i18n: column positioning for "hg log"
-#, python-format
-msgid "bookmark: %s\n"
-msgstr "Lesezeichen: %s\n"
-
-#. i18n: column positioning for "hg log"
-#, python-format
-msgid "tag: %s\n"
-msgstr "Marke: %s\n"
-
-#. i18n: column positioning for "hg log"
-#, python-format
-msgid "phase: %s\n"
-msgstr "Phase: %s\n"
-
-#. i18n: column positioning for "hg log"
-#, python-format
-msgid "parent: %s\n"
-msgstr "Vorgänger: %s\n"
-
-#. i18n: column positioning for "hg log"
-#, python-format
-msgid "manifest: %d:%s\n"
-msgstr "Manifest: %d:%s\n"
-
-#. i18n: column positioning for "hg log"
-#, python-format
-msgid "user: %s\n"
-msgstr "Nutzer: %s\n"
-
-#. i18n: column positioning for "hg log"
-#, python-format
-msgid "date: %s\n"
-msgstr "Datum: %s\n"
-
-#. i18n: column positioning for "hg log"
-msgid "files:"
-msgstr "Dateien:"
-
-#. i18n: column positioning for "hg log"
-msgid "files+:"
-msgstr "Dateien+:"
-
-#. i18n: column positioning for "hg log"
-msgid "files-:"
-msgstr "Dateien-:"
-
-#. i18n: column positioning for "hg log"
-#, python-format
-msgid "files: %s\n"
-msgstr "Dateien: %s\n"
-
-#. i18n: column positioning for "hg log"
-#, python-format
-msgid "copies: %s\n"
-msgstr "Kopien: %s\n"
-
-#. i18n: column positioning for "hg log"
-#, python-format
-msgid "extra: %s=%s\n"
-msgstr "Extra: %s=%s\n"
-
msgid "description:\n"
msgstr "Beschreibung:\n"
-#. i18n: column positioning for "hg log"
-#, python-format
-msgid "summary: %s\n"
-msgstr "Zusammenfassung: %s\n"
-
#, python-format
msgid "%s: no key named '%s'"
msgstr "%s: kein Schlüsselwort '%s'"
@@ -23194,6 +23117,45 @@
":emailuser: Beliebiger Text. Gibt den Nutzerteil einer E-Mail-Adresse\n"
" (vor dem @-Zeichen) zurück."
+#. i18n: column positioning for "hg log"
+#, python-format
+msgid ""
+"bookmark: %s\n"
+"branch: %s\n"
+"changeset: %s\n"
+"copies: %s\n"
+"date: %s\n"
+"extra: %s=%s\n"
+"files+: %s\n"
+"files-: %s\n"
+"files: %s\n"
+"instability: %s\n"
+"manifest: %s\n"
+"obsolete: %s\n"
+"parent: %s\n"
+"phase: %s\n"
+"summary: %s\n"
+"tag: %s\n"
+"user: %s\n"
+msgstr ""
+"Lesezeichen: %s\n"
+"Zweig: %s\n"
+"Änderung: %s\n"
+"Kopien: %s\n"
+"Datum: %s\n"
+"Extra: %s=%s\n"
+"Dateien+: %s\n"
+"Dateien-: %s\n"
+"Dateien: %s\n"
+"instability: %s\n"
+"Manifest: %s\n"
+"obsolete: %s\n"
+"Vorgänger: %s\n"
+"Phase: %s\n"
+"Zusammenfassung: %s\n"
+"Marke: %s\n"
+"Nutzer: %s\n"
+
msgid ":author: String. The unmodified author of the changeset."
msgstr ":author: Zeichenkette. Der unveränderte Autor eines Änderungssatzes."
--- a/mercurial/__init__.py Mon Jan 08 16:07:51 2018 -0800
+++ b/mercurial/__init__.py Mon Jan 22 17:53:02 2018 -0500
@@ -31,9 +31,6 @@
# Only handle Mercurial-related modules.
if not fullname.startswith(('mercurial.', 'hgext.', 'hgext3rd.')):
return None
- # selectors2 is already dual-version clean, don't try and mangle it
- if fullname.startswith('mercurial.selectors2'):
- return None
# third-party packages are expected to be dual-version clean
if fullname.startswith('mercurial.thirdparty'):
return None
--- a/mercurial/archival.py Mon Jan 08 16:07:51 2018 -0800
+++ b/mercurial/archival.py Mon Jan 22 17:53:02 2018 -0500
@@ -126,7 +126,7 @@
def __init__(self, *args, **kw):
timestamp = None
if 'timestamp' in kw:
- timestamp = kw.pop('timestamp')
+ timestamp = kw.pop(r'timestamp')
if timestamp is None:
self.timestamp = time.time()
else:
@@ -262,6 +262,7 @@
def __init__(self, name, mtime):
self.basedir = name
self.opener = vfsmod.vfs(self.basedir)
+ self.mtime = mtime
def addfile(self, name, mode, islink, data):
if islink:
@@ -272,6 +273,8 @@
f.close()
destfile = os.path.join(self.basedir, name)
os.chmod(destfile, mode)
+ if self.mtime is not None:
+ os.utime(destfile, (self.mtime, self.mtime))
def done(self):
pass
@@ -299,7 +302,12 @@
matchfn is function to filter names of files to write to archive.
- prefix is name of path to put before every archive member.'''
+ prefix is name of path to put before every archive member.
+
+ mtime is the modified time, in seconds, or None to use the changeset time.
+
+ subrepos tells whether to include subrepos.
+ '''
if kind == 'files':
if prefix:
--- a/mercurial/bdiff.c Mon Jan 08 16:07:51 2018 -0800
+++ b/mercurial/bdiff.c Mon Jan 22 17:53:02 2018 -0500
@@ -41,7 +41,7 @@
if (p == plast)
i++;
- *lr = l = (struct bdiff_line *)malloc(sizeof(struct bdiff_line) * i);
+ *lr = l = (struct bdiff_line *)calloc(i, sizeof(struct bdiff_line));
if (!l)
return -1;
@@ -95,7 +95,7 @@
/* try to allocate a large hash table to avoid collisions */
for (scale = 4; scale; scale /= 2) {
- h = (struct pos *)malloc(scale * buckets * sizeof(struct pos));
+ h = (struct pos *)calloc(buckets, scale * sizeof(struct pos));
if (h)
break;
}
--- a/mercurial/bookmarks.py Mon Jan 08 16:07:51 2018 -0800
+++ b/mercurial/bookmarks.py Mon Jan 22 17:53:02 2018 -0500
@@ -8,17 +8,18 @@
from __future__ import absolute_import
import errno
+import struct
from .i18n import _
from .node import (
bin,
hex,
short,
+ wdirid,
)
from . import (
encoding,
error,
- lock as lockmod,
obsutil,
pycompat,
scmutil,
@@ -121,6 +122,12 @@
self._clean = False
return dict.__delitem__(self, key)
+ def update(self, *others):
+ msg = ("bookmarks.update(...)' is deprecated, "
+ "use 'bookmarks.applychanges'")
+ self._repo.ui.deprecwarn(msg, '4.5')
+ return dict.update(self, *others)
+
def applychanges(self, repo, tr, changes):
"""Apply a list of changes to bookmarks
"""
@@ -390,14 +397,8 @@
bmchanges.append((bm, None))
if bmchanges:
- lock = tr = None
- try:
- lock = repo.lock()
- tr = repo.transaction('bookmark')
+ with repo.lock(), repo.transaction('bookmark') as tr:
marks.applychanges(repo, tr, bmchanges)
- tr.close()
- finally:
- lockmod.release(tr, lock)
return bool(bmchanges)
def listbinbookmarks(repo):
@@ -418,11 +419,7 @@
return d
def pushbookmark(repo, key, old, new):
- w = l = tr = None
- try:
- w = repo.wlock()
- l = repo.lock()
- tr = repo.transaction('bookmarks')
+ with repo.wlock(), repo.lock(), repo.transaction('bookmarks') as tr:
marks = repo._bookmarks
existing = hex(marks.get(key, ''))
if existing != old and existing != new:
@@ -434,10 +431,7 @@
return False
changes = [(key, repo[new].node())]
marks.applychanges(repo, tr, changes)
- tr.close()
return True
- finally:
- lockmod.release(tr, l, w)
def comparebookmarks(repo, srcmarks, dstmarks, targets=None):
'''Compare bookmarks between srcmarks and dstmarks
@@ -550,6 +544,60 @@
binremotemarks[name] = bin(node)
return binremotemarks
+_binaryentry = struct.Struct('>20sH')
+
+def binaryencode(bookmarks):
+ """encode a '(bookmark, node)' iterable into a binary stream
+
+ the binary format is:
+
+ <node><bookmark-length><bookmark-name>
+
+    :node: is a 20-byte binary node,
+ :bookmark-length: an unsigned short,
+ :bookmark-name: the name of the bookmark (of length <bookmark-length>)
+
+ wdirid (all bits set) will be used as a special value for "missing"
+ """
+ binarydata = []
+ for book, node in bookmarks:
+ if not node: # None or ''
+ node = wdirid
+ binarydata.append(_binaryentry.pack(node, len(book)))
+ binarydata.append(book)
+ return ''.join(binarydata)
+
+def binarydecode(stream):
+ """decode a binary stream into an '(bookmark, node)' iterable
+
+ the binary format is:
+
+ <node><bookmark-length><bookmark-name>
+
+    :node: is a 20-byte binary node,
+    :bookmark-length: an unsigned short,
+    :bookmark-name: the name of the bookmark (of length <bookmark-length>)
+
+ wdirid (all bits set) will be used as a special value for "missing"
+ """
+ entrysize = _binaryentry.size
+ books = []
+ while True:
+ entry = stream.read(entrysize)
+ if len(entry) < entrysize:
+ if entry:
+ raise error.Abort(_('bad bookmark stream'))
+ break
+ node, length = _binaryentry.unpack(entry)
+ bookmark = stream.read(length)
+ if len(bookmark) < length:
+ if entry:
+ raise error.Abort(_('bad bookmark stream'))
+ if node == wdirid:
+ node = None
+ books.append((bookmark, node))
+ return books
+
def updatefromremote(ui, repo, remotemarks, path, trfunc, explicit=()):
ui.debug("checking for updated bookmarks\n")
localmarks = repo._bookmarks
@@ -788,6 +836,12 @@
cur = repo.changectx('.').node()
newact = None
changes = []
+ hiddenrev = None
+
+ # unhide revs if any
+ if rev:
+ repo = scmutil.unhidehashlikerevs(repo, [rev], 'nowarn')
+
for mark in names:
mark = checkformat(repo, mark)
if newact is None:
@@ -797,10 +851,21 @@
return
tgt = cur
if rev:
- tgt = scmutil.revsingle(repo, rev).node()
+ ctx = scmutil.revsingle(repo, rev)
+ if ctx.hidden():
+ hiddenrev = ctx.hex()[:12]
+ tgt = ctx.node()
for bm in marks.checkconflict(mark, force, tgt):
changes.append((bm, None))
changes.append((mark, tgt))
+
+ if hiddenrev:
+ repo.ui.warn(_("bookmarking hidden changeset %s\n") % hiddenrev)
+
+ if ctx.obsolete():
+ msg = obsutil._getfilteredreason(repo, "%s" % hiddenrev, ctx)
+ repo.ui.warn("(%s)\n" % msg)
+
marks.applychanges(repo, tr, changes)
if not inactive and cur == marks[newact] and not rev:
activate(repo, newact)
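
binaryencode() and binarydecode() above document the wire format for bookmarks: <node><bookmark-length><bookmark-name>, with an all-ones node standing in for a missing bookmark. A minimal standalone sketch of that framing using only the struct module; the encode/decode helpers and the MISSING constant are illustrative names, not Mercurial API:

import struct

entry = struct.Struct('>20sH')   # 20-byte node + unsigned short name length
MISSING = b'\xff' * 20           # stand-in for "missing" (wdirid, all bits set)

def encode(pairs):
    out = []
    for name, node in pairs:
        out.append(entry.pack(node or MISSING, len(name)))
        out.append(name)
    return b''.join(out)

def decode(data):
    books, pos = [], 0
    while pos < len(data):
        node, length = entry.unpack_from(data, pos)
        pos += entry.size
        name, pos = data[pos:pos + length], pos + length
        books.append((name, None if node == MISSING else node))
    return books

blob = encode([(b'@', b'\x11' * 20), (b'gone', None)])
assert decode(blob) == [(b'@', b'\x11' * 20), (b'gone', None)]
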
--- a/mercurial/branchmap.py Mon Jan 08 16:07:51 2018 -0800
+++ b/mercurial/branchmap.py Mon Jan 22 17:53:02 2018 -0500
@@ -84,6 +84,7 @@
# This create and ordering used for branchmap purpose.
# the ordering may be partial
subsettable = {None: 'visible',
+ 'visible-hidden': 'visible',
'visible': 'served',
'served': 'immutable',
'immutable': 'base'}
--- a/mercurial/bundle2.py Mon Jan 08 16:07:51 2018 -0800
+++ b/mercurial/bundle2.py Mon Jan 22 17:53:02 2018 -0500
@@ -148,6 +148,7 @@
from __future__ import absolute_import, division
import errno
+import os
import re
import string
import struct
@@ -155,6 +156,7 @@
from .i18n import _
from . import (
+ bookmarks,
changegroup,
error,
node as nodemod,
@@ -162,6 +164,7 @@
phases,
pushkey,
pycompat,
+ streamclone,
tags,
url,
util,
@@ -180,7 +183,7 @@
_fpayloadsize = '>i'
_fpartparamcount = '>BB'
-preferedchunksize = 4096
+preferedchunksize = 32768
_parttypeforbidden = re.compile('[^a-zA-Z0-9_:-]')
@@ -299,6 +302,8 @@
self.captureoutput = captureoutput
self.hookargs = {}
self._gettransaction = transactiongetter
+ # carries value that can modify part behavior
+ self.modes = {}
def gettransaction(self):
transaction = self._gettransaction()
@@ -362,7 +367,7 @@
self.count = count
self.current = p
yield p
- p.seek(0, 2)
+ p.consume()
self.current = None
self.iterator = func()
return self.iterator
@@ -384,11 +389,11 @@
try:
if self.current:
# consume the part content to not corrupt the stream.
- self.current.seek(0, 2)
+ self.current.consume()
for part in self.iterator:
# consume the bundle content
- part.seek(0, 2)
+ part.consume()
except Exception:
seekerror = True
@@ -594,6 +599,10 @@
self.capabilities = dict(capabilities)
self._compengine = util.compengines.forbundletype('UN')
self._compopts = None
+ # If compression is being handled by a consumer of the raw
+ # data (e.g. the wire protocol), unsetting this flag tells
+ # consumers that the bundle is best left uncompressed.
+ self.prefercompressed = True
def setcompression(self, alg, compopts=None):
"""setup core part compression to <alg>"""
@@ -844,8 +853,9 @@
yield self._readexact(size)
- def iterparts(self):
+ def iterparts(self, seekable=False):
"""yield all parts contained in the stream"""
+ cls = seekableunbundlepart if seekable else unbundlepart
# make sure param have been loaded
self.params
# From there, payload need to be decompressed
@@ -853,13 +863,12 @@
indebug(self.ui, 'start extraction of bundle2 parts')
headerblock = self._readpartheader()
while headerblock is not None:
- part = unbundlepart(self.ui, headerblock, self._fp)
+ part = cls(self.ui, headerblock, self._fp)
yield part
- # Seek to the end of the part to force it's consumption so the next
- # part can be read. But then seek back to the beginning so the
- # code consuming this generator has a part that starts at 0.
- part.seek(0, 2)
- part.seek(0)
+ # Ensure part is fully consumed so we can start reading the next
+ # part.
+ part.consume()
+
headerblock = self._readpartheader()
indebug(self.ui, 'end of bundle2 stream')
@@ -1164,7 +1173,7 @@
raise
finally:
if not hardabort:
- part.seek(0, 2)
+ part.consume()
self.ui.debug('bundle2-input-stream-interrupt:'
' closing out of band context\n')
@@ -1186,6 +1195,55 @@
def gettransaction(self):
raise TransactionUnavailable('no repo access from stream interruption')
+def decodepayloadchunks(ui, fh):
+ """Reads bundle2 part payload data into chunks.
+
+ Part payload data consists of framed chunks. This function takes
+ a file handle and emits those chunks.
+ """
+ dolog = ui.configbool('devel', 'bundle2.debug')
+ debug = ui.debug
+
+ headerstruct = struct.Struct(_fpayloadsize)
+ headersize = headerstruct.size
+ unpack = headerstruct.unpack
+
+ readexactly = changegroup.readexactly
+ read = fh.read
+
+ chunksize = unpack(readexactly(fh, headersize))[0]
+ indebug(ui, 'payload chunk size: %i' % chunksize)
+
+ # changegroup.readexactly() is inlined below for performance.
+ while chunksize:
+ if chunksize >= 0:
+ s = read(chunksize)
+ if len(s) < chunksize:
+ raise error.Abort(_('stream ended unexpectedly '
+ ' (got %d bytes, expected %d)') %
+ (len(s), chunksize))
+
+ yield s
+ elif chunksize == flaginterrupt:
+ # Interrupt "signal" detected. The regular stream is interrupted
+ # and a bundle2 part follows. Consume it.
+ interrupthandler(ui, fh)()
+ else:
+ raise error.BundleValueError(
+ 'negative payload chunk size: %s' % chunksize)
+
+ s = read(headersize)
+ if len(s) < headersize:
+ raise error.Abort(_('stream ended unexpectedly '
+ ' (got %d bytes, expected %d)') %
+ (len(s), chunksize))
+
+ chunksize = unpack(s)[0]
+
+ # indebug() inlined for performance.
+ if dolog:
+ debug('bundle2-input: payload chunk size: %i\n' % chunksize)
+
class unbundlepart(unpackermixin):
"""a bundle part read from a bundle"""
@@ -1206,10 +1264,8 @@
self.advisoryparams = None
self.params = None
self.mandatorykeys = ()
- self._payloadstream = None
self._readheader()
self._mandatory = None
- self._chunkindex = [] #(payload, file) position tuples for chunk starts
self._pos = 0
def _fromheader(self, size):
@@ -1236,46 +1292,6 @@
self.params.update(self.advisoryparams)
self.mandatorykeys = frozenset(p[0] for p in mandatoryparams)
- def _payloadchunks(self, chunknum=0):
- '''seek to specified chunk and start yielding data'''
- if len(self._chunkindex) == 0:
- assert chunknum == 0, 'Must start with chunk 0'
- self._chunkindex.append((0, self._tellfp()))
- else:
- assert chunknum < len(self._chunkindex), \
- 'Unknown chunk %d' % chunknum
- self._seekfp(self._chunkindex[chunknum][1])
-
- pos = self._chunkindex[chunknum][0]
- payloadsize = self._unpack(_fpayloadsize)[0]
- indebug(self.ui, 'payload chunk size: %i' % payloadsize)
- while payloadsize:
- if payloadsize == flaginterrupt:
- # interruption detection, the handler will now read a
- # single part and process it.
- interrupthandler(self.ui, self._fp)()
- elif payloadsize < 0:
- msg = 'negative payload chunk size: %i' % payloadsize
- raise error.BundleValueError(msg)
- else:
- result = self._readexact(payloadsize)
- chunknum += 1
- pos += payloadsize
- if chunknum == len(self._chunkindex):
- self._chunkindex.append((pos, self._tellfp()))
- yield result
- payloadsize = self._unpack(_fpayloadsize)[0]
- indebug(self.ui, 'payload chunk size: %i' % payloadsize)
-
- def _findchunk(self, pos):
- '''for a given payload position, return a chunk number and offset'''
- for chunk, (ppos, fpos) in enumerate(self._chunkindex):
- if ppos == pos:
- return chunk, 0
- elif ppos > pos:
- return chunk - 1, pos - self._chunkindex[chunk - 1][0]
- raise ValueError('Unknown chunk')
-
def _readheader(self):
"""read the header and setup the object"""
typesize = self._unpackheader(_fparttypesize)[0]
@@ -1311,6 +1327,24 @@
# we read the data, tell it
self._initialized = True
+ def _payloadchunks(self):
+ """Generator of decoded chunks in the payload."""
+ return decodepayloadchunks(self.ui, self._fp)
+
+ def consume(self):
+ """Read the part payload until completion.
+
+ By consuming the part data, the underlying stream read offset will
+ be advanced to the next part (or end of stream).
+ """
+ if self.consumed:
+ return
+
+ chunk = self.read(32768)
+ while chunk:
+ self._pos += len(chunk)
+ chunk = self.read(32768)
+
def read(self, size=None):
"""read payload data"""
if not self._initialized:
@@ -1327,23 +1361,82 @@
self.consumed = True
return data
+class seekableunbundlepart(unbundlepart):
+ """A bundle2 part in a bundle that is seekable.
+
+ Regular ``unbundlepart`` instances can only be read once. This class
+ extends ``unbundlepart`` to enable bi-directional seeking within the
+ part.
+
+ Bundle2 part data consists of framed chunks. Offsets when seeking
+ refer to the decoded data, not the offsets in the underlying bundle2
+ stream.
+
+ To facilitate quickly seeking within the decoded data, instances of this
+ class maintain a mapping between offsets in the underlying stream and
+ the decoded payload. This mapping will consume memory in proportion
+ to the number of chunks within the payload (which almost certainly
+ increases in proportion with the size of the part).
+ """
+ def __init__(self, ui, header, fp):
+ # (payload, file) offsets for chunk starts.
+ self._chunkindex = []
+
+ super(seekableunbundlepart, self).__init__(ui, header, fp)
+
+ def _payloadchunks(self, chunknum=0):
+ '''seek to specified chunk and start yielding data'''
+ if len(self._chunkindex) == 0:
+ assert chunknum == 0, 'Must start with chunk 0'
+ self._chunkindex.append((0, self._tellfp()))
+ else:
+ assert chunknum < len(self._chunkindex), \
+ 'Unknown chunk %d' % chunknum
+ self._seekfp(self._chunkindex[chunknum][1])
+
+ pos = self._chunkindex[chunknum][0]
+
+ for chunk in decodepayloadchunks(self.ui, self._fp):
+ chunknum += 1
+ pos += len(chunk)
+ if chunknum == len(self._chunkindex):
+ self._chunkindex.append((pos, self._tellfp()))
+
+ yield chunk
+
+ def _findchunk(self, pos):
+ '''for a given payload position, return a chunk number and offset'''
+ for chunk, (ppos, fpos) in enumerate(self._chunkindex):
+ if ppos == pos:
+ return chunk, 0
+ elif ppos > pos:
+ return chunk - 1, pos - self._chunkindex[chunk - 1][0]
+ raise ValueError('Unknown chunk')
+
def tell(self):
return self._pos
- def seek(self, offset, whence=0):
- if whence == 0:
+ def seek(self, offset, whence=os.SEEK_SET):
+ if whence == os.SEEK_SET:
newpos = offset
- elif whence == 1:
+ elif whence == os.SEEK_CUR:
newpos = self._pos + offset
- elif whence == 2:
+ elif whence == os.SEEK_END:
if not self.consumed:
- self.read()
+ # Can't use self.consume() here because it advances self._pos.
+ chunk = self.read(32768)
+ while chunk:
+ chunk = self.read(32768)
newpos = self._chunkindex[-1][0] - offset
else:
raise ValueError('Unknown whence value: %r' % (whence,))
if newpos > self._chunkindex[-1][0] and not self.consumed:
- self.read()
+ # Can't use self.consume() here because it advances self._pos.
+ chunk = self.read(32768)
+ while chunk:
+                    chunk = self.read(32768)
+
if not 0 <= newpos <= self._chunkindex[-1][0]:
raise ValueError('Offset out of range')
@@ -1389,6 +1482,7 @@
# These are only the static capabilities.
# Check the 'getrepocaps' function for the rest.
capabilities = {'HG20': (),
+ 'bookmarks': (),
'error': ('abort', 'unsupportedcontent', 'pushraced',
'pushkey'),
'listkeys': (),
@@ -1397,13 +1491,21 @@
'remote-changegroup': ('http', 'https'),
'hgtagsfnodes': (),
'phases': ('heads',),
+ 'stream': ('v2',),
}
-def getrepocaps(repo, allowpushback=False):
+def getrepocaps(repo, allowpushback=False, role=None):
"""return the bundle2 capabilities for a given repo
Exists to allow extensions (like evolution) to mutate the capabilities.
+
+ The returned value is used for servers advertising their capabilities as
+ well as clients advertising their capabilities to servers as part of
+ bundle2 requests. The ``role`` argument specifies which is which.
"""
+ if role not in ('client', 'server'):
+ raise error.ProgrammingError('role argument must be client or server')
+
caps = capabilities.copy()
caps['changegroup'] = tuple(sorted(
changegroup.supportedincomingversions(repo)))
@@ -1417,6 +1519,18 @@
caps['checkheads'] = ('related',)
if 'phases' in repo.ui.configlist('devel', 'legacy.exchange'):
caps.pop('phases')
+
+ # Don't advertise stream clone support in server mode if not configured.
+ if role == 'server':
+ streamsupported = repo.ui.configbool('server', 'uncompressed',
+ untrusted=True)
+ featuresupported = repo.ui.configbool('experimental', 'bundle2.stream')
+
+ if not streamsupported or not featuresupported:
+ caps.pop('stream')
+ # Else always advertise support on client, because payload support
+ # should always be advertised.
+
return caps
def bundle2caps(remote):
@@ -1702,6 +1816,34 @@
replyto = int(inpart.params['in-reply-to'])
op.records.add('changegroup', {'return': ret}, replyto)
+@parthandler('check:bookmarks')
+def handlecheckbookmarks(op, inpart):
+ """check location of bookmarks
+
+    This part is used to detect push races on bookmarks: it contains
+    binary encoded (bookmark, node) tuples. If the local state does not
+    match the ones in the part, a PushRaced exception is raised.
+ """
+ bookdata = bookmarks.binarydecode(inpart)
+
+ msgstandard = ('repository changed while pushing - please try again '
+ '(bookmark "%s" move from %s to %s)')
+ msgmissing = ('repository changed while pushing - please try again '
+ '(bookmark "%s" is missing, expected %s)')
+ msgexist = ('repository changed while pushing - please try again '
+ '(bookmark "%s" set on %s, expected missing)')
+ for book, node in bookdata:
+ currentnode = op.repo._bookmarks.get(book)
+ if currentnode != node:
+ if node is None:
+ finalmsg = msgexist % (book, nodemod.short(currentnode))
+ elif currentnode is None:
+ finalmsg = msgmissing % (book, nodemod.short(node))
+ else:
+ finalmsg = msgstandard % (book, nodemod.short(node),
+ nodemod.short(currentnode))
+ raise error.PushRaced(finalmsg)
+
@parthandler('check:heads')
def handlecheckheads(op, inpart):
"""check that head of the repo did not change
@@ -1861,6 +2003,60 @@
kwargs[key] = inpart.params[key]
raise error.PushkeyFailed(partid=str(inpart.id), **kwargs)
+@parthandler('bookmarks')
+def handlebookmark(op, inpart):
+ """transmit bookmark information
+
+ The part contains binary encoded bookmark information.
+
+ The exact behavior of this part can be controlled by the 'bookmarks' mode
+ on the bundle operation.
+
+ When mode is 'apply' (the default) the bookmark information is applied as
+    When mode is 'apply' (the default), the bookmark information is applied
+    as is to the unbundling repository. Make sure a 'check:bookmarks' part
+    is issued earlier to check for push races in such an update. This
+    behavior is suitable for pushing.
+ When mode is 'records', the information is recorded into the 'bookmarks'
+ records of the bundle operation. This behavior is suitable for pulling.
+ """
+ changes = bookmarks.binarydecode(inpart)
+
+ pushkeycompat = op.repo.ui.configbool('server', 'bookmarks-pushkey-compat')
+ bookmarksmode = op.modes.get('bookmarks', 'apply')
+
+ if bookmarksmode == 'apply':
+ tr = op.gettransaction()
+ bookstore = op.repo._bookmarks
+ if pushkeycompat:
+ allhooks = []
+ for book, node in changes:
+ hookargs = tr.hookargs.copy()
+ hookargs['pushkeycompat'] = '1'
+ hookargs['namespace'] = 'bookmark'
+ hookargs['key'] = book
+ hookargs['old'] = nodemod.hex(bookstore.get(book, ''))
+ hookargs['new'] = nodemod.hex(node if node is not None else '')
+ allhooks.append(hookargs)
+
+ for hookargs in allhooks:
+ op.repo.hook('prepushkey', throw=True, **hookargs)
+
+ bookstore.applychanges(op.repo, op.gettransaction(), changes)
+
+ if pushkeycompat:
+ def runhook():
+ for hookargs in allhooks:
+ op.repo.hook('pushkey', **hookargs)
+ op.repo._afterlock(runhook)
+
+ elif bookmarksmode == 'records':
+ for book, node in changes:
+ record = {'bookmark': book, 'node': node}
+ op.records.add('bookmarks', record)
+ else:
+        raise error.ProgrammingError(
+            'unknown bookmark mode: %s' % bookmarksmode)
+
@parthandler('phase-heads')
def handlephases(op, inpart):
"""apply phases from bundle part to repo"""
@@ -1885,7 +2081,7 @@
# The mergemarkers call will crash if marker creation is not enabled.
# we want to avoid this if the part is advisory.
if not inpart.mandatory and op.repo.obsstore.readonly:
- op.repo.ui.debug('ignoring obsolescence markers, feature not enabled')
+ op.repo.ui.debug('ignoring obsolescence markers, feature not enabled\n')
return
new = op.repo.obsstore.mergemarkers(tr, markerdata)
op.repo.invalidatevolatilesets()
@@ -1943,3 +2139,27 @@
key = "USERVAR_" + key
hookargs[key] = value
op.addhookargs(hookargs)
+
+@parthandler('stream2', ('requirements', 'filecount', 'bytecount'))
+def handlestreamv2bundle(op, part):
+
+ requirements = part.params['requirements'].split()
+ filecount = int(part.params['filecount'])
+ bytecount = int(part.params['bytecount'])
+
+ repo = op.repo
+ if len(repo):
+        msg = _('cannot apply stream clone to non-empty repository')
+ raise error.Abort(msg)
+
+ repo.ui.debug('applying stream bundle\n')
+ streamclone.applybundlev2(repo, part, filecount, bytecount,
+ requirements)
+
+ # new requirements = old non-format requirements +
+ # new format-related remote requirements
+ # requirements from the streamed-in repository
+ repo.requirements = set(requirements) | (
+ repo.requirements - repo.supportedformats)
+ repo._applyopenerreqs()
+ repo._writerequirements()
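
decodepayloadchunks() above reads a part payload as framed chunks: each chunk is preceded by a big-endian signed 32-bit length (_fpayloadsize = '>i'), a zero length terminates the payload, and a negative length signals an interrupt part. A minimal sketch of that framing, omitting the interrupt case; framechunks/readchunks are illustrative names only, not Mercurial API:

import io
import struct

header = struct.Struct('>i')

def framechunks(chunks):
    # size-prefix each chunk, then terminate with a zero-length header
    return b''.join(header.pack(len(c)) + c for c in chunks) + header.pack(0)

def readchunks(fh):
    size = header.unpack(fh.read(header.size))[0]
    while size:
        if size < 0:
            raise ValueError('negative payload chunk size: %d' % size)
        yield fh.read(size)
        size = header.unpack(fh.read(header.size))[0]

stream = io.BytesIO(framechunks([b'abc', b'defgh']))
assert list(readchunks(stream)) == [b'abc', b'defgh']
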
--- a/mercurial/bundlerepo.py Mon Jan 08 16:07:51 2018 -0800
+++ b/mercurial/bundlerepo.py Mon Jan 22 17:53:02 2018 -0500
@@ -42,7 +42,7 @@
)
class bundlerevlog(revlog.revlog):
- def __init__(self, opener, indexfile, bundle, linkmapper):
+ def __init__(self, opener, indexfile, cgunpacker, linkmapper):
# How it works:
# To retrieve a revision, we need to know the offset of the revision in
# the bundle (an unbundle object). We store this offset in the index
@@ -52,15 +52,15 @@
# check revision against repotiprev.
opener = vfsmod.readonlyvfs(opener)
revlog.revlog.__init__(self, opener, indexfile)
- self.bundle = bundle
+ self.bundle = cgunpacker
n = len(self)
self.repotiprev = n - 1
self.bundlerevs = set() # used by 'bundle()' revset expression
- for deltadata in bundle.deltaiter():
+ for deltadata in cgunpacker.deltaiter():
node, p1, p2, cs, deltabase, delta, flags = deltadata
size = len(delta)
- start = bundle.tell() - size
+ start = cgunpacker.tell() - size
link = linkmapper(cs)
if node in self.nodemap:
@@ -86,7 +86,7 @@
self.bundlerevs.add(n)
n += 1
- def _chunk(self, rev):
+ def _chunk(self, rev, df=None):
# Warning: in case of bundle, the diff is against what we stored as
# delta base, not against rev - 1
# XXX: could use some caching
@@ -108,7 +108,7 @@
return mdiff.textdiff(self.revision(rev1, raw=True),
self.revision(rev2, raw=True))
- def revision(self, nodeorrev, raw=False):
+ def revision(self, nodeorrev, _df=None, raw=False):
"""return an uncompressed revision of a given node or revision
number.
"""
@@ -152,20 +152,23 @@
# needs to override 'baserevision' and make more specific call here.
return revlog.revlog.revision(self, nodeorrev, raw=True)
- def addrevision(self, text, transaction, link, p1=None, p2=None, d=None):
+ def addrevision(self, *args, **kwargs):
+ raise NotImplementedError
+
+ def addgroup(self, *args, **kwargs):
raise NotImplementedError
- def addgroup(self, deltas, transaction, addrevisioncb=None):
+
+ def strip(self, *args, **kwargs):
raise NotImplementedError
- def strip(self, rev, minlink):
- raise NotImplementedError
+
def checksize(self):
raise NotImplementedError
class bundlechangelog(bundlerevlog, changelog.changelog):
- def __init__(self, opener, bundle):
+ def __init__(self, opener, cgunpacker):
changelog.changelog.__init__(self, opener)
linkmapper = lambda x: x
- bundlerevlog.__init__(self, opener, self.indexfile, bundle,
+ bundlerevlog.__init__(self, opener, self.indexfile, cgunpacker,
linkmapper)
def baserevision(self, nodeorrev):
@@ -183,9 +186,10 @@
self.filteredrevs = oldfilter
class bundlemanifest(bundlerevlog, manifest.manifestrevlog):
- def __init__(self, opener, bundle, linkmapper, dirlogstarts=None, dir=''):
+ def __init__(self, opener, cgunpacker, linkmapper, dirlogstarts=None,
+ dir=''):
manifest.manifestrevlog.__init__(self, opener, dir=dir)
- bundlerevlog.__init__(self, opener, self.indexfile, bundle,
+ bundlerevlog.__init__(self, opener, self.indexfile, cgunpacker,
linkmapper)
if dirlogstarts is None:
dirlogstarts = {}
@@ -214,9 +218,9 @@
return super(bundlemanifest, self).dirlog(d)
class bundlefilelog(bundlerevlog, filelog.filelog):
- def __init__(self, opener, path, bundle, linkmapper):
+ def __init__(self, opener, path, cgunpacker, linkmapper):
filelog.filelog.__init__(self, opener, path)
- bundlerevlog.__init__(self, opener, self.indexfile, bundle,
+ bundlerevlog.__init__(self, opener, self.indexfile, cgunpacker,
linkmapper)
def baserevision(self, nodeorrev):
@@ -243,82 +247,106 @@
self.invalidate()
self.dirty = True
-def _getfilestarts(bundle):
- bundlefilespos = {}
- for chunkdata in iter(bundle.filelogheader, {}):
+def _getfilestarts(cgunpacker):
+ filespos = {}
+ for chunkdata in iter(cgunpacker.filelogheader, {}):
fname = chunkdata['filename']
- bundlefilespos[fname] = bundle.tell()
- for chunk in iter(lambda: bundle.deltachunk(None), {}):
+ filespos[fname] = cgunpacker.tell()
+ for chunk in iter(lambda: cgunpacker.deltachunk(None), {}):
pass
- return bundlefilespos
+ return filespos
class bundlerepository(localrepo.localrepository):
- def __init__(self, ui, path, bundlename):
+ """A repository instance that is a union of a local repo and a bundle.
+
+ Instances represent a read-only repository composed of a local repository
+ with the contents of a bundle file applied. The repository instance is
+ conceptually similar to the state of a repository after an
+ ``hg unbundle`` operation. However, the contents of the bundle are never
+ applied to the actual base repository.
+ """
+ def __init__(self, ui, repopath, bundlepath):
self._tempparent = None
try:
- localrepo.localrepository.__init__(self, ui, path)
+ localrepo.localrepository.__init__(self, ui, repopath)
except error.RepoError:
self._tempparent = tempfile.mkdtemp()
localrepo.instance(ui, self._tempparent, 1)
localrepo.localrepository.__init__(self, ui, self._tempparent)
self.ui.setconfig('phases', 'publish', False, 'bundlerepo')
- if path:
- self._url = 'bundle:' + util.expandpath(path) + '+' + bundlename
+ if repopath:
+ self._url = 'bundle:' + util.expandpath(repopath) + '+' + bundlepath
else:
- self._url = 'bundle:' + bundlename
+ self._url = 'bundle:' + bundlepath
self.tempfile = None
- f = util.posixfile(bundlename, "rb")
- self.bundlefile = self.bundle = exchange.readbundle(ui, f, bundlename)
+ f = util.posixfile(bundlepath, "rb")
+ bundle = exchange.readbundle(ui, f, bundlepath)
- if isinstance(self.bundle, bundle2.unbundle20):
- hadchangegroup = False
- for part in self.bundle.iterparts():
+ if isinstance(bundle, bundle2.unbundle20):
+ self._bundlefile = bundle
+ self._cgunpacker = None
+
+ cgpart = None
+ for part in bundle.iterparts(seekable=True):
if part.type == 'changegroup':
- if hadchangegroup:
+ if cgpart:
raise NotImplementedError("can't process "
"multiple changegroups")
- hadchangegroup = True
+ cgpart = part
- self._handlebundle2part(part)
+ self._handlebundle2part(bundle, part)
- if not hadchangegroup:
+ if not cgpart:
raise error.Abort(_("No changegroups found"))
- elif self.bundle.compressed():
- f = self._writetempbundle(self.bundle.read, '.hg10un',
- header='HG10UN')
- self.bundlefile = self.bundle = exchange.readbundle(ui, f,
- bundlename,
- self.vfs)
+ # This is required to placate a later consumer, which expects
+ # the payload offset to be at the beginning of the changegroup.
+ # We need to do this after the iterparts() generator advances
+ # because iterparts() will seek to end of payload after the
+ # generator returns control to iterparts().
+ cgpart.seek(0, os.SEEK_SET)
- # dict with the mapping 'filename' -> position in the bundle
- self.bundlefilespos = {}
+ elif isinstance(bundle, changegroup.cg1unpacker):
+ if bundle.compressed():
+ f = self._writetempbundle(bundle.read, '.hg10un',
+ header='HG10UN')
+ bundle = exchange.readbundle(ui, f, bundlepath, self.vfs)
+
+ self._bundlefile = bundle
+ self._cgunpacker = bundle
+ else:
+ raise error.Abort(_('bundle type %s cannot be read') %
+ type(bundle))
+
+ # dict with the mapping 'filename' -> position in the changegroup.
+ self._cgfilespos = {}
self.firstnewrev = self.changelog.repotiprev + 1
phases.retractboundary(self, None, phases.draft,
[ctx.node() for ctx in self[self.firstnewrev:]])
- def _handlebundle2part(self, part):
- if part.type == 'changegroup':
- cgstream = part
- version = part.params.get('version', '01')
- legalcgvers = changegroup.supportedincomingversions(self)
- if version not in legalcgvers:
- msg = _('Unsupported changegroup version: %s')
- raise error.Abort(msg % version)
- if self.bundle.compressed():
- cgstream = self._writetempbundle(part.read,
- ".cg%sun" % version)
+ def _handlebundle2part(self, bundle, part):
+ if part.type != 'changegroup':
+ return
- self.bundle = changegroup.getunbundler(version, cgstream, 'UN')
+ cgstream = part
+ version = part.params.get('version', '01')
+ legalcgvers = changegroup.supportedincomingversions(self)
+ if version not in legalcgvers:
+ msg = _('Unsupported changegroup version: %s')
+ raise error.Abort(msg % version)
+ if bundle.compressed():
+ cgstream = self._writetempbundle(part.read, '.cg%sun' % version)
+
+ self._cgunpacker = changegroup.getunbundler(version, cgstream, 'UN')
def _writetempbundle(self, readfn, suffix, header=''):
"""Write a temporary file to disk
"""
fdtemp, temp = self.vfs.mkstemp(prefix="hg-bundle-",
- suffix=".hg10un")
+ suffix=suffix)
self.tempfile = temp
with os.fdopen(fdtemp, pycompat.sysstr('wb')) as fptemp:
@@ -338,20 +366,29 @@
@localrepo.unfilteredpropertycache
def changelog(self):
# consume the header if it exists
- self.bundle.changelogheader()
- c = bundlechangelog(self.svfs, self.bundle)
- self.manstart = self.bundle.tell()
+ self._cgunpacker.changelogheader()
+ c = bundlechangelog(self.svfs, self._cgunpacker)
+ self.manstart = self._cgunpacker.tell()
return c
def _constructmanifest(self):
- self.bundle.seek(self.manstart)
+ self._cgunpacker.seek(self.manstart)
# consume the header if it exists
- self.bundle.manifestheader()
+ self._cgunpacker.manifestheader()
linkmapper = self.unfiltered().changelog.rev
- m = bundlemanifest(self.svfs, self.bundle, linkmapper)
- self.filestart = self.bundle.tell()
+ m = bundlemanifest(self.svfs, self._cgunpacker, linkmapper)
+ self.filestart = self._cgunpacker.tell()
return m
+ def _consumemanifest(self):
+ """Consumes the manifest portion of the bundle, setting filestart so the
+ file portion can be read."""
+ self._cgunpacker.seek(self.manstart)
+ self._cgunpacker.manifestheader()
+ for delta in self._cgunpacker.deltaiter():
+ pass
+ self.filestart = self._cgunpacker.tell()
+
@localrepo.unfilteredpropertycache
def manstart(self):
self.changelog
@@ -360,26 +397,34 @@
@localrepo.unfilteredpropertycache
def filestart(self):
self.manifestlog
+
+ # If filestart was not set by self.manifestlog, that means the
+ # manifestlog implementation did not consume the manifests from the
+ # changegroup (ex: it might be consuming trees from a separate bundle2
+ # part instead). So we need to manually consume it.
+ if 'filestart' not in self.__dict__:
+ self._consumemanifest()
+
return self.filestart
def url(self):
return self._url
def file(self, f):
- if not self.bundlefilespos:
- self.bundle.seek(self.filestart)
- self.bundlefilespos = _getfilestarts(self.bundle)
+ if not self._cgfilespos:
+ self._cgunpacker.seek(self.filestart)
+ self._cgfilespos = _getfilestarts(self._cgunpacker)
- if f in self.bundlefilespos:
- self.bundle.seek(self.bundlefilespos[f])
+ if f in self._cgfilespos:
+ self._cgunpacker.seek(self._cgfilespos[f])
linkmapper = self.unfiltered().changelog.rev
- return bundlefilelog(self.svfs, f, self.bundle, linkmapper)
+ return bundlefilelog(self.svfs, f, self._cgunpacker, linkmapper)
else:
return filelog.filelog(self.svfs, f)
def close(self):
"""Close assigned bundle file immediately."""
- self.bundlefile.close()
+ self._bundlefile.close()
if self.tempfile is not None:
self.vfs.unlink(self.tempfile)
if self._tempparent:
@@ -496,10 +541,10 @@
and other.capable('bundle2'))
if canbundle2:
kwargs = {}
- kwargs['common'] = common
- kwargs['heads'] = rheads
- kwargs['bundlecaps'] = exchange.caps20to10(repo)
- kwargs['cg'] = True
+ kwargs[r'common'] = common
+ kwargs[r'heads'] = rheads
+ kwargs[r'bundlecaps'] = exchange.caps20to10(repo, role='client')
+ kwargs[r'cg'] = True
b2 = other.getbundle('incoming', **kwargs)
fname = bundle = changegroup.writechunks(ui, b2._forwardchunks(),
bundlename)
--- a/mercurial/byterange.py Mon Jan 08 16:07:51 2018 -0800
+++ b/mercurial/byterange.py Mon Jan 22 17:53:02 2018 -0500
@@ -416,7 +416,7 @@
if range_header is None:
return None
if _rangere is None:
- _rangere = re.compile(r'^bytes=(\d{1,})-(\d*)')
+ _rangere = re.compile(br'^bytes=(\d{1,})-(\d*)')
match = _rangere.match(range_header)
if match:
tup = range_tuple_normalize(match.group(1, 2))
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/mercurial/cacheutil.py Mon Jan 22 17:53:02 2018 -0500
@@ -0,0 +1,21 @@
+# cacheutil.py - utilities for managing repository cache copies
+#
+# Copyright Matt Mackall <mpm@selenic.com> and others
+#
+# This software may be used and distributed according to the terms of the
+# GNU General Public License version 2 or any later version.
+from __future__ import absolute_import
+
+from . import repoview
+
+def cachetocopy(srcrepo):
+    """return the list of cache files valuable to copy during a clone"""
+ # In local clones we're copying all nodes, not just served
+ # ones. Therefore copy all branch caches over.
+ cachefiles = ['branch2']
+ cachefiles += ['branch2-%s' % f for f in repoview.filtertable]
+ cachefiles += ['rbc-names-v1', 'rbc-revs-v1']
+ cachefiles += ['tags2']
+ cachefiles += ['tags2-%s' % f for f in repoview.filtertable]
+ cachefiles += ['hgtagsfnodes1']
+ return cachefiles
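
cachetocopy() only returns file names; a caller is expected to copy those files out of the source repository's cache area. A hypothetical sketch of such a caller, assuming repository objects that expose a cachevfs; the copycaches helper is illustrative, not part of this patch:

from mercurial import cacheutil

def copycaches(srcrepo, destrepo):
    # copy every valuable cache file that actually exists in the source
    for name in cacheutil.cachetocopy(srcrepo):
        if srcrepo.cachevfs.exists(name):
            destrepo.cachevfs.write(name, srcrepo.cachevfs.read(name))
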
--- a/mercurial/cext/osutil.c Mon Jan 08 16:07:51 2018 -0800
+++ b/mercurial/cext/osutil.c Mon Jan 22 17:53:02 2018 -0500
@@ -20,6 +20,7 @@
#include <windows.h>
#else
#include <dirent.h>
+#include <signal.h>
#include <sys/socket.h>
#include <sys/stat.h>
#include <sys/types.h>
@@ -1111,6 +1112,43 @@
}
#endif /* defined(HAVE_LINUX_STATFS) || defined(HAVE_BSD_STATFS) */
+#if defined(HAVE_BSD_STATFS)
+/* given a directory path, return filesystem mount point (best-effort) */
+static PyObject *getfsmountpoint(PyObject *self, PyObject *args)
+{
+ const char *path = NULL;
+ struct statfs buf;
+ int r;
+ if (!PyArg_ParseTuple(args, "s", &path))
+ return NULL;
+
+ memset(&buf, 0, sizeof(buf));
+ r = statfs(path, &buf);
+ if (r != 0)
+ return PyErr_SetFromErrno(PyExc_OSError);
+ return Py_BuildValue("s", buf.f_mntonname);
+}
+#endif /* defined(HAVE_BSD_STATFS) */
+
+static PyObject *unblocksignal(PyObject *self, PyObject *args)
+{
+ int sig = 0;
+ int r;
+ if (!PyArg_ParseTuple(args, "i", &sig))
+ return NULL;
+ sigset_t set;
+ r = sigemptyset(&set);
+ if (r != 0)
+ return PyErr_SetFromErrno(PyExc_OSError);
+ r = sigaddset(&set, sig);
+ if (r != 0)
+ return PyErr_SetFromErrno(PyExc_OSError);
+ r = sigprocmask(SIG_UNBLOCK, &set, NULL);
+ if (r != 0)
+ return PyErr_SetFromErrno(PyExc_OSError);
+ Py_RETURN_NONE;
+}
+
#endif /* ndef _WIN32 */
static PyObject *listdir(PyObject *self, PyObject *args, PyObject *kwargs)
@@ -1291,6 +1329,12 @@
{"getfstype", (PyCFunction)getfstype, METH_VARARGS,
"get filesystem type (best-effort)\n"},
#endif
+#if defined(HAVE_BSD_STATFS)
+ {"getfsmountpoint", (PyCFunction)getfsmountpoint, METH_VARARGS,
+ "get filesystem mount point (best-effort)\n"},
+#endif
+ {"unblocksignal", (PyCFunction)unblocksignal, METH_VARARGS,
+ "change signal mask to unblock a given signal\n"},
#endif /* ndef _WIN32 */
#ifdef __APPLE__
{
@@ -1301,7 +1345,7 @@
{NULL, NULL}
};
-static const int version = 1;
+static const int version = 3;
#ifdef IS_PY3K
static struct PyModuleDef osutil_module = {
--- a/mercurial/cext/parsers.c Mon Jan 08 16:07:51 2018 -0800
+++ b/mercurial/cext/parsers.c Mon Jan 22 17:53:02 2018 -0500
@@ -710,7 +710,7 @@
void manifest_module_init(PyObject *mod);
void revlog_module_init(PyObject *mod);
-static const int version = 3;
+static const int version = 4;
static void module_init(PyObject *mod)
{
--- a/mercurial/cext/revlog.c Mon Jan 08 16:07:51 2018 -0800
+++ b/mercurial/cext/revlog.c Mon Jan 22 17:53:02 2018 -0500
@@ -628,7 +628,7 @@
{
PyObject *roots = Py_None;
PyObject *ret = NULL;
- PyObject *phaseslist = NULL;
+ PyObject *phasessize = NULL;
PyObject *phaseroots = NULL;
PyObject *phaseset = NULL;
PyObject *phasessetlist = NULL;
@@ -685,12 +685,10 @@
}
}
/* Transform phase list to a python list */
- phaseslist = PyList_New(len);
- if (phaseslist == NULL)
+ phasessize = PyInt_FromLong(len);
+ if (phasessize == NULL)
goto release;
for (i = 0; i < len; i++) {
- PyObject *phaseval;
-
phase = phases[i];
/* We only store the sets of phase for non public phase, the public phase
* is computed as a difference */
@@ -702,15 +700,11 @@
PySet_Add(phaseset, rev);
Py_XDECREF(rev);
}
- phaseval = PyInt_FromLong(phase);
- if (phaseval == NULL)
- goto release;
- PyList_SET_ITEM(phaseslist, i, phaseval);
}
- ret = PyTuple_Pack(2, phaseslist, phasessetlist);
+ ret = PyTuple_Pack(2, phasessize, phasessetlist);
release:
- Py_XDECREF(phaseslist);
+ Py_XDECREF(phasessize);
Py_XDECREF(phasessetlist);
done:
free(phases);
--- a/mercurial/cext/util.h Mon Jan 08 16:07:51 2018 -0800
+++ b/mercurial/cext/util.h Mon Jan 22 17:53:02 2018 -0500
@@ -27,7 +27,9 @@
extern PyTypeObject dirstateTupleType;
#define dirstate_tuple_check(op) (Py_TYPE(op) == &dirstateTupleType)
+#ifndef MIN
#define MIN(a, b) (((a) < (b)) ? (a) : (b))
+#endif
/* VC9 doesn't include bool and lacks stdbool.h based on my searching */
#if defined(_MSC_VER) || __STDC_VERSION__ < 199901L
#define true 1
--- a/mercurial/changegroup.py Mon Jan 08 16:07:51 2018 -0800
+++ b/mercurial/changegroup.py Mon Jan 22 17:53:02 2018 -0500
@@ -32,14 +32,7 @@
_CHANGEGROUPV2_DELTA_HEADER = "20s20s20s20s20s"
_CHANGEGROUPV3_DELTA_HEADER = ">20s20s20s20s20sH"
-def readexactly(stream, n):
- '''read n bytes from stream.read and abort if less was available'''
- s = stream.read(n)
- if len(s) < n:
- raise error.Abort(_("stream ended unexpectedly"
- " (got %d bytes, expected %d)")
- % (len(s), n))
- return s
+readexactly = util.readexactly
def getchunk(stream):
"""return the next chunk from stream as a string"""
@@ -692,7 +685,7 @@
# Callback for the manifest, used to collect linkrevs for filelog
# revisions.
# Returns the linkrev node (collected in lookupcl).
- def makelookupmflinknode(dir):
+ def makelookupmflinknode(dir, nodes):
if fastpathlinkrev:
assert not dir
return mfs.__getitem__
@@ -713,7 +706,7 @@
the client before you can trust the list of files and
treemanifests to send.
"""
- clnode = tmfnodes[dir][x]
+ clnode = nodes[x]
mdata = mfl.get(dir, x).readfast(shallow=True)
for p, n, fl in mdata.iterentries():
if fl == 't': # subdirectory manifest
@@ -733,15 +726,13 @@
size = 0
while tmfnodes:
- dir = min(tmfnodes)
- nodes = tmfnodes[dir]
+ dir, nodes = tmfnodes.popitem()
prunednodes = self.prune(dirlog(dir), nodes, commonrevs)
if not dir or prunednodes:
for x in self._packmanifests(dir, prunednodes,
- makelookupmflinknode(dir)):
+ makelookupmflinknode(dir, nodes)):
size += len(x)
yield x
- del tmfnodes[dir]
self._verbosenote(_('%8.i (manifests)\n') % size)
yield self._manifestsdone()
--- a/mercurial/changelog.py Mon Jan 08 16:07:51 2018 -0800
+++ b/mercurial/changelog.py Mon Jan 22 17:53:02 2018 -0500
@@ -295,11 +295,14 @@
self._divert = False
self.filteredrevs = frozenset()
+ def tiprev(self):
+ for i in xrange(len(self) -1, -2, -1):
+ if i not in self.filteredrevs:
+ return i
+
def tip(self):
"""filtered version of revlog.tip"""
- for i in xrange(len(self) -1, -2, -1):
- if i not in self.filteredrevs:
- return self.node(i)
+ return self.node(self.tiprev())
def __contains__(self, rev):
"""filtered version of revlog.__contains__"""
@@ -541,5 +544,10 @@
*args, **kwargs)
revs = transaction.changes.get('revs')
if revs is not None:
- revs.add(rev)
+ if revs:
+ assert revs[-1] + 1 == rev
+ revs = xrange(revs[0], rev + 1)
+ else:
+ revs = xrange(rev, rev + 1)
+ transaction.changes['revs'] = revs
return node
--- a/mercurial/chgserver.py Mon Jan 08 16:07:51 2018 -0800
+++ b/mercurial/chgserver.py Mon Jan 22 17:53:02 2018 -0500
@@ -55,6 +55,7 @@
encoding,
error,
extensions,
+ node,
pycompat,
util,
)
@@ -63,7 +64,7 @@
def _hashlist(items):
"""return sha1 hexdigest for a list"""
- return hashlib.sha1(str(items)).hexdigest()
+ return node.hex(hashlib.sha1(str(items)).digest())
# sensitive config sections affecting confighash
_configsections = [
@@ -220,16 +221,7 @@
newui._csystem = srcui._csystem
# command line args
- options = {}
- if srcui.plain('strictflags'):
- options.update(dispatch._earlyparseopts(args))
- else:
- args = args[:]
- options['config'] = dispatch._earlygetopt(['--config'], args)
- cwds = dispatch._earlygetopt(['--cwd'], args)
- options['cwd'] = cwds and cwds[-1] or ''
- rpath = dispatch._earlygetopt(["-R", "--repository", "--repo"], args)
- options['repository'] = rpath and rpath[-1] or ''
+ options = dispatch._earlyparseopts(newui, args)
dispatch._parseconfig(newui, options['config'])
# stolen from tortoisehg.util.copydynamicconfig()
--- a/mercurial/cmdutil.py Mon Jan 08 16:07:51 2018 -0800
+++ b/mercurial/cmdutil.py Mon Jan 22 17:53:02 2018 -0500
@@ -41,6 +41,8 @@
registrar,
revlog,
revset,
+ revsetlang,
+ rewriteutil,
scmutil,
smartset,
templatekw,
@@ -181,7 +183,7 @@
def setupwrapcolorwrite(ui):
# wrap ui.write so diff output can be labeled/colorized
def wrapwrite(orig, *args, **kw):
- label = kw.pop('label', '')
+ label = kw.pop(r'label', '')
for chunk, l in patch.difflabel(lambda: args):
orig(chunk, label=label + l)
@@ -372,7 +374,7 @@
# Make all of the pathnames absolute.
newfiles = [repo.wjoin(nf) for nf in newfiles]
- return commitfunc(ui, repo, *newfiles, **opts)
+ return commitfunc(ui, repo, *newfiles, **pycompat.strkwargs(opts))
finally:
# 5. finally restore backed-up files
try:
@@ -712,6 +714,97 @@
raise error.UnknownCommand(cmd, allcmds)
+def changebranch(ui, repo, revs, label):
+ """ Change the branch name of given revs to label """
+
+ with repo.wlock(), repo.lock(), repo.transaction('branches'):
+ # abort in case of uncommitted merge or dirty wdir
+ bailifchanged(repo)
+ revs = scmutil.revrange(repo, revs)
+ if not revs:
+ raise error.Abort("empty revision set")
+ roots = repo.revs('roots(%ld)', revs)
+ if len(roots) > 1:
+ raise error.Abort(_("cannot change branch of non-linear revisions"))
+ rewriteutil.precheck(repo, revs, 'change branch of')
+
+ root = repo[roots.first()]
+ if not root.p1().branch() == label and label in repo.branchmap():
+ raise error.Abort(_("a branch of the same name already exists"))
+
+ if repo.revs('merge() and %ld', revs):
+ raise error.Abort(_("cannot change branch of a merge commit"))
+ if repo.revs('obsolete() and %ld', revs):
+            raise error.Abort(
+                _("cannot change branch of an obsolete changeset"))
+
+ # make sure only topological heads
+ if repo.revs('heads(%ld) - head()', revs):
+ raise error.Abort(_("cannot change branch in middle of a stack"))
+
+ replacements = {}
+ # avoid import cycle mercurial.cmdutil -> mercurial.context ->
+ # mercurial.subrepo -> mercurial.cmdutil
+ from . import context
+ for rev in revs:
+ ctx = repo[rev]
+ oldbranch = ctx.branch()
+ # check if ctx has same branch
+ if oldbranch == label:
+ continue
+
+ def filectxfn(repo, newctx, path):
+ try:
+ return ctx[path]
+ except error.ManifestLookupError:
+ return None
+
+ ui.debug("changing branch of '%s' from '%s' to '%s'\n"
+ % (hex(ctx.node()), oldbranch, label))
+ extra = ctx.extra()
+ extra['branch_change'] = hex(ctx.node())
+            # While changing the branch of a set of linear commits, make sure
+            # that we base our commits on the new parent rather than the old
+            # parent, which was obsoleted while changing the branch
+ p1 = ctx.p1().node()
+ p2 = ctx.p2().node()
+ if p1 in replacements:
+ p1 = replacements[p1][0]
+ if p2 in replacements:
+ p2 = replacements[p2][0]
+
+ mc = context.memctx(repo, (p1, p2),
+ ctx.description(),
+ ctx.files(),
+ filectxfn,
+ user=ctx.user(),
+ date=ctx.date(),
+ extra=extra,
+ branch=label)
+
+ commitphase = ctx.phase()
+ overrides = {('phases', 'new-commit'): commitphase}
+ with repo.ui.configoverride(overrides, 'branch-change'):
+ newnode = repo.commitctx(mc)
+
+ replacements[ctx.node()] = (newnode,)
+ ui.debug('new node id is %s\n' % hex(newnode))
+
+ # create obsmarkers and move bookmarks
+ scmutil.cleanupnodes(repo, replacements, 'branch-change')
+
+ # move the working copy too
+ wctx = repo[None]
+ # in-progress merge is a bit too complex for now.
+ if len(wctx.parents()) == 1:
+ newid = replacements.get(wctx.p1().node())
+ if newid is not None:
+ # avoid import cycle mercurial.cmdutil -> mercurial.hg ->
+ # mercurial.cmdutil
+ from . import hg
+ hg.update(repo, newid[0], quietempty=True)
+
+ ui.status(_("changed branch on %d changesets\n") % len(replacements))
+
def findrepo(p):
while not os.path.isdir(os.path.join(p, ".hg")):
oldp, p = p, os.path.dirname(p)
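
The comment in changebranch() above about basing rewritten commits on the new parent rather than the obsoleted old one amounts to threading a replacements mapping through the stack. A minimal sketch of that remapping over plain strings; rewritestack and the node names are illustrative only:

def rewritestack(stack):
    # stack: (node, parent) pairs in topological order
    replacements = {}
    newparents = {}
    for node, parent in stack:
        # base the rewritten commit on the rewritten parent, if any
        parent = replacements.get(parent, (parent,))[0]
        newnode = 'rewritten-' + node
        newparents[newnode] = parent
        replacements[node] = (newnode,)
    return newparents

assert rewritestack([('a', None), ('b', 'a'), ('c', 'b')]) == {
    'rewritten-a': None,
    'rewritten-b': 'rewritten-a',
    'rewritten-c': 'rewritten-b',
}
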
@@ -823,9 +916,9 @@
total=None, seqno=None, revwidth=None, pathname=None):
node_expander = {
'H': lambda: hex(node),
- 'R': lambda: str(repo.changelog.rev(node)),
+ 'R': lambda: '%d' % repo.changelog.rev(node),
'h': lambda: short(node),
- 'm': lambda: re.sub('[^\w]', '_', str(desc))
+ 'm': lambda: re.sub('[^\w]', '_', desc or '')
}
expander = {
'%': lambda: '%',
@@ -837,13 +930,13 @@
expander.update(node_expander)
if node:
expander['r'] = (lambda:
- str(repo.changelog.rev(node)).zfill(revwidth or 0))
+ ('%d' % repo.changelog.rev(node)).zfill(revwidth or 0))
if total is not None:
- expander['N'] = lambda: str(total)
+ expander['N'] = lambda: '%d' % total
if seqno is not None:
- expander['n'] = lambda: str(seqno)
+ expander['n'] = lambda: '%d' % seqno
if total is not None and seqno is not None:
- expander['n'] = lambda: str(seqno).zfill(len(str(total)))
+ expander['n'] = (lambda: ('%d' % seqno).zfill(len('%d' % total)))
if pathname is not None:
expander['s'] = lambda: os.path.basename(pathname)
expander['d'] = lambda: os.path.dirname(pathname) or '.'
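A quick illustration of the '%n' padding above: the sequence number is zero-filled to the width of the total, now built with '%d' formatting instead of str() so it stays bytes-safe on Python 3.

    seqno, total = 3, 12
    padded = ('%d' % seqno).zfill(len('%d' % total))
    print(padded)  # '03' -- two digits because total has two digits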
@@ -1334,7 +1427,8 @@
if opts.get('exact'):
editor = None
else:
- editor = getcommiteditor(editform=editform, **opts)
+ editor = getcommiteditor(editform=editform,
+ **pycompat.strkwargs(opts))
extra = {}
for idfunc in extrapreimport:
extrapreimportmap[idfunc](repo, extractdata, extra, opts)
@@ -1518,7 +1612,7 @@
width = 80
if not ui.plain():
width = ui.termwidth()
- chunks = patch.diff(repo, node1, node2, match, changes, diffopts,
+ chunks = patch.diff(repo, node1, node2, match, changes, opts=diffopts,
prefix=prefix, relroot=relroot,
hunksfilterfn=hunksfilterfn)
for chunk, label in patch.diffstatui(util.iterlines(chunks),
@@ -1526,7 +1620,7 @@
write(chunk, label=label)
else:
for chunk, label in patch.diffui(repo, node1, node2, match,
- changes, diffopts, prefix=prefix,
+ changes, opts=diffopts, prefix=prefix,
relroot=relroot,
hunksfilterfn=hunksfilterfn):
write(chunk, label=label)
@@ -1571,6 +1665,7 @@
self.hunk = {}
self.lastheader = None
self.footer = None
+ self._columns = templatekw.getlogcolumns()
def flush(self, ctx):
rev = ctx.rev()
@@ -1583,8 +1678,6 @@
if rev in self.hunk:
self.ui.write(self.hunk[rev])
del self.hunk[rev]
- return 1
- return 0
def close(self):
if self.footer:
@@ -1610,10 +1703,8 @@
label='log.node')
return
- date = util.datestr(ctx.date())
-
- # i18n: column positioning for "hg log"
- self.ui.write(_("changeset: %s\n") % scmutil.formatchangeid(ctx),
+ columns = self._columns
+ self.ui.write(columns['changeset'] % scmutil.formatchangeid(ctx),
label=_changesetlabels(ctx))
# branches are shown first before any other names due to backwards
@@ -1621,9 +1712,7 @@
branch = ctx.branch()
# don't show the default branch name
if branch != 'default':
- # i18n: column positioning for "hg log"
- self.ui.write(_("branch: %s\n") % branch,
- label='log.branch')
+ self.ui.write(columns['branch'] % branch, label='log.branch')
for nsname, ns in self.repo.names.iteritems():
# branches has special logic already handled above, so here we just
@@ -1636,33 +1725,25 @@
self.ui.write(ns.logfmt % name,
label='log.%s' % ns.colorname)
if self.ui.debugflag:
- # i18n: column positioning for "hg log"
- self.ui.write(_("phase: %s\n") % ctx.phasestr(),
- label='log.phase')
+ self.ui.write(columns['phase'] % ctx.phasestr(), label='log.phase')
for pctx in scmutil.meaningfulparents(self.repo, ctx):
label = 'log.parent changeset.%s' % pctx.phasestr()
- # i18n: column positioning for "hg log"
- self.ui.write(_("parent: %s\n") % scmutil.formatchangeid(pctx),
+ self.ui.write(columns['parent'] % scmutil.formatchangeid(pctx),
label=label)
if self.ui.debugflag and rev is not None:
mnode = ctx.manifestnode()
mrev = self.repo.manifestlog._revlog.rev(mnode)
- # i18n: column positioning for "hg log"
- self.ui.write(_("manifest: %s\n")
+ self.ui.write(columns['manifest']
% scmutil.formatrevnode(self.ui, mrev, mnode),
label='ui.debug log.manifest')
- # i18n: column positioning for "hg log"
- self.ui.write(_("user: %s\n") % ctx.user(),
- label='log.user')
- # i18n: column positioning for "hg log"
- self.ui.write(_("date: %s\n") % date,
+ self.ui.write(columns['user'] % ctx.user(), label='log.user')
+ self.ui.write(columns['date'] % util.datestr(ctx.date()),
label='log.date')
if ctx.isunstable():
- # i18n: column positioning for "hg log"
instabilities = ctx.instabilities()
- self.ui.write(_("instability: %s\n") % ', '.join(instabilities),
+ self.ui.write(columns['instability'] % ', '.join(instabilities),
label='log.instability')
elif ctx.obsolete():
@@ -1672,31 +1753,22 @@
if self.ui.debugflag:
files = ctx.p1().status(ctx)[:3]
- for key, value in zip([# i18n: column positioning for "hg log"
- _("files:"),
- # i18n: column positioning for "hg log"
- _("files+:"),
- # i18n: column positioning for "hg log"
- _("files-:")], files):
+ for key, value in zip(['files', 'files+', 'files-'], files):
if value:
- self.ui.write("%-12s %s\n" % (key, " ".join(value)),
+ self.ui.write(columns[key] % " ".join(value),
label='ui.debug log.files')
elif ctx.files() and self.ui.verbose:
- # i18n: column positioning for "hg log"
- self.ui.write(_("files: %s\n") % " ".join(ctx.files()),
+ self.ui.write(columns['files'] % " ".join(ctx.files()),
label='ui.note log.files')
if copies and self.ui.verbose:
copies = ['%s (%s)' % c for c in copies]
- # i18n: column positioning for "hg log"
- self.ui.write(_("copies: %s\n") % ' '.join(copies),
+ self.ui.write(columns['copies'] % ' '.join(copies),
label='ui.note log.copies')
extra = ctx.extra()
if extra and self.ui.debugflag:
for key, value in sorted(extra.items()):
- # i18n: column positioning for "hg log"
- self.ui.write(_("extra: %s=%s\n")
- % (key, util.escapestr(value)),
+ self.ui.write(columns['extra'] % (key, util.escapestr(value)),
label='ui.debug log.extra')
description = ctx.description().strip()
@@ -1708,9 +1780,7 @@
label='ui.note log.description')
self.ui.write("\n\n")
else:
- # i18n: column positioning for "hg log"
- self.ui.write(_("summary: %s\n") %
- description.splitlines()[0],
+ self.ui.write(columns['summary'] % description.splitlines()[0],
label='log.summary')
self.ui.write("\n")
@@ -1721,8 +1791,7 @@
if obsfate:
for obsfateline in obsfate:
- # i18n: column positioning for "hg log"
- self.ui.write(_("obsolete: %s\n") % obsfateline,
+ self.ui.write(self._columns['obsolete'] % obsfateline,
label='log.obsfate')
def _exthook(self, ctx):
@@ -1748,7 +1817,8 @@
diffordiffstat(self.ui, self.repo, diffopts, prev, node,
match=matchfn, stat=False,
hunksfilterfn=hunksfilterfn)
- self.ui.write("\n")
+ if stat or diff:
+ self.ui.write("\n")
class jsonchangeset(changeset_printer):
'''format changeset information.'''
@@ -1850,7 +1920,13 @@
self.ui.write("\n }")
class changeset_templater(changeset_printer):
- '''format changeset information.'''
+ '''format changeset information.
+
+ Note: there are a variety of convenience functions to build a
+ changeset_templater for common cases. See functions such as:
+ makelogtemplater, show_changeset, buildcommittemplate, or other
+ functions that use changeset_templater.
+ '''
# Arguments before "buffered" used to be positional. Consider not
# adding/removing arguments before "buffered" to not break callers.
@@ -1859,10 +1935,13 @@
diffopts = diffopts or {}
changeset_printer.__init__(self, ui, repo, matchfn, diffopts, buffered)
+ tres = formatter.templateresources(ui, repo)
self.t = formatter.loadtemplater(ui, tmplspec,
+ defaults=templatekw.keywords,
+ resources=tres,
cache=templatekw.defaulttempl)
self._counter = itertools.count()
- self.cache = {}
+ self.cache = tres['cache'] # shared with _graphnodeformatter()
self._tref = tmplspec.ref
self._parts = {'header': '', 'footer': '',
@@ -1901,14 +1980,9 @@
def _show(self, ctx, copies, matchfn, hunksfilterfn, props):
'''show a single changeset or file revision'''
props = props.copy()
- props.update(templatekw.keywords)
- props['templ'] = self.t
props['ctx'] = ctx
- props['repo'] = self.repo
- props['ui'] = self.repo.ui
props['index'] = index = next(self._counter)
props['revcache'] = {'copies': copies}
- props['cache'] = self.cache
props = pycompat.strkwargs(props)
# write separator, which wouldn't work well with the header part below
@@ -1972,7 +2046,8 @@
return formatter.lookuptemplate(ui, 'changeset', tmpl)
def makelogtemplater(ui, repo, tmpl, buffered=False):
- """Create a changeset_templater from a literal template 'tmpl'"""
+ """Create a changeset_templater from a literal template 'tmpl'
+ byte-string."""
spec = logtemplatespec(tmpl, None)
return changeset_templater(ui, repo, spec, buffered=buffered)
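A minimal, hedged sketch of the convenience helper documented above, as an extension or debug command might use it; the template string and the revset are arbitrary examples.

    from mercurial import cmdutil

    def printlog(ui, repo):
        displayer = cmdutil.makelogtemplater(
            ui, repo, '{rev}:{node|short} {desc|firstline}\n')
        for rev in repo.revs('last(all(), 5)'):
            displayer.show(repo[rev])
        displayer.close()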
@@ -2050,6 +2125,21 @@
if windowsize < sizelimit:
windowsize *= 2
+def _walkrevs(repo, opts):
+ # Default --rev value depends on --follow but --follow behavior
+ # depends on revisions resolved from --rev...
+ follow = opts.get('follow') or opts.get('follow_first')
+ if opts.get('rev'):
+ revs = scmutil.revrange(repo, opts['rev'])
+ elif follow and repo.dirstate.p1() == nullid:
+ revs = smartset.baseset()
+ elif follow:
+ revs = repo.revs('reverse(:.)')
+ else:
+ revs = smartset.spanset(repo)
+ revs.reverse()
+ return revs
+
class FileWalkError(Exception):
pass
@@ -2204,12 +2294,11 @@
function on each context in the window in forward order.'''
follow = opts.get('follow') or opts.get('follow_first')
- revs = _logrevs(repo, opts)
+ revs = _walkrevs(repo, opts)
if not revs:
return []
wanted = set()
- slowpath = match.anypats() or ((match.isexact() or match.prefix()) and
- opts.get('removed'))
+ slowpath = match.anypats() or (not match.always() and opts.get('removed'))
fncache = {}
change = repo.changectx
@@ -2326,90 +2415,36 @@
return iterate()
-def _makefollowlogfilematcher(repo, files, followfirst):
- # When displaying a revision with --patch --follow FILE, we have
- # to know which file of the revision must be diffed. With
- # --follow, we want the names of the ancestors of FILE in the
- # revision, stored in "fcache". "fcache" is populated by
- # reproducing the graph traversal already done by --follow revset
- # and relating revs to file names (which is not "correct" but
- # good enough).
- fcache = {}
- fcacheready = [False]
- pctx = repo['.']
-
- def populate():
- for fn in files:
- fctx = pctx[fn]
- fcache.setdefault(fctx.introrev(), set()).add(fctx.path())
- for c in fctx.ancestors(followfirst=followfirst):
- fcache.setdefault(c.rev(), set()).add(c.path())
-
- def filematcher(rev):
- if not fcacheready[0]:
- # Lazy initialization
- fcacheready[0] = True
- populate()
- return scmutil.matchfiles(repo, fcache.get(rev, []))
-
- return filematcher
-
-def _makenofollowlogfilematcher(repo, pats, opts):
- '''hook for extensions to override the filematcher for non-follow cases'''
- return None
-
-def _makelogrevset(repo, pats, opts, revs):
- """Return (expr, filematcher) where expr is a revset string built
- from log options and file patterns or None. If --stat or --patch
- are not passed filematcher is None. Otherwise it is a callable
- taking a revision number and returning a match objects filtering
- the files to be detailed when displaying the revision.
+def _makelogmatcher(repo, revs, pats, opts):
+ """Build matcher and expanded patterns from log options
+
+ If --follow, revs are the revisions to follow from.
+
+ Returns (match, pats, slowpath) where
+ - match: a matcher built from the given pats and -I/-X opts
+ - pats: patterns used (globs are expanded on Windows)
+ - slowpath: True if patterns aren't as simple as scanning filelogs
"""
- opt2revset = {
- 'no_merges': ('not merge()', None),
- 'only_merges': ('merge()', None),
- '_ancestors': ('ancestors(%(val)s)', None),
- '_fancestors': ('_firstancestors(%(val)s)', None),
- '_descendants': ('descendants(%(val)s)', None),
- '_fdescendants': ('_firstdescendants(%(val)s)', None),
- '_matchfiles': ('_matchfiles(%(val)s)', None),
- 'date': ('date(%(val)r)', None),
- 'branch': ('branch(%(val)r)', ' or '),
- '_patslog': ('filelog(%(val)r)', ' or '),
- '_patsfollow': ('follow(%(val)r)', ' or '),
- '_patsfollowfirst': ('_followfirst(%(val)r)', ' or '),
- 'keyword': ('keyword(%(val)r)', ' or '),
- 'prune': ('not (%(val)r or ancestors(%(val)r))', ' and '),
- 'user': ('user(%(val)r)', ' or '),
- }
-
- opts = dict(opts)
- # follow or not follow?
- follow = opts.get('follow') or opts.get('follow_first')
- if opts.get('follow_first'):
- followfirst = 1
- else:
- followfirst = 0
- # --follow with FILE behavior depends on revs...
- it = iter(revs)
- startrev = next(it)
- followdescendants = startrev < next(it, startrev)
-
- # branch and only_branch are really aliases and must be handled at
- # the same time
- opts['branch'] = opts.get('branch', []) + opts.get('only_branch', [])
- opts['branch'] = [repo.lookupbranch(b) for b in opts['branch']]
# pats/include/exclude are passed to match.match() directly in
# _matchfiles() revset but walkchangerevs() builds its matcher with
# scmutil.match(). The difference is input pats are globbed on
# platforms without shell expansion (windows).
wctx = repo[None]
match, pats = scmutil.matchandpats(wctx, pats, opts)
- slowpath = match.anypats() or ((match.isexact() or match.prefix()) and
- opts.get('removed'))
+ slowpath = match.anypats() or (not match.always() and opts.get('removed'))
if not slowpath:
+ follow = opts.get('follow') or opts.get('follow_first')
+ startctxs = []
+ if follow and opts.get('rev'):
+ startctxs = [repo[r] for r in revs]
for f in match.files():
- if follow and f not in wctx:
+ if follow and startctxs:
+ # No idea if the path was a directory at that revision, so
+ # take the slow path.
+ if any(f not in c for c in startctxs):
+ slowpath = True
+ continue
+ elif follow and f not in wctx:
# If the file exists, it may be a directory, so let it
# take the slow path.
if os.path.exists(repo.wjoin(f)):
@@ -2417,7 +2452,7 @@
continue
else:
raise error.Abort(_('cannot follow file not in parent '
- 'revision: "%s"') % f)
+ 'revision: "%s"') % f)
filelog = repo.file(f)
if not filelog:
# A zero count may be a directory or deleted file, so
@@ -2438,15 +2473,62 @@
else:
slowpath = False
- fpats = ('_patsfollow', '_patsfollowfirst')
- fnopats = (('_ancestors', '_fancestors'),
- ('_descendants', '_fdescendants'))
+ return match, pats, slowpath
+
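Hedged illustration of the slowpath rule above (the helper is private to cmdutil, so this only demonstrates the behaviour): plain file names can be answered from filelogs, while patterns or --removed force the slow, manifest-scanning path. 'README' is a hypothetical tracked file.

    revs = repo.revs('.')
    # a plain tracked path stays on the fast filelog scan
    m, pats, slowpath = _makelogmatcher(repo, revs, ['README'], {})
    # a glob pattern (match.anypats()) or --removed forces the slow path
    m, pats, slowpath = _makelogmatcher(repo, revs, ['glob:*.py'],
                                        {'removed': True})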
+def _fileancestors(repo, revs, match, followfirst):
+ fctxs = []
+ for r in revs:
+ ctx = repo[r]
+ fctxs.extend(ctx[f].introfilectx() for f in ctx.walk(match))
+
+ # When displaying a revision with --patch --follow FILE, we have
+ # to know which file of the revision must be diffed. With
+ # --follow, we want the names of the ancestors of FILE in the
+ # revision, stored in "fcache". "fcache" is populated as a side effect
+ # of the graph traversal.
+ fcache = {}
+ def filematcher(rev):
+ return scmutil.matchfiles(repo, fcache.get(rev, []))
+
+ def revgen():
+ for rev, cs in dagop.filectxancestors(fctxs, followfirst=followfirst):
+ fcache[rev] = [c.path() for c in cs]
+ yield rev
+ return smartset.generatorset(revgen(), iterasc=False), filematcher
+
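A hedged note on the generator/matcher pair returned above: fcache is only filled while the revision generator is consumed, so filematcher(rev) is meant to be called for revisions the iteration has already produced. 'match' stands for any matcher built with scmutil.match.

    revs, filematcher = _fileancestors(repo, [repo['.'].rev()], match,
                                       followfirst=False)
    for rev in revs:
        # fcache[rev] was populated just before this rev was yielded
        files = filematcher(rev).files()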
+def _makenofollowlogfilematcher(repo, pats, opts):
+ '''hook for extensions to override the filematcher for non-follow cases'''
+ return None
+
+_opt2logrevset = {
+ 'no_merges': ('not merge()', None),
+ 'only_merges': ('merge()', None),
+ '_matchfiles': (None, '_matchfiles(%ps)'),
+ 'date': ('date(%s)', None),
+ 'branch': ('branch(%s)', '%lr'),
+ '_patslog': ('filelog(%s)', '%lr'),
+ 'keyword': ('keyword(%s)', '%lr'),
+ 'prune': ('ancestors(%s)', 'not %lr'),
+ 'user': ('user(%s)', '%lr'),
+}
+
+def _makelogrevset(repo, match, pats, slowpath, opts):
+ """Return a revset string built from log options and file patterns"""
+ opts = dict(opts)
+ # follow or not follow?
+ follow = opts.get('follow') or opts.get('follow_first')
+
+ # branch and only_branch are really aliases and must be handled at
+ # the same time
+ opts['branch'] = opts.get('branch', []) + opts.get('only_branch', [])
+ opts['branch'] = [repo.lookupbranch(b) for b in opts['branch']]
+
if slowpath:
# See walkchangerevs() slow path.
#
# pats/include/exclude cannot be represented as separate
# revset expressions as their filtering logic applies at file
- # level. For instance "-I a -X a" matches a revision touching
+ # level. For instance "-I a -X b" matches a revision touching
# "a" and "b" while "file(a) and not file(b)" does
# not. Besides, filesets are evaluated against the working
# directory.
@@ -2457,130 +2539,84 @@
matchargs.append('i:' + p)
for p in opts.get('exclude', []):
matchargs.append('x:' + p)
- matchargs = ','.join(('%r' % p) for p in matchargs)
opts['_matchfiles'] = matchargs
- if follow:
- opts[fnopats[0][followfirst]] = '.'
- else:
- if follow:
- if pats:
- # follow() revset interprets its file argument as a
- # manifest entry, so use match.files(), not pats.
- opts[fpats[followfirst]] = list(match.files())
- else:
- op = fnopats[followdescendants][followfirst]
- opts[op] = 'rev(%d)' % startrev
- else:
- opts['_patslog'] = list(pats)
-
- filematcher = None
- if opts.get('patch') or opts.get('stat'):
- # When following files, track renames via a special matcher.
- # If we're forced to take the slowpath it means we're following
- # at least one pattern/directory, so don't bother with rename tracking.
- if follow and not match.always() and not slowpath:
- # _makefollowlogfilematcher expects its files argument to be
- # relative to the repo root, so use match.files(), not pats.
- filematcher = _makefollowlogfilematcher(repo, match.files(),
- followfirst)
- else:
- filematcher = _makenofollowlogfilematcher(repo, pats, opts)
- if filematcher is None:
- filematcher = lambda rev: match
+ elif not follow:
+ opts['_patslog'] = list(pats)
expr = []
for op, val in sorted(opts.iteritems()):
if not val:
continue
- if op not in opt2revset:
+ if op not in _opt2logrevset:
continue
- revop, andor = opt2revset[op]
- if '%(val)' not in revop:
+ revop, listop = _opt2logrevset[op]
+ if revop and '%' not in revop:
expr.append(revop)
+ elif not listop:
+ expr.append(revsetlang.formatspec(revop, val))
else:
- if not isinstance(val, list):
- e = revop % {'val': val}
- else:
- e = '(' + andor.join((revop % {'val': v}) for v in val) + ')'
- expr.append(e)
+ if revop:
+ val = [revsetlang.formatspec(revop, v) for v in val]
+ expr.append(revsetlang.formatspec(listop, val))
if expr:
expr = '(' + ' and '.join(expr) + ')'
else:
expr = None
- return expr, filematcher
+ return expr
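A hedged example of what the rewritten _makelogrevset() above produces; the exact quoting comes from revsetlang.formatspec, so treat the string as approximate.

    opts = {'user': ['alice'], 'no_merges': True}
    expr = _makelogrevset(repo, match, [], False, opts)
    # roughly "(not merge() and <a user('alice') clause>)", clauses joined
    # with ' and ' in sorted option order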
def _logrevs(repo, opts):
- # Default --rev value depends on --follow but --follow behavior
- # depends on revisions resolved from --rev...
+ """Return the initial set of revisions to be filtered or followed"""
follow = opts.get('follow') or opts.get('follow_first')
if opts.get('rev'):
revs = scmutil.revrange(repo, opts['rev'])
elif follow and repo.dirstate.p1() == nullid:
revs = smartset.baseset()
elif follow:
- revs = repo.revs('reverse(:.)')
+ revs = repo.revs('.')
else:
revs = smartset.spanset(repo)
revs.reverse()
return revs
-def getgraphlogrevs(repo, pats, opts):
- """Return (revs, expr, filematcher) where revs is an iterable of
- revision numbers, expr is a revset string built from log options
- and file patterns or None, and used to filter 'revs'. If --stat or
- --patch are not passed filematcher is None. Otherwise it is a
- callable taking a revision number and returning a match objects
- filtering the files to be detailed when displaying the revision.
+def getlogrevs(repo, pats, opts):
+ """Return (revs, filematcher) where revs is a smartset
+
+ filematcher is a callable taking a revision number and returning a match
+ object filtering the files to be detailed when displaying the revision.
"""
+ follow = opts.get('follow') or opts.get('follow_first')
+ followfirst = opts.get('follow_first')
limit = loglimit(opts)
revs = _logrevs(repo, opts)
if not revs:
- return smartset.baseset(), None, None
- expr, filematcher = _makelogrevset(repo, pats, opts, revs)
- if opts.get('rev'):
+ return smartset.baseset(), None
+ match, pats, slowpath = _makelogmatcher(repo, revs, pats, opts)
+ filematcher = None
+ if follow:
+ if slowpath or match.always():
+ revs = dagop.revancestors(repo, revs, followfirst=followfirst)
+ else:
+ revs, filematcher = _fileancestors(repo, revs, match, followfirst)
+ revs.reverse()
+ if filematcher is None:
+ filematcher = _makenofollowlogfilematcher(repo, pats, opts)
+ if filematcher is None:
+ def filematcher(rev):
+ return match
+
+ expr = _makelogrevset(repo, match, pats, slowpath, opts)
+ if opts.get('graph') and opts.get('rev'):
# User-specified revs might be unsorted, but don't sort before
# _makelogrevset because it might depend on the order of revs
if not (revs.isdescending() or revs.istopo()):
revs.sort(reverse=True)
if expr:
- matcher = revset.match(repo.ui, expr)
+ matcher = revset.match(None, expr)
revs = matcher(repo, revs)
if limit is not None:
- limitedrevs = []
- for idx, rev in enumerate(revs):
- if idx >= limit:
- break
- limitedrevs.append(rev)
- revs = smartset.baseset(limitedrevs)
-
- return revs, expr, filematcher
-
-def getlogrevs(repo, pats, opts):
- """Return (revs, expr, filematcher) where revs is an iterable of
- revision numbers, expr is a revset string built from log options
- and file patterns or None, and used to filter 'revs'. If --stat or
- --patch are not passed filematcher is None. Otherwise it is a
- callable taking a revision number and returning a match objects
- filtering the files to be detailed when displaying the revision.
- """
- limit = loglimit(opts)
- revs = _logrevs(repo, opts)
- if not revs:
- return smartset.baseset([]), None, None
- expr, filematcher = _makelogrevset(repo, pats, opts, revs)
- if expr:
- matcher = revset.match(repo.ui, expr)
- revs = matcher(repo, revs)
- if limit is not None:
- limitedrevs = []
- for idx, r in enumerate(revs):
- if limit <= idx:
- break
- limitedrevs.append(r)
- revs = smartset.baseset(limitedrevs)
-
- return revs, expr, filematcher
+ revs = revs.slice(0, limit)
+ return revs, filematcher
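A hedged sketch of the consolidated getlogrevs() entry point above, as 'hg log' (and extensions) now drive it: it always returns a usable filematcher, and the --limit slicing happens here rather than in the caller.

    revs, filematcher = getlogrevs(repo, [], {'rev': ['tip'], 'limit': 3})
    for rev in revs:
        match = filematcher(rev)      # files to detail for this revision
        ui.write('%d\n' % rev)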
def _parselinerangelogopt(repo, opts):
"""Parse --line-range log option and return a list of tuples (filename,
@@ -2675,18 +2711,13 @@
return templatekw.showgraphnode # fast path for "{graphnode}"
spec = templater.unquotestring(spec)
- templ = formatter.maketemplater(ui, spec)
- cache = {}
+ tres = formatter.templateresources(ui)
if isinstance(displayer, changeset_templater):
- cache = displayer.cache # reuse cache of slow templates
- props = templatekw.keywords.copy()
- props['templ'] = templ
- props['cache'] = cache
+ tres['cache'] = displayer.cache # reuse cache of slow templates
+ templ = formatter.maketemplater(ui, spec, defaults=templatekw.keywords,
+ resources=tres)
def formatnode(repo, ctx):
- props['ctx'] = ctx
- props['repo'] = repo
- props['ui'] = repo.ui
- props['revcache'] = {}
+ props = {'ctx': ctx, 'repo': repo, 'revcache': {}}
return templ.render(props)
return formatnode
@@ -2733,7 +2764,7 @@
firstedge = next(edges)
width = firstedge[2]
displayer.show(ctx, copies=copies, matchfn=revmatchfn,
- _graphwidth=width, **props)
+ _graphwidth=width, **pycompat.strkwargs(props))
lines = displayer.hunk.pop(rev).split('\n')
if not lines[-1]:
del lines[-1]
@@ -2743,9 +2774,8 @@
lines = []
displayer.close()
-def graphlog(ui, repo, pats, opts):
+def graphlog(ui, repo, revs, filematcher, opts):
# Parameters are identical to log command ones
- revs, expr, filematcher = getgraphlogrevs(repo, pats, opts)
revdag = graphmod.dagwalker(repo, revs)
getrenamed = None
@@ -2975,8 +3005,9 @@
for f in remaining:
count += 1
ui.progress(_('skipping'), count, total=total, unit=_('files'))
- warnings.append(_('not removing %s: file still exists\n')
- % m.rel(f))
+ if ui.verbose or (f in files):
+ warnings.append(_('not removing %s: file still exists\n')
+ % m.rel(f))
ret = 1
ui.progress(_('skipping'), None)
else:
@@ -3021,21 +3052,34 @@
return ret
+def _updatecatformatter(fm, ctx, matcher, path, decode):
+ """Hook for adding data to the formatter used by ``hg cat``.
+
+ Extensions (e.g., lfs) can wrap this to inject keywords/data, but must call
+ this method first."""
+ data = ctx[path].data()
+ if decode:
+ data = ctx.repo().wwritedata(path, data)
+ fm.startitem()
+ fm.write('data', '%s', data)
+ fm.data(abspath=path, path=matcher.rel(path))
+
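A hedged sketch of the extension hook described above, in the spirit of the lfs use case: wrap _updatecatformatter(), call the original first, then add extra formatter data. The 'size' keyword is hypothetical.

    from mercurial import cmdutil, extensions

    def _catformatter(orig, fm, ctx, matcher, path, decode):
        orig(fm, ctx, matcher, path, decode)      # must run the original first
        fm.data(size=len(ctx[path].data()))       # hypothetical extra keyword

    def uisetup(ui):
        extensions.wrapfunction(cmdutil, '_updatecatformatter', _catformatter)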
def cat(ui, repo, ctx, matcher, basefm, fntemplate, prefix, **opts):
err = 1
+ opts = pycompat.byteskwargs(opts)
def write(path):
filename = None
if fntemplate:
filename = makefilename(repo, fntemplate, ctx.node(),
pathname=os.path.join(prefix, path))
+ # attempt to create the directory if it does not already exist
+ try:
+ os.makedirs(os.path.dirname(filename))
+ except OSError:
+ pass
with formatter.maybereopen(basefm, filename, opts) as fm:
- data = ctx[path].data()
- if opts.get('decode'):
- data = repo.wwritedata(path, data)
- fm.startitem()
- fm.write('data', '%s', data)
- fm.data(abspath=path, path=matcher.rel(path))
+ _updatecatformatter(fm, ctx, matcher, path, opts.get('decode'))
# Automation often uses hg cat on single files, so special case it
# for performance to avoid the cost of parsing the manifest.
@@ -3060,7 +3104,8 @@
submatch = matchmod.subdirmatcher(subpath, matcher)
if not sub.cat(submatch, basefm, fntemplate,
- os.path.join(prefix, sub._path), **opts):
+ os.path.join(prefix, sub._path),
+ **pycompat.strkwargs(opts)):
err = 0
except error.RepoLookupError:
ui.status(_("skipping missing subrepository: %s\n")
@@ -3124,6 +3169,8 @@
# base o - first parent of the changeset to amend
wctx = repo[None]
+ # Copy to avoid mutating input
+ extra = extra.copy()
# Update extra dict from amended commit (e.g. to preserve graft
# source)
extra.update(old.extra())
@@ -3200,7 +3247,7 @@
fctx = wctx[path]
flags = fctx.flags()
- mctx = context.memfilectx(repo,
+ mctx = context.memfilectx(repo, ctx_,
fctx.path(), fctx.data(),
islink='l' in flags,
isexec='x' in flags,
@@ -3445,6 +3492,7 @@
return repo.status(match=scmutil.match(repo[None], pats, opts))
def revert(ui, repo, ctx, parents, *pats, **opts):
+ opts = pycompat.byteskwargs(opts)
parent, p2 = parents
node = ctx.node()
@@ -3706,7 +3754,7 @@
else:
util.rename(target, bakname)
if ui.verbose or not exact:
- if not isinstance(msg, basestring):
+ if not isinstance(msg, bytes):
msg = msg(abs)
ui.status(msg % rel)
elif exact:
@@ -3722,7 +3770,8 @@
# Revert the subrepos on the revert list
for sub in targetsubs:
try:
- wctx.sub(sub).revert(ctx.substate[sub], *pats, **opts)
+ wctx.sub(sub).revert(ctx.substate[sub], *pats,
+ **pycompat.strkwargs(opts))
except KeyError:
raise error.Abort("subrepository '%s' does not exist in %s!"
% (sub, short(ctx.node())))
@@ -3802,9 +3851,8 @@
operation = 'discard'
reversehunks = True
if node != parent:
- operation = 'revert'
- reversehunks = repo.ui.configbool('experimental',
- 'revertalternateinteractivemode')
+ operation = 'apply'
+ reversehunks = False
if reversehunks:
diff = patch.diff(repo, ctx.node(), None, m, opts=diffopts)
else:
@@ -3869,6 +3917,7 @@
repo.dirstate.copy(copied[f], f)
class command(registrar.command):
+ """deprecated: used registrar.command instead"""
def _doregister(self, func, name, *args, **kwargs):
func._deprecatedregistrar = True # flag for deprecwarn in extensions.py
return super(command, self)._doregister(func, name, *args, **kwargs)
--- a/mercurial/color.py Mon Jan 08 16:07:51 2018 -0800
+++ b/mercurial/color.py Mon Jan 22 17:53:02 2018 -0500
@@ -87,12 +87,14 @@
'branches.inactive': 'none',
'diff.changed': 'white',
'diff.deleted': 'red',
+ 'diff.deleted.highlight': 'red bold underline',
'diff.diffline': 'bold',
'diff.extended': 'cyan bold',
'diff.file_a': 'red bold',
'diff.file_b': 'green bold',
'diff.hunk': 'magenta',
'diff.inserted': 'green',
+ 'diff.inserted.highlight': 'green bold underline',
'diff.tab': '',
'diff.trailingwhitespace': 'bold red_background',
'changeset.public': '',
@@ -100,6 +102,15 @@
'changeset.secret': '',
'diffstat.deleted': 'red',
'diffstat.inserted': 'green',
+ 'formatvariant.name.mismatchconfig': 'red',
+ 'formatvariant.name.mismatchdefault': 'yellow',
+ 'formatvariant.name.uptodate': 'green',
+ 'formatvariant.repo.mismatchconfig': 'red',
+ 'formatvariant.repo.mismatchdefault': 'yellow',
+ 'formatvariant.repo.uptodate': 'green',
+ 'formatvariant.config.special': 'yellow',
+ 'formatvariant.config.default': 'green',
+ 'formatvariant.default': '',
'histedit.remaining': 'red bold',
'ui.prompt': 'yellow',
'log.changeset': 'yellow',
@@ -181,7 +192,7 @@
configstyles(ui)
def _modesetup(ui):
- if ui.plain():
+ if ui.plain('color'):
return None
config = ui.config('ui', 'color')
if config == 'debug':
@@ -473,7 +484,7 @@
_win32print(ui, text, writefunc, **opts)
def _win32print(ui, text, writefunc, **opts):
- label = opts.get('label', '')
+ label = opts.get(r'label', '')
attr = origattr
def mapcolor(val, attr):
--- a/mercurial/commands.py Mon Jan 08 16:07:51 2018 -0800
+++ b/mercurial/commands.py Mon Jan 22 17:53:02 2018 -0500
@@ -43,12 +43,14 @@
lock as lockmod,
merge as mergemod,
obsolete,
+ obsutil,
patch,
phases,
pycompat,
rcutil,
registrar,
revsetlang,
+ rewriteutil,
scmutil,
server,
sshserver,
@@ -65,6 +67,7 @@
table.update(debugcommandsmod.command._table)
command = registrar.command(table)
+readonly = registrar.command.readonly
# common command options
@@ -102,10 +105,6 @@
_("when to paginate (boolean, always, auto, or never)"), _('TYPE')),
]
-# options which must be pre-parsed before loading configs and extensions
-# TODO: perhaps --debugger should be included
-earlyoptflags = ("--cwd", "-R", "--repository", "--repo", "--config")
-
dryrunopts = cmdutil.dryrunopts
remoteopts = cmdutil.remoteopts
walkopts = cmdutil.walkopts
@@ -295,7 +294,10 @@
# to mimic the behavior of Mercurial before version 1.5
opts['file'] = True
- ctx = scmutil.revsingle(repo, opts.get('rev'))
+ rev = opts.get('rev')
+ if rev:
+ repo = scmutil.unhidehashlikerevs(repo, [rev], 'nowarn')
+ ctx = scmutil.revsingle(repo, rev)
rootfm = ui.formatter('annotate', opts)
if ui.quiet:
@@ -466,7 +468,10 @@
'''
opts = pycompat.byteskwargs(opts)
- ctx = scmutil.revsingle(repo, opts.get('rev'))
+ rev = opts.get('rev')
+ if rev:
+ repo = scmutil.unhidehashlikerevs(repo, [rev], 'nowarn')
+ ctx = scmutil.revsingle(repo, rev)
if not ctx:
raise error.Abort(_('no working directory: please specify a revision'))
node = ctx.node()
@@ -857,7 +862,7 @@
ui.status(_('changeset %d:%s: %s\n') % (ctx, ctx, transition))
hbisect.checkstate(state)
# bisect
- nodes, changesets, bgood = hbisect.bisect(repo.changelog, state)
+ nodes, changesets, bgood = hbisect.bisect(repo, state)
# update to next check
node = nodes[0]
mayupdate(repo, node, show_stats=False)
@@ -870,7 +875,7 @@
hbisect.checkstate(state)
# actually bisect
- nodes, changesets, good = hbisect.bisect(repo.changelog, state)
+ nodes, changesets, good = hbisect.bisect(repo, state)
if extend:
if not changesets:
extendnode = hbisect.extendrange(repo, state, nodes, good)
@@ -997,7 +1002,9 @@
@command('branch',
[('f', 'force', None,
_('set branch name even if it shadows an existing branch')),
- ('C', 'clean', None, _('reset branch name to parent branch name'))],
+ ('C', 'clean', None, _('reset branch name to parent branch name')),
+ ('r', 'rev', [], _('change branches of the given revs (EXPERIMENTAL)')),
+ ],
_('[-fC] [NAME]'))
def branch(ui, repo, label=None, **opts):
"""set or show the current branch name
@@ -1029,10 +1036,13 @@
Returns 0 on success.
"""
opts = pycompat.byteskwargs(opts)
+ revs = opts.get('rev')
if label:
label = label.strip()
if not opts.get('clean') and not label:
+ if revs:
+ raise error.Abort(_("no branch name specified for the revisions"))
ui.write("%s\n" % repo.dirstate.branch())
return
@@ -1042,13 +1052,18 @@
repo.dirstate.setbranch(label)
ui.status(_('reset working directory to branch %s\n') % label)
elif label:
+
+ scmutil.checknewlabel(repo, label, 'branch')
+ if revs:
+ return cmdutil.changebranch(ui, repo, revs, label)
+
if not opts.get('force') and label in repo.branchmap():
if label not in [p.branch() for p in repo[None].parents()]:
raise error.Abort(_('a branch of the same name already'
' exists'),
# i18n: "it" refers to an existing branch
hint=_("use 'hg update' to switch to it"))
- scmutil.checknewlabel(repo, label, 'branch')
+
repo.dirstate.setbranch(label)
ui.status(_('marked working directory as branch %s\n') % label)
@@ -1064,7 +1079,7 @@
_('show only branches that have unmerged heads (DEPRECATED)')),
('c', 'closed', False, _('show normal and closed branches')),
] + formatteropts,
- _('[-c]'))
+ _('[-c]'), cmdtype=readonly)
def branches(ui, repo, active=False, closed=False, **opts):
"""list repository named branches
@@ -1258,7 +1273,7 @@
('', 'decode', None, _('apply any matching decode filter')),
] + walkopts + formatteropts,
_('[OPTION]... FILE...'),
- inferrepo=True)
+ inferrepo=True, cmdtype=readonly)
def cat(ui, repo, file1, *pats, **opts):
"""output the current or given revision of files
@@ -1280,7 +1295,11 @@
Returns 0 on success.
"""
- ctx = scmutil.revsingle(repo, opts.get('rev'))
+ opts = pycompat.byteskwargs(opts)
+ rev = opts.get('rev')
+ if rev:
+ repo = scmutil.unhidehashlikerevs(repo, [rev], 'nowarn')
+ ctx = scmutil.revsingle(repo, rev)
m = scmutil.match(ctx, (file1,) + pats, opts)
fntemplate = opts.pop('output', '')
if cmdutil.isstdiofilename(fntemplate):
@@ -1292,7 +1311,8 @@
ui.pager('cat')
fm = ui.formatter('cat', opts)
with fm:
- return cmdutil.cat(ui, repo, ctx, m, fm, fntemplate, '', **opts)
+ return cmdutil.cat(ui, repo, ctx, m, fm, fntemplate, '',
+ **pycompat.strkwargs(opts))
@command('^clone',
[('U', 'noupdate', None, _('the clone will include an empty working '
@@ -1544,13 +1564,7 @@
raise error.Abort(_('cannot amend with ui.commitsubrepos enabled'))
old = repo['.']
- if not old.mutable():
- raise error.Abort(_('cannot amend public changesets'))
- if len(repo[None].parents()) > 1:
- raise error.Abort(_('cannot amend while merging'))
- allowunstable = obsolete.isenabled(repo, obsolete.allowunstableopt)
- if not allowunstable and old.children():
- raise error.Abort(_('cannot amend changeset with children'))
+ rewriteutil.precheck(repo, [old.rev()], 'amend')
# Currently histedit gets confused if an amend happens while histedit
# is in progress. Since we have a checkunfinished command, we are
@@ -1604,7 +1618,7 @@
('l', 'local', None, _('edit repository config')),
('g', 'global', None, _('edit global config'))] + formatteropts,
_('[-u] [NAME]...'),
- optionalrepo=True)
+ optionalrepo=True, cmdtype=readonly)
def config(ui, repo, *values, **opts):
"""show combined config settings from all hgrc files
@@ -1751,7 +1765,7 @@
def debugcomplete(ui, cmd='', **opts):
"""returns the completion list associated with the given command"""
- if opts.get('options'):
+ if opts.get(r'options'):
options = []
otables = [globalopts]
if cmd:
@@ -1777,7 +1791,7 @@
('c', 'change', '', _('change made by revision'), _('REV'))
] + diffopts + diffopts2 + walkopts + subrepoopts,
_('[OPTION]... ([-c REV] | [-r REV1 [-r REV2]]) [FILE]...'),
- inferrepo=True)
+ inferrepo=True, cmdtype=readonly)
def diff(ui, repo, *pats, **opts):
"""diff repository (or selected files)
@@ -1846,9 +1860,11 @@
msg = _('cannot specify --rev and --change at the same time')
raise error.Abort(msg)
elif change:
+ repo = scmutil.unhidehashlikerevs(repo, [change], 'nowarn')
node2 = scmutil.revsingle(repo, change, None).node()
node1 = repo[node2].p1().node()
else:
+ repo = scmutil.unhidehashlikerevs(repo, revs, 'nowarn')
node1, node2 = scmutil.revpair(repo, revs)
if reverse:
@@ -1867,7 +1883,7 @@
('', 'switch-parent', None, _('diff against the second parent')),
('r', 'rev', [], _('revisions to export'), _('REV')),
] + diffopts,
- _('[OPTION]... [-o OUTFILESPEC] [-r] [REV]...'))
+ _('[OPTION]... [-o OUTFILESPEC] [-r] [REV]...'), cmdtype=readonly)
def export(ui, repo, *changesets, **opts):
"""dump the header and diffs for one or more changesets
@@ -1932,6 +1948,7 @@
changesets += tuple(opts.get('rev', []))
if not changesets:
changesets = ['.']
+ repo = scmutil.unhidehashlikerevs(repo, changesets, 'nowarn')
revs = scmutil.revrange(repo, changesets)
if not revs:
raise error.Abort(_("export requires at least one changeset"))
@@ -1948,7 +1965,7 @@
[('r', 'rev', '', _('search the repository as it is in REV'), _('REV')),
('0', 'print0', None, _('end filenames with NUL, for use with xargs')),
] + walkopts + formatteropts + subrepoopts,
- _('[OPTION]... [FILE]...'))
+ _('[OPTION]... [FILE]...'), cmdtype=readonly)
def files(ui, repo, *pats, **opts):
"""list tracked files
@@ -1995,7 +2012,10 @@
"""
opts = pycompat.byteskwargs(opts)
- ctx = scmutil.revsingle(repo, opts.get('rev'), None)
+ rev = opts.get('rev')
+ if rev:
+ repo = scmutil.unhidehashlikerevs(repo, [rev], 'nowarn')
+ ctx = scmutil.revsingle(repo, rev, None)
end = '\n'
if opts.get('print0'):
@@ -2321,7 +2341,7 @@
('d', 'date', None, _('list the date (short with -q)')),
] + formatteropts + walkopts,
_('[OPTION]... PATTERN [FILE]...'),
- inferrepo=True)
+ inferrepo=True, cmdtype=readonly)
def grep(ui, repo, pattern, *pats, **opts):
"""search revision history for a pattern in specified files
@@ -2564,7 +2584,7 @@
('a', 'active', False, _('show active branchheads only (DEPRECATED)')),
('c', 'closed', False, _('show normal and closed branch heads')),
] + templateopts,
- _('[-ct] [-r STARTREV] [REV]...'))
+ _('[-ct] [-r STARTREV] [REV]...'), cmdtype=readonly)
def heads(ui, repo, *branchrevs, **opts):
"""show branch heads
@@ -2592,8 +2612,10 @@
opts = pycompat.byteskwargs(opts)
start = None
- if 'rev' in opts:
- start = scmutil.revsingle(repo, opts['rev'], None).node()
+ rev = opts.get('rev')
+ if rev:
+ repo = scmutil.unhidehashlikerevs(repo, [rev], 'nowarn')
+ start = scmutil.revsingle(repo, rev, None).node()
if opts.get('topo'):
heads = [repo[h] for h in repo.heads(start)]
@@ -2637,7 +2659,7 @@
('s', 'system', [], _('show help for specific platform(s)')),
],
_('[-ecks] [TOPIC]'),
- norepo=True)
+ norepo=True, cmdtype=readonly)
def help_(ui, name=None, **opts):
"""show help for a given topic or a help overview
@@ -2679,7 +2701,7 @@
('B', 'bookmarks', None, _('show bookmarks')),
] + remoteopts + formatteropts,
_('[-nibtB] [-r REV] [SOURCE]'),
- optionalrepo=True)
+ optionalrepo=True, cmdtype=readonly)
def identify(ui, repo, source=None, rev=None,
num=None, id=None, branch=None, tags=None, bookmarks=None, **opts):
"""identify the working directory or specified revision
@@ -2777,6 +2799,8 @@
fm.data(node=hex(remoterev))
fm.data(bookmarks=fm.formatlist(bms, name='bookmark'))
else:
+ if rev:
+ repo = scmutil.unhidehashlikerevs(repo, [rev], 'nowarn')
ctx = scmutil.revsingle(repo, rev, None)
if ctx.rev() is None:
@@ -3254,7 +3278,7 @@
_('do not display revision or any of its ancestors'), _('REV')),
] + logopts + walkopts,
_('[OPTION]... [FILE]'),
- inferrepo=True)
+ inferrepo=True, cmdtype=readonly)
def log(ui, repo, *pats, **opts):
"""show revision history of entire repository or files
@@ -3268,7 +3292,7 @@
File history is shown without following rename or copy history of
files. Use -f/--follow with a filename to follow history across
renames and copies. --follow without a filename will only show
- ancestors or descendants of the starting revision.
+ ancestors of the starting revision.
By default this command prints revision number and changeset id,
tags, non-trivial parents, user, date and time, and a summary for
@@ -3393,17 +3417,14 @@
_('FILE arguments are not compatible with --line-range option')
)
- if opts.get('follow') and opts.get('rev'):
- opts['rev'] = [revsetlang.formatspec('reverse(::%lr)', opts.get('rev'))]
- del opts['follow']
+ repo = scmutil.unhidehashlikerevs(repo, opts.get('rev'), 'nowarn')
+ revs, filematcher = cmdutil.getlogrevs(repo, pats, opts)
+ hunksfilter = None
if opts.get('graph'):
if linerange:
raise error.Abort(_('graph not supported with line range patterns'))
- return cmdutil.graphlog(ui, repo, pats, opts)
-
- revs, expr, filematcher = cmdutil.getlogrevs(repo, pats, opts)
- hunksfilter = None
+ return cmdutil.graphlog(ui, repo, revs, filematcher, opts)
if linerange:
revs, lrfilematcher, hunksfilter = cmdutil.getloglinerangerevs(
@@ -3420,9 +3441,6 @@
elif filematcher is None:
filematcher = lrfilematcher
- limit = cmdutil.loglimit(opts)
- count = 0
-
getrenamed = None
if opts.get('copies'):
endrev = None
@@ -3433,8 +3451,6 @@
ui.pager('log')
displayer = cmdutil.show_changeset(ui, repo, opts, buffered=True)
for rev in revs:
- if count == limit:
- break
ctx = repo[rev]
copies = None
if getrenamed is not None and rev:
@@ -3453,8 +3469,7 @@
revhunksfilter = None
displayer.show(ctx, copies=copies, matchfn=revmatchfn,
hunksfilterfn=revhunksfilter)
- if displayer.flush(ctx):
- count += 1
+ displayer.flush(ctx)
displayer.close()
@@ -3462,7 +3477,7 @@
[('r', 'rev', '', _('revision to display'), _('REV')),
('', 'all', False, _("list files from all revisions"))]
+ formatteropts,
- _('[-r REV]'))
+ _('[-r REV]'), cmdtype=readonly)
def manifest(ui, repo, node=None, rev=None, **opts):
"""output the current or given revision of the project manifest
@@ -3509,6 +3524,8 @@
char = {'l': '@', 'x': '*', '': ''}
mode = {'l': '644', 'x': '755', '': '644'}
+ if node:
+ repo = scmutil.unhidehashlikerevs(repo, [node], 'nowarn')
ctx = scmutil.revsingle(repo, node)
mf = ctx.manifest()
ui.pager('manifest')
@@ -3525,7 +3542,8 @@
_('force a merge including outstanding changes (DEPRECATED)')),
('r', 'rev', '', _('revision to merge'), _('REV')),
('P', 'preview', None,
- _('review revisions to merge (no merge is performed)'))
+ _('review revisions to merge (no merge is performed)')),
+ ('', 'abort', None, _('abort the ongoing merge')),
] + mergetoolopts,
_('[-P] [[-r] REV]'))
def merge(ui, repo, node=None, **opts):
@@ -3550,7 +3568,7 @@
See :hg:`help resolve` for information on handling file conflicts.
- To undo an uncommitted merge, use :hg:`update --clean .` which
+ To undo an uncommitted merge, use :hg:`merge --abort` which
will check out a clean copy of the original merge parent, losing
all changes.
@@ -3558,6 +3576,16 @@
"""
opts = pycompat.byteskwargs(opts)
+ abort = opts.get('abort')
+ if abort and repo.dirstate.p2() == nullid:
+ cmdutil.wrongtooltocontinue(repo, _('merge'))
+ if abort:
+ if node:
+ raise error.Abort(_("cannot specify a node with --abort"))
+ if opts.get('rev'):
+ raise error.Abort(_("cannot specify both --rev and --abort"))
+ if opts.get('preview'):
+ raise error.Abort(_("cannot specify --preview with --abort"))
if opts.get('rev') and node:
raise error.Abort(_("please specify just one revision"))
if not node:
@@ -3566,7 +3594,7 @@
if node:
node = scmutil.revsingle(repo, node).node()
- if not node:
+ if not node and not abort:
node = repo[destutil.destmerge(repo)].node()
if opts.get('preview'):
@@ -3587,7 +3615,7 @@
force = opts.get('force')
labels = ['working copy', 'merge rev']
return hg.merge(repo, node, force=force, mergeforce=force,
- labels=labels)
+ labels=labels, abort=abort)
finally:
ui.setconfig('ui', 'forcemerge', '', 'merge')
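Hedged sketch of where the new --abort flag above ends up: with a merge in progress (the dirstate has a second parent) the command calls hg.merge() with no node and abort=True, mirroring the updated call at the end of the function. The wrapper below is an assumed example.

    from mercurial import hg
    from mercurial.node import nullid

    def abortmerge(ui, repo):
        assert repo.dirstate.p2() != nullid       # only valid mid-merge
        return hg.merge(repo, None, force=False, mergeforce=False,
                        labels=['working copy', 'merge rev'], abort=True)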
@@ -3696,7 +3724,10 @@
"""
opts = pycompat.byteskwargs(opts)
- ctx = scmutil.revsingle(repo, opts.get('rev'), None)
+ rev = opts.get('rev')
+ if rev:
+ repo = scmutil.unhidehashlikerevs(repo, [rev], 'nowarn')
+ ctx = scmutil.revsingle(repo, rev, None)
if file_:
m = scmutil.match(ctx, (file_,), opts)
@@ -3726,7 +3757,8 @@
displayer.show(repo[n])
displayer.close()
-@command('paths', formatteropts, _('[NAME]'), optionalrepo=True)
+@command('paths', formatteropts, _('[NAME]'), optionalrepo=True,
+ cmdtype=readonly)
def paths(ui, repo, search=None, **opts):
"""show aliases for remote repositories
@@ -3841,7 +3873,6 @@
revs = scmutil.revrange(repo, revs)
- lock = None
ret = 0
if targetphase is None:
# display
@@ -3849,10 +3880,7 @@
ctx = repo[r]
ui.write('%i: %s\n' % (ctx.rev(), ctx.phasestr()))
else:
- tr = None
- lock = repo.lock()
- try:
- tr = repo.transaction("phase")
+ with repo.lock(), repo.transaction("phase") as tr:
# set phase
if not revs:
raise error.Abort(_('empty revision set'))
@@ -3865,11 +3893,6 @@
phases.advanceboundary(repo, tr, targetphase, nodes)
if opts['force']:
phases.retractboundary(repo, tr, targetphase, nodes)
- tr.close()
- finally:
- if tr is not None:
- tr.release()
- lock.release()
getphase = unfi._phasecache.phase
newdata = [getphase(unfi, r) for r in unfi]
changes = sum(newdata[r] != olddata[r] for r in unfi)
@@ -3923,7 +3946,7 @@
@command('^pull',
[('u', 'update', None,
- _('update to new branch head if changesets were pulled')),
+ _('update to new branch head if new descendants were pulled')),
('f', 'force', None, _('run even when remote repository is unrelated')),
('r', 'rev', [], _('a remote changeset intended to be added'), _('REV')),
('B', 'bookmark', [], _("bookmark to pull"), _('BOOKMARK')),
@@ -3978,12 +4001,13 @@
# not ending up with the name of the bookmark because of a race
# condition on the server. (See issue 4689 for details)
remotebookmarks = other.listkeys('bookmarks')
+ remotebookmarks = bookmarks.unhexlifybookmarks(remotebookmarks)
pullopargs['remotebookmarks'] = remotebookmarks
for b in opts['bookmark']:
b = repo._bookmarks.expandname(b)
if b not in remotebookmarks:
raise error.Abort(_('remote bookmark %s not found!') % b)
- revs.append(remotebookmarks[b])
+ revs.append(hex(remotebookmarks[b]))
if revs:
try:
@@ -4002,36 +4026,40 @@
"so a rev cannot be specified.")
raise error.Abort(err)
- pullopargs.update(opts.get('opargs', {}))
- modheads = exchange.pull(repo, other, heads=revs,
- force=opts.get('force'),
- bookmarks=opts.get('bookmark', ()),
- opargs=pullopargs).cgresult
-
- # brev is a name, which might be a bookmark to be activated at
- # the end of the update. In other words, it is an explicit
- # destination of the update
- brev = None
-
- if checkout:
- checkout = str(repo.changelog.rev(checkout))
-
- # order below depends on implementation of
- # hg.addbranchrevs(). opts['bookmark'] is ignored,
- # because 'checkout' is determined without it.
- if opts.get('rev'):
- brev = opts['rev'][0]
- elif opts.get('branch'):
- brev = opts['branch'][0]
- else:
- brev = branches[0]
- repo._subtoppath = source
- try:
- ret = postincoming(ui, repo, modheads, opts.get('update'),
- checkout, brev)
-
- finally:
- del repo._subtoppath
+ wlock = util.nullcontextmanager()
+ if opts.get('update'):
+ wlock = repo.wlock()
+ with wlock:
+ pullopargs.update(opts.get('opargs', {}))
+ modheads = exchange.pull(repo, other, heads=revs,
+ force=opts.get('force'),
+ bookmarks=opts.get('bookmark', ()),
+ opargs=pullopargs).cgresult
+
+ # brev is a name, which might be a bookmark to be activated at
+ # the end of the update. In other words, it is an explicit
+ # destination of the update
+ brev = None
+
+ if checkout:
+ checkout = str(repo.changelog.rev(checkout))
+
+ # order below depends on implementation of
+ # hg.addbranchrevs(). opts['bookmark'] is ignored,
+ # because 'checkout' is determined without it.
+ if opts.get('rev'):
+ brev = opts['rev'][0]
+ elif opts.get('branch'):
+ brev = opts['branch'][0]
+ else:
+ brev = branches[0]
+ repo._subtoppath = source
+ try:
+ ret = postincoming(ui, repo, modheads, opts.get('update'),
+ checkout, brev)
+
+ finally:
+ del repo._subtoppath
finally:
other.close()
@@ -4522,8 +4550,7 @@
('d', 'date', '', _('tipmost revision matching date'), _('DATE')),
('r', 'rev', '', _('revert to the specified revision'), _('REV')),
('C', 'no-backup', None, _('do not save backup copies of files')),
- ('i', 'interactive', None,
- _('interactively select the changes (EXPERIMENTAL)')),
+ ('i', 'interactive', None, _('interactively select the changes')),
] + walkopts + dryrunopts,
_('[OPTION]... [-r REV] [NAME]...'))
def revert(ui, repo, *pats, **opts):
@@ -4563,6 +4590,7 @@
Returns 0 on success.
"""
+ opts = pycompat.byteskwargs(opts)
if opts.get("date"):
if opts.get("rev"):
raise error.Abort(_("you can't specify a revision and a date"))
@@ -4574,7 +4602,10 @@
raise error.Abort(_('uncommitted merge with no revision specified'),
hint=_("use 'hg update' or see 'hg help revert'"))
- ctx = scmutil.revsingle(repo, opts.get('rev'))
+ rev = opts.get('rev')
+ if rev:
+ repo = scmutil.unhidehashlikerevs(repo, [rev], 'nowarn')
+ ctx = scmutil.revsingle(repo, rev)
if (not (pats or opts.get('include') or opts.get('exclude') or
opts.get('all') or opts.get('interactive'))):
@@ -4598,7 +4629,8 @@
hint = _("use --all to revert all files")
raise error.Abort(msg, hint=hint)
- return cmdutil.revert(ui, repo, ctx, (parent, p2), *pats, **opts)
+ return cmdutil.revert(ui, repo, ctx, (parent, p2), *pats,
+ **pycompat.strkwargs(opts))
@command('rollback', dryrunopts +
[('f', 'force', False, _('ignore safety measures'))])
@@ -4653,7 +4685,7 @@
return repo.rollback(dryrun=opts.get(r'dry_run'),
force=opts.get(r'force'))
-@command('root', [])
+@command('root', [], cmdtype=readonly)
def root(ui, repo):
"""print the root (top) of the current working directory
@@ -4701,7 +4733,7 @@
Please note that the server does not implement access control.
This means that, by default, anybody can read from the server and
- nobody can write to it by default. Set the ``web.allow_push``
+ nobody can write to it. Set the ``web.allow-push``
option to ``*`` to allow everybody to push to the server. You
should use a real web server if you need to authenticate users.
@@ -4747,7 +4779,7 @@
('', 'change', '', _('list the changed files of a revision'), _('REV')),
] + walkopts + subrepoopts + formatteropts,
_('[OPTION]... [FILE]...'),
- inferrepo=True)
+ inferrepo=True, cmdtype=readonly)
def status(ui, repo, *pats, **opts):
"""show changed files in the working directory
@@ -4845,9 +4877,11 @@
msg = _('cannot use --terse with --rev')
raise error.Abort(msg)
elif change:
+ repo = scmutil.unhidehashlikerevs(repo, [change], 'nowarn')
node2 = scmutil.revsingle(repo, change, None).node()
node1 = repo[node2].p1().node()
else:
+ repo = scmutil.unhidehashlikerevs(repo, revs, 'nowarn')
node1, node2 = scmutil.revpair(repo, revs)
if pats or ui.configbool('commands', 'status.relative'):
@@ -4912,7 +4946,8 @@
fm.end()
@command('^summary|sum',
- [('', 'remote', None, _('check for push and pull'))], '[--remote]')
+ [('', 'remote', None, _('check for push and pull'))],
+ '[--remote]', cmdtype=readonly)
def summary(ui, repo, **opts):
"""summarize working directory state
@@ -5313,7 +5348,7 @@
finally:
release(lock, wlock)
-@command('tags', formatteropts, '')
+@command('tags', formatteropts, '', cmdtype=readonly)
def tags(ui, repo, **opts):
"""list repository tags
@@ -5510,7 +5545,17 @@
# if we defined a bookmark, we have to remember the original name
brev = rev
- rev = scmutil.revsingle(repo, rev, rev).rev()
+ if rev:
+ repo = scmutil.unhidehashlikerevs(repo, [rev], 'nowarn')
+ ctx = scmutil.revsingle(repo, rev, rev)
+ rev = ctx.rev()
+ if ctx.hidden():
+ ctxstr = ctx.hex()[:12]
+ ui.warn(_("updating to a hidden changeset %s\n") % ctxstr)
+
+ if ctx.obsolete():
+ obsfatemsg = obsutil._getfilteredreason(repo, ctxstr, ctx)
+ ui.warn("(%s)\n" % obsfatemsg)
repo.ui.setconfig('ui', 'forcemerge', tool, 'update')
@@ -5536,7 +5581,7 @@
"""
return hg.verify(repo)
-@command('version', [] + formatteropts, norepo=True)
+@command('version', [] + formatteropts, norepo=True, cmdtype=readonly)
def version_(ui, **opts):
"""output version and copyright information"""
opts = pycompat.byteskwargs(opts)
@@ -5548,7 +5593,7 @@
util.version())
license = _(
"(see https://mercurial-scm.org for more information)\n"
- "\nCopyright (C) 2005-2017 Matt Mackall and others\n"
+ "\nCopyright (C) 2005-2018 Matt Mackall and others\n"
"This is free software; see the source for copying conditions. "
"There is NO\nwarranty; "
"not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
--- a/mercurial/commandserver.py Mon Jan 08 16:07:51 2018 -0800
+++ b/mercurial/commandserver.py Mon Jan 22 17:53:02 2018 -0500
@@ -17,11 +17,11 @@
import traceback
from .i18n import _
+from .thirdparty import selectors2
from . import (
encoding,
error,
pycompat,
- selectors2,
util,
)
@@ -247,13 +247,13 @@
req = dispatch.request(args[:], copiedui, self.repo, self.cin,
self.cout, self.cerr)
- ret = (dispatch.dispatch(req) or 0) & 255 # might return None
-
- # restore old cwd
- if '--cwd' in args:
- os.chdir(self.cwd)
-
- self.cresult.write(struct.pack('>i', int(ret)))
+ try:
+ ret = (dispatch.dispatch(req) or 0) & 255 # might return None
+ self.cresult.write(struct.pack('>i', int(ret)))
+ finally:
+ # restore old cwd
+ if '--cwd' in args:
+ os.chdir(self.cwd)
def getencoding(self):
""" writes the current encoding to the result channel """
@@ -449,6 +449,8 @@
def init(self):
self._sock = socket.socket(socket.AF_UNIX)
self._servicehandler.bindsocket(self._sock, self.address)
+ if util.safehasattr(util, 'unblocksignal'):
+ util.unblocksignal(signal.SIGCHLD)
o = signal.signal(signal.SIGCHLD, self._sigchldhandler)
self._oldsigchldhandler = o
self._socketunlinked = False
--- a/mercurial/configitems.py Mon Jan 08 16:07:51 2018 -0800
+++ b/mercurial/configitems.py Mon Jan 22 17:53:02 2018 -0500
@@ -362,6 +362,9 @@
coreconfigitem('devel', 'warn-config-unknown',
default=None,
)
+coreconfigitem('devel', 'debug.peer-request',
+ default=False,
+)
coreconfigitem('diff', 'nodates',
default=False,
)
@@ -428,6 +431,9 @@
coreconfigitem('experimental', 'bundle2.pushback',
default=False,
)
+coreconfigitem('experimental', 'bundle2.stream',
+ default=False,
+)
coreconfigitem('experimental', 'bundle2lazylocking',
default=False,
)
@@ -452,6 +458,12 @@
coreconfigitem('experimental', 'crecordtest',
default=None,
)
+coreconfigitem('experimental', 'directaccess',
+ default=False,
+)
+coreconfigitem('experimental', 'directaccess.revnums',
+ default=False,
+)
coreconfigitem('experimental', 'editortmpinhg',
default=False,
)
@@ -469,7 +481,7 @@
default=None,
)
coreconfigitem('experimental', 'evolution.effect-flags',
- default=False,
+ default=True,
alias=[('experimental', 'effect-flags')]
)
coreconfigitem('experimental', 'evolution.exchange',
@@ -478,9 +490,15 @@
coreconfigitem('experimental', 'evolution.bundle-obsmarker',
default=False,
)
+coreconfigitem('experimental', 'evolution.report-instabilities',
+ default=True,
+)
coreconfigitem('experimental', 'evolution.track-operation',
default=True,
)
+coreconfigitem('experimental', 'worddiff',
+ default=False,
+)
coreconfigitem('experimental', 'maxdeltachainspan',
default=-1,
)
@@ -529,15 +547,15 @@
coreconfigitem('experimental', 'obsmarkers-exchange-debug',
default=False,
)
-coreconfigitem('experimental', 'rebase.multidest',
+coreconfigitem('experimental', 'remotenames',
default=False,
)
-coreconfigitem('experimental', 'revertalternateinteractivemode',
- default=True,
-)
coreconfigitem('experimental', 'revlogv2',
default=None,
)
+coreconfigitem('experimental', 'single-head-per-branch',
+ default=False,
+)
coreconfigitem('experimental', 'spacemovesdown',
default=False,
)
@@ -553,6 +571,9 @@
coreconfigitem('experimental', 'treemanifest',
default=False,
)
+coreconfigitem('experimental', 'update.atomic-file',
+ default=False,
+)
coreconfigitem('extensions', '.*',
default=None,
generic=True,
@@ -838,6 +859,9 @@
coreconfigitem('push', 'pushvars.server',
default=False,
)
+coreconfigitem('server', 'bookmarks-pushkey-compat',
+ default=True,
+)
coreconfigitem('server', 'bundle1',
default=True,
)
@@ -1060,6 +1084,9 @@
coreconfigitem('ui', 'ssh',
default='ssh',
)
+coreconfigitem('ui', 'ssherrorhint',
+ default=None,
+)
coreconfigitem('ui', 'statuscopies',
default=False,
)
@@ -1078,6 +1105,9 @@
coreconfigitem('ui', 'timeout',
default='600',
)
+coreconfigitem('ui', 'timeout.warn',
+ default=0,
+)
coreconfigitem('ui', 'traceback',
default=False,
)
@@ -1102,10 +1132,12 @@
coreconfigitem('web', 'allowgz',
default=False,
)
-coreconfigitem('web', 'allowpull',
+coreconfigitem('web', 'allow-pull',
+ alias=[('web', 'allowpull')],
default=True,
)
-coreconfigitem('web', 'allow_push',
+coreconfigitem('web', 'allow-push',
+ alias=[('web', 'allow_push')],
default=list,
)
coreconfigitem('web', 'allowzip',
@@ -1239,6 +1271,9 @@
coreconfigitem('worker', 'backgroundclosethreadcount',
default=4,
)
+coreconfigitem('worker', 'enabled',
+ default=True,
+)
coreconfigitem('worker', 'numcpus',
default=None,
)
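A hedged example of consuming one of the knobs registered above: ui.configbool() falls back to the declared default, so code can read worker.enabled without guarding for a missing key.

    useworker = repo.ui.configbool('worker', 'enabled')  # True unless overridden
    numcpus = repo.ui.config('worker', 'numcpus')        # None by default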
@@ -1255,3 +1290,6 @@
coreconfigitem('rebase', 'singletransaction',
default=False,
)
+coreconfigitem('rebase', 'experimental.inmemory',
+ default=False,
+)
--- a/mercurial/context.py Mon Jan 08 16:07:51 2018 -0800
+++ b/mercurial/context.py Mon Jan 22 17:53:02 2018 -0500
@@ -36,6 +36,7 @@
match as matchmod,
mdiff,
obsolete as obsmod,
+ obsutil,
patch,
pathutil,
phases,
@@ -354,7 +355,7 @@
ctx2 = self.p1()
if ctx2 is not None:
ctx2 = self._repo[ctx2]
- diffopts = patch.diffopts(self._repo.ui, opts)
+ diffopts = patch.diffopts(self._repo.ui, pycompat.byteskwargs(opts))
return patch.diff(self._repo, ctx2, self, match=match, opts=diffopts)
def dirs(self):
@@ -433,8 +434,20 @@
This is extracted in a function to help extensions (eg: evolve) to
experiment with various message variants."""
if repo.filtername.startswith('visible'):
- msg = _("hidden revision '%s'") % changeid
+
+ # Check if the changeset is obsolete
+ unfilteredrepo = repo.unfiltered()
+ ctx = unfilteredrepo[changeid]
+
+ # If the changeset is obsolete, enrich the message with the reason
+ # that made this changeset not visible
+ if ctx.obsolete():
+ msg = obsutil._getfilteredreason(repo, changeid, ctx)
+ else:
+ msg = _("hidden revision '%s'") % changeid
+
hint = _('use --hidden to access hidden revisions')
+
return error.FilteredRepoLookupError(msg, hint=hint)
msg = _("filtered revision '%s' (not in '%s' subset)")
msg %= (changeid, repo.filtername)
@@ -615,10 +628,13 @@
def closesbranch(self):
return 'close' in self._changeset.extra
def extra(self):
+ """Return a dict of extra information."""
return self._changeset.extra
def tags(self):
+ """Return a list of byte tag names"""
return self._repo.nodetags(self._node)
def bookmarks(self):
+ """Return a list of byte bookmark names."""
return self._repo.nodebookmarks(self._node)
def phase(self):
return self._repo._phasecache.phase(self._repo, self._rev)
@@ -629,7 +645,11 @@
return False
def children(self):
- """return contexts for each child changeset"""
+ """return list of changectx contexts for each child changeset.
+
+ This returns only the immediate child changesets. Use descendants() to
+ recursively walk children.
+ """
c = self._repo.changelog.children(self._node)
return [changectx(self._repo, x) for x in c]
@@ -638,6 +658,10 @@
yield changectx(self._repo, a)
def descendants(self):
+ """Recursively yield all children of the changeset.
+
+ For just the immediate children, use children().
+ """
for d in self._repo.changelog.descendants([self._rev]):
yield changectx(self._repo, d)
@@ -819,6 +843,10 @@
return self._changectx.phase()
def phasestr(self):
return self._changectx.phasestr()
+ def obsolete(self):
+ return self._changectx.obsolete()
+ def instabilities(self):
+ return self._changectx.instabilities()
def manifest(self):
return self._changectx.manifest()
def changectx(self):
@@ -931,6 +959,14 @@
return self.linkrev()
return self._adjustlinkrev(self.rev(), inclusive=True)
+ def introfilectx(self):
+ """Return filectx having identical contents, but pointing to the
+ changeset revision where this filectx was introduced"""
+ introrev = self.introrev()
+ if self.rev() == introrev:
+ return self
+ return self.filectx(self.filenode(), changeid=introrev)
+
def _parentfilectx(self, path, fileid, filelog):
"""create parent filectx keeping ancestry info for _adjustlinkrev()"""
fctx = filectx(self._repo, path, fileid=fileid, filelog=filelog)
@@ -1021,19 +1057,16 @@
return pl
# use linkrev to find the first changeset where self appeared
- base = self
- introrev = self.introrev()
- if self.rev() != introrev:
- base = self.filectx(self.filenode(), changeid=introrev)
+ base = self.introfilectx()
if getattr(base, '_ancestrycontext', None) is None:
cl = self._repo.changelog
- if introrev is None:
+ if base.rev() is None:
# wctx is not inclusive, but works because _ancestrycontext
# is used to test filelog revisions
ac = cl.ancestors([p.rev() for p in base.parents()],
inclusive=True)
else:
- ac = cl.ancestors([introrev], inclusive=True)
+ ac = cl.ancestors([base.rev()], inclusive=True)
base._ancestrycontext = ac
# This algorithm would prefer to be recursive, but Python is a
@@ -1088,7 +1121,7 @@
hist[f] = curr
del pcache[f]
- return zip(hist[base][0], hist[base][1].splitlines(True))
+ return pycompat.ziplist(hist[base][0], hist[base][1].splitlines(True))
def ancestors(self, followfirst=False):
visit = {}
@@ -1633,9 +1666,6 @@
listsubrepos=listsubrepos, badfn=badfn,
icasefs=icasefs)
- def flushall(self):
- pass # For overlayworkingfilectx compatibility.
-
def _filtersuspectsymlink(self, files):
if not files or self._repo.dirstate._checklink:
return files
@@ -1932,10 +1962,11 @@
"""wraps unlink for a repo's working directory"""
self._repo.wvfs.unlinkpath(self._path, ignoremissing=ignoremissing)
- def write(self, data, flags, backgroundclose=False):
+ def write(self, data, flags, backgroundclose=False, **kwargs):
"""wraps repo.wwrite"""
self._repo.wwrite(self._path, data, flags,
- backgroundclose=backgroundclose)
+ backgroundclose=backgroundclose,
+ **kwargs)
def markcopied(self, src):
"""marks this file a copy of `src`"""
@@ -1959,25 +1990,33 @@
def setflags(self, l, x):
self._repo.wvfs.setflags(self._path, l, x)
-class overlayworkingctx(workingctx):
- """Wraps another mutable context with a write-back cache that can be flushed
- at a later time.
+class overlayworkingctx(committablectx):
+ """Wraps another mutable context with a write-back cache that can be
+ converted into a commit context.
self._cache[path] maps to a dict with keys: {
'exists': bool?
'date': date?
'data': str?
'flags': str?
+ 'copied': str? (path or None)
}
If `exists` is True, `flags` must be non-None and 'date' is non-None. If it
is `False`, the file was deleted.
"""
- def __init__(self, repo, wrappedctx):
+ def __init__(self, repo):
super(overlayworkingctx, self).__init__(repo)
self._repo = repo
+ self.clean()
+
+ def setbase(self, wrappedctx):
self._wrappedctx = wrappedctx
- self._clean()
+ self._parents = [wrappedctx]
+ # Drop old manifest cache as it is now out of date.
+ # This is necessary when, e.g., rebasing several nodes with one
+ # ``overlayworkingctx`` (e.g. with --collapse).
+ util.clearcachedproperty(self, '_manifest')
def data(self, path):
if self.isdirty(path):
@@ -1989,10 +2028,47 @@
return self._wrappedctx[path].data()
else:
raise error.ProgrammingError("No such file or directory: %s" %
- self._path)
+ path)
else:
return self._wrappedctx[path].data()
+ @propertycache
+ def _manifest(self):
+ parents = self.parents()
+ man = parents[0].manifest().copy()
+
+ flag = self._flagfunc
+ for path in self.added():
+ man[path] = addednodeid
+ man.setflag(path, flag(path))
+ for path in self.modified():
+ man[path] = modifiednodeid
+ man.setflag(path, flag(path))
+ for path in self.removed():
+ del man[path]
+ return man
+
+ @propertycache
+ def _flagfunc(self):
+ def f(path):
+ return self._cache[path]['flags']
+ return f
+
+ def files(self):
+ return sorted(self.added() + self.modified() + self.removed())
+
+ def modified(self):
+ return [f for f in self._cache.keys() if self._cache[f]['exists'] and
+ self._existsinparent(f)]
+
+ def added(self):
+ return [f for f in self._cache.keys() if self._cache[f]['exists'] and
+ not self._existsinparent(f)]
+
+ def removed(self):
+ return [f for f in self._cache.keys() if
+ not self._cache[f]['exists'] and self._existsinparent(f)]
+
def isinmemory(self):
return True
@@ -2002,6 +2078,18 @@
else:
return self._wrappedctx[path].date()
+ def markcopied(self, path, origin):
+ if self.isdirty(path):
+ self._cache[path]['copied'] = origin
+ else:
+ raise error.ProgrammingError('markcopied() called on clean context')
+
+ def copydata(self, path):
+ if self.isdirty(path):
+ return self._cache[path]['copied']
+ else:
+ raise error.ProgrammingError('copydata() called on clean context')
+
def flags(self, path):
if self.isdirty(path):
if self._cache[path]['exists']:
@@ -2012,9 +2100,60 @@
else:
return self._wrappedctx[path].flags()
- def write(self, path, data, flags=''):
+ def _existsinparent(self, path):
+ try:
+ # ``commitctx`` raises a ``ManifestLookupError`` if a path does not
+ # exist, unlike ``workingctx``, which returns a ``workingfilectx``
+ # with an ``exists()`` function.
+ self._wrappedctx[path]
+ return True
+ except error.ManifestLookupError:
+ return False
+
+ def _auditconflicts(self, path):
+ """Replicates conflict checks done by wvfs.write().
+
+ Since we never write to the filesystem and never call `applyupdates` in
+ IMM, we'll never check that a path is actually writable -- e.g., because
+ it adds `a/foo`, but `a` is actually a file in the other commit.
+ """
+ def fail(path, component):
+ # p1() is the base and we're receiving "writes" for p2()'s
+ # files.
+ if 'l' in self.p1()[component].flags():
+ raise error.Abort("error: %s conflicts with symlink %s "
+ "in %s." % (path, component,
+ self.p1().rev()))
+ else:
+ raise error.Abort("error: '%s' conflicts with file '%s' in "
+ "%s." % (path, component,
+ self.p1().rev()))
+
+ # Test that each new directory to be created to write this path from p2
+ # is not a file in p1.
+ components = path.split('/')
+ for i in xrange(len(components)):
+ component = "/".join(components[0:i])
+ if component in self.p1():
+ fail(path, component)
+
+ # Test the other direction -- that this path from p2 isn't a directory
+ # in p1 (test that p1 doesn't have any paths matching `path/*`).
+ match = matchmod.match('/', '', [path + '/'], default=b'relpath')
+ matches = self.p1().manifest().matches(match)
+ if len(matches) > 0:
+ if len(matches) == 1 and matches.keys()[0] == path:
+ return
+ raise error.Abort("error: file '%s' cannot be written because "
+ " '%s/' is a folder in %s (containing %d "
+ "entries: %s)"
+ % (path, path, self.p1(), len(matches),
+ ', '.join(matches.keys())))
+
+ def write(self, path, data, flags='', **kwargs):
if data is None:
raise error.ProgrammingError("data must be non-None")
+ self._auditconflicts(path)
self._markdirty(path, exists=True, data=data, date=util.makedate(),
flags=flags)
@@ -2037,13 +2176,15 @@
return self.exists(self._cache[path]['data'].strip())
else:
return self._cache[path]['exists']
- return self._wrappedctx[path].exists()
+
+ return self._existsinparent(path)
def lexists(self, path):
"""lexists returns True if the path exists"""
if self.isdirty(path):
return self._cache[path]['exists']
- return self._wrappedctx[path].lexists()
+
+ return self._existsinparent(path)
def size(self, path):
if self.isdirty(path):
@@ -2054,48 +2195,90 @@
self._path)
return self._wrappedctx[path].size()
- def flushall(self):
- for path in self._writeorder:
- entry = self._cache[path]
- if entry['exists']:
- self._wrappedctx[path].clearunknown()
- if entry['data'] is not None:
- if entry['flags'] is None:
- raise error.ProgrammingError('data set but not flags')
- self._wrappedctx[path].write(
- entry['data'],
- entry['flags'])
- else:
- self._wrappedctx[path].setflags(
- 'l' in entry['flags'],
- 'x' in entry['flags'])
+ def tomemctx(self, text, branch=None, extra=None, date=None, parents=None,
+ user=None, editor=None):
+ """Converts this ``overlayworkingctx`` into a ``memctx`` ready to be
+ committed.
+
+ ``text`` is the commit message.
+ ``parents`` (optional) are rev numbers.
+ """
+ # Default parents to the wrapped context's parents if not passed.
+ if parents is None:
+ parents = self._wrappedctx.parents()
+ if len(parents) == 1:
+ parents = (parents[0], None)
+
+ # ``parents`` is passed as rev numbers; convert to ``commitctxs``.
+ if parents[1] is None:
+ parents = (self._repo[parents[0]], None)
+ else:
+ parents = (self._repo[parents[0]], self._repo[parents[1]])
+
+ files = self._cache.keys()
+ def getfile(repo, memctx, path):
+ if self._cache[path]['exists']:
+ return memfilectx(repo, memctx, path,
+ self._cache[path]['data'],
+ 'l' in self._cache[path]['flags'],
+ 'x' in self._cache[path]['flags'],
+ self._cache[path]['copied'])
else:
- self._wrappedctx[path].remove(path)
- self._clean()
+ # Returning None, but including the path in `files`, is
+ # necessary for memctx to register a deletion.
+ return None
+ return memctx(self._repo, parents, text, files, getfile, date=date,
+ extra=extra, user=user, branch=branch, editor=editor)
def isdirty(self, path):
return path in self._cache
- def _clean(self):
+ def isempty(self):
+ # We need to discard any keys that are actually clean before the empty
+ # commit check.
+ self._compact()
+ return len(self._cache) == 0
+
+ def clean(self):
self._cache = {}
- self._writeorder = []
+
+ def _compact(self):
+ """Removes keys from the cache that are actually clean, by comparing
+ them with the underlying context.
+
+ This can occur during the merge process, e.g. by passing --tool :local
+ to resolve a conflict.
+ """
+ keys = []
+ for path in self._cache.keys():
+ cache = self._cache[path]
+ try:
+ underlying = self._wrappedctx[path]
+ if (underlying.data() == cache['data'] and
+ underlying.flags() == cache['flags']):
+ keys.append(path)
+ except error.ManifestLookupError:
+ # Path not in the underlying manifest (created).
+ continue
+
+ for path in keys:
+ del self._cache[path]
+ return keys
def _markdirty(self, path, exists, data=None, date=None, flags=''):
- if path not in self._cache:
- self._writeorder.append(path)
-
self._cache[path] = {
'exists': exists,
'data': data,
'date': date,
'flags': flags,
+ 'copied': None,
}
def filectx(self, path, filelog=None):
return overlayworkingfilectx(self._repo, path, parent=self,
filelog=filelog)
-class overlayworkingfilectx(workingfilectx):
+class overlayworkingfilectx(committablefilectx):
"""Wrap a ``workingfilectx`` but intercepts all writes into an in-memory
cache, which can be flushed through later by calling ``flush()``."""
@@ -2109,7 +2292,7 @@
def cmp(self, fctx):
return self.data() != fctx.data()
- def ctx(self):
+ def changectx(self):
return self._parent
def data(self):
@@ -2125,16 +2308,17 @@
return self._parent.exists(self._path)
def renamed(self):
- # Copies are currently tracked in the dirstate as before. Straight copy
- # from workingfilectx.
- rp = self._repo.dirstate.copied(self._path)
- if not rp:
+ path = self._parent.copydata(self._path)
+ if not path:
return None
- return rp, self._changectx._parents[0]._manifest.get(rp, nullid)
+ return path, self._changectx._parents[0]._manifest.get(path, nullid)
def size(self):
return self._parent.size(self._path)
+ def markcopied(self, origin):
+ self._parent.markcopied(self._path, origin)
+
def audit(self):
pass
@@ -2144,12 +2328,15 @@
def setflags(self, islink, isexec):
return self._parent.setflags(self._path, islink, isexec)
- def write(self, data, flags, backgroundclose=False):
- return self._parent.write(self._path, data, flags)
+ def write(self, data, flags, backgroundclose=False, **kwargs):
+ return self._parent.write(self._path, data, flags, **kwargs)
def remove(self, ignoremissing=False):
return self._parent.remove(self._path)
+ def clearunknown(self):
+ pass
+
class workingcommitctx(workingctx):
"""A workingcommitctx object makes access to data related to
the revision being committed convenient.
@@ -2215,9 +2402,9 @@
copied = fctx.renamed()
if copied:
copied = copied[0]
- return memfilectx(repo, path, fctx.data(),
+ return memfilectx(repo, memctx, path, fctx.data(),
islink=fctx.islink(), isexec=fctx.isexec(),
- copied=copied, memctx=memctx)
+ copied=copied)
return getfilectx
@@ -2231,9 +2418,8 @@
if data is None:
return None
islink, isexec = mode
- return memfilectx(repo, path, data, islink=islink,
- isexec=isexec, copied=copied,
- memctx=memctx)
+ return memfilectx(repo, memctx, path, data, islink=islink,
+ isexec=isexec, copied=copied)
return getfilectx
@@ -2365,8 +2551,8 @@
See memctx and committablefilectx for more details.
"""
- def __init__(self, repo, path, data, islink=False,
- isexec=False, copied=None, memctx=None):
+ def __init__(self, repo, changectx, path, data, islink=False,
+ isexec=False, copied=None):
"""
path is the normalized file path relative to repository root.
data is the file content as a string.
@@ -2374,7 +2560,7 @@
isexec is True if the file is executable.
copied is the source file path if current file was copied in the
revision being committed, or None."""
- super(memfilectx, self).__init__(repo, path, None, memctx)
+ super(memfilectx, self).__init__(repo, path, None, changectx)
self._data = data
self._flags = (islink and 'l' or '') + (isexec and 'x' or '')
self._copied = None
@@ -2389,7 +2575,7 @@
# need to figure out what to do here
del self._changectx[self._path]
- def write(self, data, flags):
+ def write(self, data, flags, **kwargs):
"""wraps repo.wwrite"""
self._data = data
@@ -2598,7 +2784,7 @@
def remove(self):
util.unlink(self._path)
- def write(self, data, flags):
+ def write(self, data, flags, **kwargs):
assert not flags
with open(self._path, "w") as f:
f.write(data)
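
Most of the context.py churn above turns overlayworkingctx into a pure write-back cache: every dirty path maps to a dict with 'exists', 'data', 'flags', 'date' and now 'copied' keys, and added()/modified()/removed() are derived by comparing that cache against the wrapped base context, with _compact() discarding entries that turn out to be clean. A standalone sketch of that bookkeeping, using a plain {path: data} dict as a stand-in for the base context (all names here are illustrative only):

    class overlaycache(object):
        """Illustrative write-back cache; not the real overlayworkingctx."""
        def __init__(self, base):
            self._base = base        # {path: data} of the wrapped context
            self._cache = {}

        def write(self, path, data, flags=''):
            self._cache[path] = {'exists': True, 'data': data,
                                 'flags': flags, 'copied': None}

        def remove(self, path):
            self._cache[path] = {'exists': False, 'data': None,
                                 'flags': '', 'copied': None}

        def _existsinbase(self, path):
            return path in self._base

        def added(self):
            return [p for p, e in self._cache.items()
                    if e['exists'] and not self._existsinbase(p)]

        def modified(self):
            return [p for p, e in self._cache.items()
                    if e['exists'] and self._existsinbase(p)]

        def removed(self):
            return [p for p, e in self._cache.items()
                    if not e['exists'] and self._existsinbase(p)]

        def compact(self):
            """Drop entries that are byte-identical to the base (clean)."""
            for p in list(self._cache):
                e = self._cache[p]
                if e['exists'] and self._base.get(p) == e['data']:
                    del self._cache[p]

    c = overlaycache({'a': b'old'})
    c.write('b', b'data')
    c.remove('a')
    print(c.added(), c.modified(), c.removed())   # ['b'] [] ['a']

    c2 = overlaycache({'a': b'same'})
    c2.write('a', b'same')
    c2.compact()
    print(c2._cache)                              # {} - no real change
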
--- a/mercurial/copies.py Mon Jan 08 16:07:51 2018 -0800
+++ b/mercurial/copies.py Mon Jan 22 17:53:02 2018 -0500
@@ -107,7 +107,7 @@
return min(limit, a, b)
def _chain(src, dst, a, b):
- '''chain two sets of copies a->b'''
+ """chain two sets of copies a->b"""
t = a.copy()
for k, v in b.iteritems():
if v in t:
@@ -130,8 +130,8 @@
return t
def _tracefile(fctx, am, limit=-1):
- '''return file context that is the ancestor of fctx present in ancestor
- manifest am, stopping after the first ancestor lower than limit'''
+ """return file context that is the ancestor of fctx present in ancestor
+ manifest am, stopping after the first ancestor lower than limit"""
for f in fctx.ancestors():
if am.get(f.path(), None) == f.filenode():
@@ -139,11 +139,11 @@
if limit >= 0 and f.linkrev() < limit and f.rev() < limit:
return None
-def _dirstatecopies(d):
+def _dirstatecopies(d, match=None):
ds = d._repo.dirstate
c = ds.copies().copy()
for k in list(c):
- if ds[k] not in 'anm':
+ if ds[k] not in 'anm' or (match and not match(k)):
del c[k]
return c
@@ -156,18 +156,8 @@
mb = b.manifest()
return mb.filesnotin(ma, match=match)
-def _forwardcopies(a, b, match=None):
- '''find {dst@b: src@a} copy mapping where a is an ancestor of b'''
-
- # check for working copy
- w = None
- if b.rev() is None:
- w = b
- b = w.p1()
- if a == b:
- # short-circuit to avoid issues with merge states
- return _dirstatecopies(w)
-
+def _committedforwardcopies(a, b, match):
+ """Like _forwardcopies(), but b.rev() cannot be None (working copy)"""
# files might have to be traced back to the fctx parent of the last
# one-side-only changeset, but not further back than that
limit = _findlimit(a._repo, a.rev(), b.rev())
@@ -199,12 +189,21 @@
ofctx = _tracefile(fctx, am, limit)
if ofctx:
cm[f] = ofctx.path()
+ return cm
- # combine copies from dirstate if necessary
- if w is not None:
- cm = _chain(a, w, cm, _dirstatecopies(w))
+def _forwardcopies(a, b, match=None):
+ """find {dst@b: src@a} copy mapping where a is an ancestor of b"""
- return cm
+ # check for working copy
+ if b.rev() is None:
+ if a == b.p1():
+ # short-circuit to avoid issues with merge states
+ return _dirstatecopies(b, match)
+
+ cm = _committedforwardcopies(a, b.p1(), match)
+ # combine copies from dirstate if necessary
+ return _chain(a, b, cm, _dirstatecopies(b, match))
+ return _committedforwardcopies(a, b, match)
def _backwardrenames(a, b):
if a._repo.ui.config('experimental', 'copytrace') == 'off':
@@ -223,7 +222,7 @@
return r
def pathcopies(x, y, match=None):
- '''find {dst@y: src@x} copy mapping for directed compare'''
+ """find {dst@y: src@x} copy mapping for directed compare"""
if x == y or not x or not y:
return {}
a = y.ancestor(x)
@@ -861,13 +860,13 @@
return
def duplicatecopies(repo, wctx, rev, fromrev, skiprev=None):
- '''reproduce copies from fromrev to rev in the dirstate
+ """reproduce copies from fromrev to rev in the dirstate
If skiprev is specified, it's a revision that should be used to
filter copy records. Any copies that occur between fromrev and
skiprev will not be duplicated, even if they appear in the set of
copies between fromrev and rev.
- '''
+ """
exclude = {}
if (skiprev is not None and
repo.ui.config('experimental', 'copytrace') != 'off'):
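
The copies.py refactor above splits _forwardcopies() into a committed part and a dirstate part, then composes them with _chain(). The core of that composition is chaining two {dst: src} maps so a rename recorded in history and a rename pending in the working copy collapse into a single source; a simplified sketch (the real helper also drops entries whose source is gone):

    def chaincopies(first, second):
        """first: {dst@b: src@a}, second: {dst@c: src@b} -> copies seen at c."""
        t = dict(first)
        for dst, src in second.items():
            if src in t:
                # the intermediate name was itself a copy; follow it back to a
                t[dst] = t[src]
            else:
                t[dst] = src
        return t

    committed = {'b.txt': 'a.txt'}   # a.txt was copied to b.txt in history
    wdir = {'c.txt': 'b.txt'}        # b.txt copied to c.txt in the working copy
    print(chaincopies(committed, wdir))
    # {'b.txt': 'a.txt', 'c.txt': 'a.txt'}
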
--- a/mercurial/crecord.py Mon Jan 08 16:07:51 2018 -0800
+++ b/mercurial/crecord.py Mon Jan 22 17:53:02 2018 -0500
@@ -555,7 +555,7 @@
return chunkselector.opts
_headermessages = { # {operation: text}
- 'revert': _('Select hunks to revert'),
+ 'apply': _('Select hunks to apply'),
'discard': _('Select hunks to discard'),
None: _('Select hunks to record'),
}
@@ -581,6 +581,13 @@
# maps custom nicknames of color-pairs to curses color-pair values
self.colorpairnames = {}
+ # Honor the color setting of the ui section. Keep the colored setup
+ # as long as it is not explicitly set to a falsy value - especially
+ # when it is not set at all. This stays most compatible with the
+ # previous (color only) behaviour.
+ uicolor = util.parsebool(self.ui.config('ui', 'color'))
+ self.usecolor = uicolor is not False
+
# the currently selected header, hunk, or hunk-line
self.currentselecteditem = self.headerlist[0]
@@ -1371,11 +1378,19 @@
colorpair = self.colorpairs[(fgcolor, bgcolor)]
else:
pairindex = len(self.colorpairs) + 1
- curses.init_pair(pairindex, fgcolor, bgcolor)
- colorpair = self.colorpairs[(fgcolor, bgcolor)] = (
- curses.color_pair(pairindex))
- if name is not None:
- self.colorpairnames[name] = curses.color_pair(pairindex)
+ if self.usecolor:
+ curses.init_pair(pairindex, fgcolor, bgcolor)
+ colorpair = self.colorpairs[(fgcolor, bgcolor)] = (
+ curses.color_pair(pairindex))
+ if name is not None:
+ self.colorpairnames[name] = curses.color_pair(pairindex)
+ else:
+ cval = 0
+ if name is not None:
+ if name == 'selected':
+ cval = curses.A_REVERSE
+ self.colorpairnames[name] = cval
+ colorpair = self.colorpairs[(fgcolor, bgcolor)] = cval
# add attributes if possible
if attrlist is None:
@@ -1704,7 +1719,10 @@
self.yscreensize, self.xscreensize = self.stdscr.getmaxyx()
curses.start_color()
- curses.use_default_colors()
+ try:
+ curses.use_default_colors()
+ except curses.error:
+ self.usecolor = False
# available colors: black, blue, cyan, green, magenta, white, yellow
# init_pair(color_id, foreground_color, background_color)
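
The crecord changes above keep the colored setup unless ui.color is explicitly set to a falsy value, and fall back to reverse video for the selection when curses cannot provide default colors. A hedged sketch of that tri-state decision; parsebool below is a small stand-in for util.parsebool, not the real helper:

    def parsebool(s):
        """Stand-in for util.parsebool: True, False, or None (unknown)."""
        if s is None:
            return None
        truthy = {'1', 'yes', 'true', 'on', 'always'}
        falsy = {'0', 'no', 'false', 'off', 'never'}
        s = s.lower()
        return True if s in truthy else (False if s in falsy else None)

    def wantcolor(uicolorsetting):
        # keep the colored setup as long as the value is not explicitly falsy
        return parsebool(uicolorsetting) is not False

    for setting in (None, 'auto', 'yes', 'no'):
        print(setting, wantcolor(setting))   # only 'no' disables color
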
--- a/mercurial/dagop.py Mon Jan 08 16:07:51 2018 -0800
+++ b/mercurial/dagop.py Mon Jan 22 17:53:02 2018 -0500
@@ -75,6 +75,46 @@
if prev != node.nullrev:
heapq.heappush(pendingheap, (heapsign * prev, pdepth))
+def filectxancestors(fctxs, followfirst=False):
+ """Like filectx.ancestors(), but can walk from multiple files/revisions,
+ and includes the given fctxs themselves
+
+ Yields (rev, {fctx, ...}) pairs in descending order.
+ """
+ visit = {}
+ visitheap = []
+ def addvisit(fctx):
+ rev = fctx.rev()
+ if rev not in visit:
+ visit[rev] = set()
+ heapq.heappush(visitheap, -rev) # max heap
+ visit[rev].add(fctx)
+
+ if followfirst:
+ cut = 1
+ else:
+ cut = None
+
+ for c in fctxs:
+ addvisit(c)
+ while visit:
+ currev = -heapq.heappop(visitheap)
+ curfctxs = visit.pop(currev)
+ yield currev, curfctxs
+ for c in curfctxs:
+ for parent in c.parents()[:cut]:
+ addvisit(parent)
+ assert not visitheap
+
+def filerevancestors(fctxs, followfirst=False):
+ """Like filectx.ancestors(), but can walk from multiple files/revisions,
+ and includes the given fctxs themselves
+
+ Returns a smartset.
+ """
+ gen = (rev for rev, _cs in filectxancestors(fctxs, followfirst))
+ return generatorset(gen, iterasc=False)
+
def _genrevancestors(repo, revs, followfirst, startdepth, stopdepth, cutfunc):
if followfirst:
cut = 1
@@ -251,9 +291,7 @@
`fromline`-`toline` range.
"""
diffopts = patch.diffopts(fctx._repo.ui)
- introrev = fctx.introrev()
- if fctx.rev() != introrev:
- fctx = fctx.filectx(fctx.filenode(), changeid=introrev)
+ fctx = fctx.introfilectx()
visit = {(fctx.linkrev(), fctx.filenode()): (fctx, (fromline, toline))}
while visit:
c, linerange2 = visit.pop(max(visit))
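
filectxancestors() above merges the ancestor walks of several file contexts by grouping them per changelog revision and popping the highest revision first from a heap of negated revs. A standalone sketch of the same traversal over plain namedtuple nodes (the Node type and the parentsof callback are stand-ins for filectx and its parents() method):

    import collections
    import heapq

    Node = collections.namedtuple('Node', 'name rev parents')

    def walkancestors(items, parentsof):
        """Yield (rev, {item, ...}) pairs in descending rev order.

        Assumes parents always have a lower rev than their children, as in
        a changelog.
        """
        visit = {}
        heap = []
        def addvisit(item):
            if item.rev not in visit:
                visit[item.rev] = set()
                heapq.heappush(heap, -item.rev)   # negate: heapq is a min-heap
            visit[item.rev].add(item)
        for it in items:
            addvisit(it)
        while visit:
            currev = -heapq.heappop(heap)
            cur = visit.pop(currev)
            yield currev, cur
            for it in cur:
                for p in parentsof(it):
                    addvisit(p)

    a1 = Node('a', 1, ())
    b3 = Node('b', 3, (a1,))
    a4 = Node('a', 4, (a1,))
    for rev, nodes in walkancestors([b3, a4], lambda n: n.parents):
        print(rev, sorted(n.name for n in nodes))   # 4 ['a'], 3 ['b'], 1 ['a']
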
--- a/mercurial/dagutil.py Mon Jan 08 16:07:51 2018 -0800
+++ b/mercurial/dagutil.py Mon Jan 22 17:53:02 2018 -0500
@@ -154,8 +154,9 @@
class revlogdag(revlogbaseddag):
'''dag interface to a revlog'''
- def __init__(self, revlog):
+ def __init__(self, revlog, localsubset=None):
revlogbaseddag.__init__(self, revlog, set(revlog))
+ self._heads = localsubset
def _getheads(self):
return [r for r in self._revlog.headrevs() if r != nullrev]
--- a/mercurial/debugcommands.py Mon Jan 08 16:07:51 2018 -0800
+++ b/mercurial/debugcommands.py Mon Jan 22 17:53:02 2018 -0500
@@ -69,6 +69,7 @@
templater,
treediscovery,
upgrade,
+ url as urlmod,
util,
vfs as vfsmod,
)
@@ -179,11 +180,11 @@
ui.progress(_('building'), id, unit=_('revisions'), total=total)
for type, data in dagparser.parsedag(text):
if type == 'n':
- ui.note(('node %s\n' % str(data)))
+ ui.note(('node %s\n' % pycompat.bytestr(data)))
id, ps = data
files = []
- fctxs = {}
+ filecontent = {}
p2 = None
if mergeable_file:
@@ -204,27 +205,30 @@
ml[id * linesperrev] += " r%i" % id
mergedtext = "\n".join(ml)
files.append(fn)
- fctxs[fn] = context.memfilectx(repo, fn, mergedtext)
+ filecontent[fn] = mergedtext
if overwritten_file:
fn = "of"
files.append(fn)
- fctxs[fn] = context.memfilectx(repo, fn, "r%i\n" % id)
+ filecontent[fn] = "r%i\n" % id
if new_file:
fn = "nf%i" % id
files.append(fn)
- fctxs[fn] = context.memfilectx(repo, fn, "r%i\n" % id)
+ filecontent[fn] = "r%i\n" % id
if len(ps) > 1:
if not p2:
p2 = repo[ps[1]]
for fn in p2:
if fn.startswith("nf"):
files.append(fn)
- fctxs[fn] = p2[fn]
+ filecontent[fn] = p2[fn].data()
def fctxfn(repo, cx, path):
- return fctxs.get(path)
+ if path in filecontent:
+ return context.memfilectx(repo, cx, path,
+ filecontent[path])
+ return None
if len(ps) == 0 or ps[0] < 0:
pars = [None, None]
@@ -296,7 +300,7 @@
msg %= indent_string, exc.version, len(data)
ui.write(msg)
else:
- msg = "%sversion: %s (%d bytes)\n"
+ msg = "%sversion: %d (%d bytes)\n"
msg %= indent_string, version, len(data)
ui.write(msg)
fm = ui.formatter('debugobsolete', opts)
@@ -360,6 +364,25 @@
return _debugbundle2(ui, gen, all=all, **opts)
_debugchangegroup(ui, gen, all=all, **opts)
+@command('debugcapabilities',
+ [], _('PATH'),
+ norepo=True)
+def debugcapabilities(ui, path, **opts):
+ """lists the capabilities of a remote peer"""
+ opts = pycompat.byteskwargs(opts)
+ peer = hg.peer(ui, opts, path)
+ caps = peer.capabilities()
+ ui.write(('Main capabilities:\n'))
+ for c in sorted(caps):
+ ui.write((' %s\n') % c)
+ b2caps = bundle2.bundle2caps(peer)
+ if b2caps:
+ ui.write(('Bundle2 capabilities:\n'))
+ for key, values in sorted(b2caps.iteritems()):
+ ui.write((' %s\n') % key)
+ for v in values:
+ ui.write((' %s\n') % v)
+
@command('debugcheckstate', [], '')
def debugcheckstate(ui, repo):
"""validate the correctness of the current dirstate"""
@@ -569,11 +592,23 @@
the delta chain for this revision
:``extraratio``: extradist divided by chainsize; another representation of
how much unrelated data is needed to load this delta chain
+
+ If the repository is configured to use sparse reads, additional keywords
+ are available:
+
+ :``readsize``: total size of data read from the disk for a revision
+ (sum of the sizes of all the blocks)
+ :``largestblock``: size of the largest block of data read from the disk
+ :``readdensity``: density of useful bytes in the data read from the disk
+ :``srchunks``: how many data hunks are needed to read the whole revision
+
+ Sparse reads can be enabled with experimental.sparse-read = True
"""
opts = pycompat.byteskwargs(opts)
r = cmdutil.openrevlog(repo, 'debugdeltachain', file_, opts)
index = r.index
generaldelta = r.version & revlog.FLAG_GENERALDELTA
+ withsparseread = getattr(r, '_withsparseread', False)
def revinfo(rev):
e = index[rev]
@@ -609,15 +644,20 @@
fm.plain(' rev chain# chainlen prev delta '
'size rawsize chainsize ratio lindist extradist '
- 'extraratio\n')
+ 'extraratio')
+ if withsparseread:
+ fm.plain(' readsize largestblk rddensity srchunks')
+ fm.plain('\n')
chainbases = {}
for rev in r:
comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
chainbase = chain[0]
chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
- basestart = r.start(chainbase)
- revstart = r.start(rev)
+ start = r.start
+ length = r.length
+ basestart = start(chainbase)
+ revstart = start(rev)
lineardist = revstart + comp - basestart
extradist = lineardist - chainsize
try:
@@ -632,7 +672,7 @@
fm.write('rev chainid chainlen prevrev deltatype compsize '
'uncompsize chainsize chainratio lindist extradist '
'extraratio',
- '%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f\n',
+ '%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
rev, chainid, len(chain), prevrev, deltatype, comp,
uncomp, chainsize, chainratio, lineardist, extradist,
extraratio,
@@ -641,6 +681,29 @@
uncompsize=uncomp, chainsize=chainsize,
chainratio=chainratio, lindist=lineardist,
extradist=extradist, extraratio=extraratio)
+ if withsparseread:
+ readsize = 0
+ largestblock = 0
+ srchunks = 0
+
+ for revschunk in revlog._slicechunk(r, chain):
+ srchunks += 1
+ blkend = start(revschunk[-1]) + length(revschunk[-1])
+ blksize = blkend - start(revschunk[0])
+
+ readsize += blksize
+ if largestblock < blksize:
+ largestblock = blksize
+
+ readdensity = float(chainsize) / float(readsize)
+
+ fm.write('readsize largestblock readdensity srchunks',
+ ' %10d %10d %9.5f %8d',
+ readsize, largestblock, readdensity, srchunks,
+ readsize=readsize, largestblock=largestblock,
+ readdensity=readdensity, srchunks=srchunks)
+
+ fm.plain('\n')
fm.end()
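
The readsize/largestblock/readdensity/srchunks columns added above are computed by slicing the delta chain into runs of on-disk-adjacent revisions and comparing the bytes that must be read with the bytes that actually belong to the chain. A sketch of that arithmetic over made-up (start, length) offsets, not the real revlog API:

    def sparsereadstats(offsets, chainsize, chunks):
        """offsets: {rev: (start, length)} on disk; chunks: list of rev runs."""
        readsize = 0
        largestblock = 0
        for chunk in chunks:
            firststart = offsets[chunk[0]][0]
            laststart, lastlen = offsets[chunk[-1]]
            blksize = (laststart + lastlen) - firststart
            readsize += blksize
            largestblock = max(largestblock, blksize)
        readdensity = float(chainsize) / float(readsize)
        return readsize, largestblock, readdensity, len(chunks)

    offsets = {0: (0, 100), 1: (100, 40), 7: (900, 60)}
    chainsize = 100 + 40 + 60        # bytes that belong to the chain itself
    chunks = [[0, 1], [7]]           # rev 7 lives far away, read separately
    print(sparsereadstats(offsets, chainsize, chunks))
    # (200, 140, 1.0, 2)
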
@@ -665,8 +728,9 @@
elif nodates:
timestr = 'set '
else:
- timestr = time.strftime("%Y-%m-%d %H:%M:%S ",
+ timestr = time.strftime(r"%Y-%m-%d %H:%M:%S ",
time.localtime(ent[3]))
+ timestr = encoding.strtolocal(timestr)
if ent[1] & 0o20000:
mode = 'lnk'
else:
@@ -679,24 +743,21 @@
[('', 'old', None, _('use old-style discovery')),
('', 'nonheads', None,
_('use old-style discovery with non-heads included')),
+ ('', 'rev', [], _('restrict discovery to this set of revs')),
] + cmdutil.remoteopts,
- _('[-l REV] [-r REV] [-b BRANCH]... [OTHER]'))
+ _('[--rev REV] [OTHER]'))
def debugdiscovery(ui, repo, remoteurl="default", **opts):
"""runs the changeset discovery protocol in isolation"""
opts = pycompat.byteskwargs(opts)
- remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl),
- opts.get('branch'))
+ remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl))
remote = hg.peer(repo, opts, remoteurl)
ui.status(_('comparing with %s\n') % util.hidepassword(remoteurl))
# make sure tests are repeatable
random.seed(12323)
- def doit(localheads, remoteheads, remote=remote):
+ def doit(pushedrevs, remoteheads, remote=remote):
if opts.get('old'):
- if localheads:
- raise error.Abort('cannot use localheads with old style '
- 'discovery')
if not util.safehasattr(remote, 'branches'):
# enable in-client legacy support
remote = localrepo.locallegacypeer(remote.local())
@@ -710,7 +771,12 @@
all = dag.ancestorset(dag.internalizeall(common))
common = dag.externalizeall(dag.headsetofconnecteds(all))
else:
- common, any, hds = setdiscovery.findcommonheads(ui, repo, remote)
+ nodes = None
+ if pushedrevs:
+ revs = scmutil.revrange(repo, pushedrevs)
+ nodes = [repo[r].node() for r in revs]
+ common, any, hds = setdiscovery.findcommonheads(ui, repo, remote,
+ ancestorsof=nodes)
common = set(common)
rheads = set(hds)
lheads = set(repo.heads())
@@ -721,26 +787,33 @@
elif rheads <= common:
ui.write(("remote is subset\n"))
- serverlogs = opts.get('serverlog')
- if serverlogs:
- for filename in serverlogs:
- with open(filename, 'r') as logfile:
- line = logfile.readline()
- while line:
- parts = line.strip().split(';')
- op = parts[1]
- if op == 'cg':
- pass
- elif op == 'cgss':
- doit(parts[2].split(' '), parts[3].split(' '))
- elif op == 'unb':
- doit(parts[3].split(' '), parts[2].split(' '))
- line = logfile.readline()
- else:
- remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches,
- opts.get('remote_head'))
- localrevs = opts.get('local_head')
- doit(localrevs, remoterevs)
+ remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
+ localrevs = opts['rev']
+ doit(localrevs, remoterevs)
+
+_chunksize = 4 << 10
+
+@command('debugdownload',
+ [
+ ('o', 'output', '', _('path')),
+ ],
+ optionalrepo=True)
+def debugdownload(ui, repo, url, output=None, **opts):
+ """download a resource using Mercurial logic and config
+ """
+ fh = urlmod.open(ui, url, output)
+
+ dest = ui
+ if output:
+ dest = open(output, "wb", _chunksize)
+ try:
+ data = fh.read(_chunksize)
+ while data:
+ dest.write(data)
+ data = fh.read(_chunksize)
+ finally:
+ if output:
+ dest.close()
@command('debugextensions', cmdutil.formatteropts, [], norepo=True)
def debugextensions(ui, **opts):
@@ -801,9 +874,74 @@
for f in ctx.getfileset(expr):
ui.write("%s\n" % f)
+@command('debugformat',
+ [] + cmdutil.formatteropts,
+ _(''))
+def debugformat(ui, repo, **opts):
+ """display format information about the current repository
+
+ Use --verbose to get extra information about the current config value
+ and the Mercurial default."""
+ opts = pycompat.byteskwargs(opts)
+ maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
+ maxvariantlength = max(len('format-variant'), maxvariantlength)
+
+ def makeformatname(name):
+ return '%s:' + (' ' * (maxvariantlength - len(name)))
+
+ fm = ui.formatter('debugformat', opts)
+ if fm.isplain():
+ def formatvalue(value):
+ if util.safehasattr(value, 'startswith'):
+ return value
+ if value:
+ return 'yes'
+ else:
+ return 'no'
+ else:
+ formatvalue = pycompat.identity
+
+ fm.plain('format-variant')
+ fm.plain(' ' * (maxvariantlength - len('format-variant')))
+ fm.plain(' repo')
+ if ui.verbose:
+ fm.plain(' config default')
+ fm.plain('\n')
+ for fv in upgrade.allformatvariant:
+ fm.startitem()
+ repovalue = fv.fromrepo(repo)
+ configvalue = fv.fromconfig(repo)
+
+ if repovalue != configvalue:
+ namelabel = 'formatvariant.name.mismatchconfig'
+ repolabel = 'formatvariant.repo.mismatchconfig'
+ elif repovalue != fv.default:
+ namelabel = 'formatvariant.name.mismatchdefault'
+ repolabel = 'formatvariant.repo.mismatchdefault'
+ else:
+ namelabel = 'formatvariant.name.uptodate'
+ repolabel = 'formatvariant.repo.uptodate'
+
+ fm.write('name', makeformatname(fv.name), fv.name,
+ label=namelabel)
+ fm.write('repo', ' %3s', formatvalue(repovalue),
+ label=repolabel)
+ if fv.default != configvalue:
+ configlabel = 'formatvariant.config.special'
+ else:
+ configlabel = 'formatvariant.config.default'
+ fm.condwrite(ui.verbose, 'config', ' %6s', formatvalue(configvalue),
+ label=configlabel)
+ fm.condwrite(ui.verbose, 'default', ' %7s', formatvalue(fv.default),
+ label='formatvariant.default')
+ fm.plain('\n')
+ fm.end()
+
@command('debugfsinfo', [], _('[PATH]'), norepo=True)
def debugfsinfo(ui, path="."):
"""show information detected about current filesystem"""
+ ui.write(('path: %s\n') % path)
+ ui.write(('mounted on: %s\n') % (util.getfsmountpoint(path) or '(unknown)'))
ui.write(('exec: %s\n') % (util.checkexec(path) and 'yes' or 'no'))
ui.write(('fstype: %s\n') % (util.getfstype(path) or '(unknown)'))
ui.write(('symlink: %s\n') % (util.checklink(path) and 'yes' or 'no'))
@@ -1066,6 +1204,11 @@
fm.formatlist([e.name() for e in wirecompengines
if e.wireprotosupport()],
name='compengine', fmt='%s', sep=', '))
+ re2 = 'missing'
+ if util._re2:
+ re2 = 'available'
+ fm.plain(_('checking "re2" regexp engine (%s)\n') % re2)
+ fm.data(re2=bool(util._re2))
# templates
p = templater.templatepaths()
@@ -1155,7 +1298,10 @@
@command('debuglocks',
[('L', 'force-lock', None, _('free the store lock (DANGEROUS)')),
('W', 'force-wlock', None,
- _('free the working state lock (DANGEROUS)'))],
+ _('free the working state lock (DANGEROUS)')),
+ ('s', 'set-lock', None, _('set the store lock until stopped')),
+ ('S', 'set-wlock', None,
+ _('set the working state lock until stopped'))],
_('[OPTION]...'))
def debuglocks(ui, repo, **opts):
"""show or modify state of locks
@@ -1174,6 +1320,10 @@
instance, on a shared filesystem). Removing locks may also be
blocked by filesystem permissions.
+ Setting a lock will prevent other commands from changing the data.
+ The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
+ The set locks are removed when the command exits.
+
Returns 0 if no locks are held.
"""
@@ -1182,9 +1332,27 @@
repo.svfs.unlink('lock')
if opts.get(r'force_wlock'):
repo.vfs.unlink('wlock')
- if opts.get(r'force_lock') or opts.get(r'force_lock'):
+ if opts.get(r'force_lock') or opts.get(r'force_wlock'):
return 0
+ locks = []
+ try:
+ if opts.get(r'set_wlock'):
+ try:
+ locks.append(repo.wlock(False))
+ except error.LockHeld:
+ raise error.Abort(_('wlock is already held'))
+ if opts.get(r'set_lock'):
+ try:
+ locks.append(repo.lock(False))
+ except error.LockHeld:
+ raise error.Abort(_('lock is already held'))
+ if len(locks):
+ ui.promptchoice(_("ready to release the lock (y)? $$ &Yes"))
+ return 0
+ finally:
+ release(*locks)
+
now = time.time()
held = 0
@@ -2170,15 +2338,11 @@
cache = {}
ctx2str = str
node2str = short
- if ui.debug():
- def ctx2str(ctx):
- return ctx.hex()
- node2str = hex
for rev in scmutil.revrange(repo, revs):
ctx = repo[rev]
ui.write('%s\n'% ctx2str(ctx))
for succsset in obsutil.successorssets(repo, ctx.node(),
- closest=opts['closest'],
+ closest=opts[r'closest'],
cache=cache):
if succsset:
ui.write(' ')
@@ -2228,8 +2392,8 @@
ui.note(("* expanded:\n"), templater.prettyformat(newtree), '\n')
if revs is None:
- t = formatter.maketemplater(ui, tmpl)
- props['ui'] = ui
+ tres = formatter.templateresources(ui, repo)
+ t = formatter.maketemplater(ui, tmpl, resources=tres)
ui.write(t.render(props))
else:
displayer = cmdutil.makelogtemplater(ui, repo, tmpl)
@@ -2304,6 +2468,7 @@
for k, v in opts.iteritems():
if v:
args[k] = v
+ args = pycompat.strkwargs(args)
# run twice to check that we don't mess up the stream for the next command
res1 = repo.debugwireargs(*vals, **args)
res2 = repo.debugwireargs(*vals, **args)
--- a/mercurial/dirstate.py Mon Jan 08 16:07:51 2018 -0800
+++ b/mercurial/dirstate.py Mon Jan 22 17:53:02 2018 -0500
@@ -80,6 +80,7 @@
self._plchangecallbacks = {}
self._origpl = None
self._updatedfiles = set()
+ self._mapcls = dirstatemap
@contextlib.contextmanager
def parentchange(self):
@@ -127,9 +128,8 @@
@propertycache
def _map(self):
- '''Return the dirstate contents as a map from filename to
- (state, mode, size, time).'''
- self._map = dirstatemap(self._ui, self._opener, self._root)
+ """Return the dirstate contents (see documentation for dirstatemap)."""
+ self._map = self._mapcls(self._ui, self._opener, self._root)
return self._map
@property
@@ -158,8 +158,8 @@
def _pl(self):
return self._map.parents()
- def dirs(self):
- return self._map.dirs
+ def hasdir(self, d):
+ return self._map.hastrackeddir(d)
@rootcache('.hgignore')
def _ignore(self):
@@ -387,40 +387,23 @@
def copies(self):
return self._map.copymap
- def _droppath(self, f):
- if self[f] not in "?r" and "dirs" in self._map.__dict__:
- self._map.dirs.delpath(f)
-
- if "filefoldmap" in self._map.__dict__:
- normed = util.normcase(f)
- if normed in self._map.filefoldmap:
- del self._map.filefoldmap[normed]
-
- self._updatedfiles.add(f)
-
def _addpath(self, f, state, mode, size, mtime):
oldstate = self[f]
if state == 'a' or oldstate == 'r':
scmutil.checkfilename(f)
- if f in self._map.dirs:
+ if self._map.hastrackeddir(f):
raise error.Abort(_('directory %r already in dirstate') % f)
# shadows
for d in util.finddirs(f):
- if d in self._map.dirs:
+ if self._map.hastrackeddir(d):
break
entry = self._map.get(d)
if entry is not None and entry[0] != 'r':
raise error.Abort(
_('file %r in dirstate clashes with %r') % (d, f))
- if oldstate in "?r" and "dirs" in self._map.__dict__:
- self._map.dirs.addpath(f)
self._dirty = True
self._updatedfiles.add(f)
- self._map[f] = dirstatetuple(state, mode, size, mtime)
- if state != 'n' or mtime == -1:
- self._map.nonnormalset.add(f)
- if size == -2:
- self._map.otherparentset.add(f)
+ self._map.addfile(f, oldstate, state, mode, size, mtime)
def normal(self, f):
'''Mark a file normal and clean.'''
@@ -458,8 +441,6 @@
return
self._addpath(f, 'n', 0, -1, -1)
self._map.copymap.pop(f, None)
- if f in self._map.nonnormalset:
- self._map.nonnormalset.remove(f)
def otherparent(self, f):
'''Mark as coming from the other parent, always dirty.'''
@@ -482,7 +463,7 @@
def remove(self, f):
'''Mark a file removed.'''
self._dirty = True
- self._droppath(f)
+ oldstate = self[f]
size = 0
if self._pl[1] != nullid:
entry = self._map.get(f)
@@ -493,8 +474,8 @@
elif entry[0] == 'n' and entry[2] == -2: # other parent
size = -2
self._map.otherparentset.add(f)
- self._map[f] = dirstatetuple('r', 0, size, 0)
- self._map.nonnormalset.add(f)
+ self._updatedfiles.add(f)
+ self._map.removefile(f, oldstate, size)
if size == 0:
self._map.copymap.pop(f, None)
@@ -506,12 +487,10 @@
def drop(self, f):
'''Drop a file from the dirstate'''
- if f in self._map:
+ oldstate = self[f]
+ if self._map.dropfile(f, oldstate):
self._dirty = True
- self._droppath(f)
- del self._map[f]
- if f in self._map.nonnormalset:
- self._map.nonnormalset.remove(f)
+ self._updatedfiles.add(f)
self._map.copymap.pop(f, None)
def _discoverpath(self, path, normed, ignoremissing, exists, storemap):
@@ -635,12 +614,7 @@
# emulate dropping timestamp in 'parsers.pack_dirstate'
now = _getfsnow(self._opener)
- dmap = self._map
- for f in self._updatedfiles:
- e = dmap.get(f)
- if e is not None and e[0] == 'n' and e[3] == now:
- dmap[f] = dirstatetuple(e[0], e[1], e[2], -1)
- self._map.nonnormalset.add(f)
+ self._map.clearambiguoustimes(self._updatedfiles, now)
# emulate that all 'dirstate.normal' results are written out
self._lastnormaltime = 0
@@ -797,7 +771,6 @@
results = dict.fromkeys(subrepos)
results['.hg'] = None
- alldirs = None
for ff in files:
# constructing the foldmap is expensive, so don't do it for the
# common case where files is ['.']
@@ -828,9 +801,7 @@
if nf in dmap: # does it exactly match a missing file?
results[nf] = None
else: # does it match a missing directory?
- if alldirs is None:
- alldirs = util.dirs(dmap._map)
- if nf in alldirs:
+ if self._map.hasdir(nf):
if matchedir:
matchedir(nf)
notfoundadd(nf)
@@ -1198,6 +1169,39 @@
self._opener.unlink(backupname)
class dirstatemap(object):
+ """Map encapsulating the dirstate's contents.
+
+ The dirstate contains the following state:
+
+ - `identity` is the identity of the dirstate file, which can be used to
+ detect when changes have occurred to the dirstate file.
+
+ - `parents` is a pair containing the parents of the working copy. The
+ parents are updated by calling `setparents`.
+
+ - the state map maps filenames to tuples of (state, mode, size, mtime),
+ where state is a single character representing 'normal', 'added',
+ 'removed', or 'merged'. It is read by treating the dirstate as a
+ dict. File state is updated by calling the `addfile`, `removefile` and
+ `dropfile` methods.
+
+ - `copymap` maps destination filenames to their source filename.
+
+ The dirstate also provides the following views onto the state:
+
+ - `nonnormalset` is a set of the filenames that have state other
+ than 'normal', or are normal but have an mtime of -1 ('normallookup').
+
+ - `otherparentset` is a set of the filenames that are marked as coming
+ from the second parent when the dirstate is currently being merged.
+
+ - `filefoldmap` is a dict mapping normalized filenames to the denormalized
+ form that they appear as in the dirstate.
+
+ - `dirfoldmap` is a dict mapping normalized directory names to the
+ denormalized form that they appear as in the dirstate.
+ """
+
def __init__(self, ui, opener, root):
self._ui = ui
self._opener = opener
@@ -1226,6 +1230,12 @@
self._map.clear()
self.copymap.clear()
self.setparents(nullid, nullid)
+ util.clearcachedproperty(self, "_dirs")
+ util.clearcachedproperty(self, "_alldirs")
+ util.clearcachedproperty(self, "filefoldmap")
+ util.clearcachedproperty(self, "dirfoldmap")
+ util.clearcachedproperty(self, "nonnormalset")
+ util.clearcachedproperty(self, "otherparentset")
def iteritems(self):
return self._map.iteritems()
@@ -1242,15 +1252,9 @@
def __contains__(self, key):
return key in self._map
- def __setitem__(self, key, value):
- self._map[key] = value
-
def __getitem__(self, key):
return self._map[key]
- def __delitem__(self, key):
- del self._map[key]
-
def keys(self):
return self._map.keys()
@@ -1258,6 +1262,60 @@
"""Loads the underlying data, if it's not already loaded"""
self._map
+ def addfile(self, f, oldstate, state, mode, size, mtime):
+ """Add a tracked file to the dirstate."""
+ if oldstate in "?r" and "_dirs" in self.__dict__:
+ self._dirs.addpath(f)
+ if oldstate == "?" and "_alldirs" in self.__dict__:
+ self._alldirs.addpath(f)
+ self._map[f] = dirstatetuple(state, mode, size, mtime)
+ if state != 'n' or mtime == -1:
+ self.nonnormalset.add(f)
+ if size == -2:
+ self.otherparentset.add(f)
+
+ def removefile(self, f, oldstate, size):
+ """
+ Mark a file as removed in the dirstate.
+
+ The `size` parameter is used to store sentinel values that indicate
+ the file's previous state. In the future, we should refactor this
+ to be more explicit about what that state is.
+ """
+ if oldstate not in "?r" and "_dirs" in self.__dict__:
+ self._dirs.delpath(f)
+ if oldstate == "?" and "_alldirs" in self.__dict__:
+ self._alldirs.addpath(f)
+ if "filefoldmap" in self.__dict__:
+ normed = util.normcase(f)
+ self.filefoldmap.pop(normed, None)
+ self._map[f] = dirstatetuple('r', 0, size, 0)
+ self.nonnormalset.add(f)
+
+ def dropfile(self, f, oldstate):
+ """
+ Remove a file from the dirstate. Returns True if the file was
+ previously recorded.
+ """
+ exists = self._map.pop(f, None) is not None
+ if exists:
+ if oldstate != "r" and "_dirs" in self.__dict__:
+ self._dirs.delpath(f)
+ if "_alldirs" in self.__dict__:
+ self._alldirs.delpath(f)
+ if "filefoldmap" in self.__dict__:
+ normed = util.normcase(f)
+ self.filefoldmap.pop(normed, None)
+ self.nonnormalset.discard(f)
+ return exists
+
+ def clearambiguoustimes(self, files, now):
+ for f in files:
+ e = self.get(f)
+ if e is not None and e[0] == 'n' and e[3] == now:
+ self._map[f] = dirstatetuple(e[0], e[1], e[2], -1)
+ self.nonnormalset.add(f)
+
def nonnormalentries(self):
'''Compute the nonnormal dirstate entries from the dmap'''
try:
@@ -1293,13 +1351,28 @@
f['.'] = '.' # prevents useless util.fspath() invocation
return f
+ def hastrackeddir(self, d):
+ """
+ Returns True if the dirstate contains a tracked (not removed) file
+ in this directory.
+ """
+ return d in self._dirs
+
+ def hasdir(self, d):
+ """
+ Returns True if the dirstate contains a file (tracked or removed)
+ in this directory.
+ """
+ return d in self._alldirs
+
@propertycache
- def dirs(self):
- """Returns a set-like object containing all the directories in the
- current dirstate.
- """
+ def _dirs(self):
return util.dirs(self._map, 'r')
+ @propertycache
+ def _alldirs(self):
+ return util.dirs(self._map)
+
def _opendirstatefile(self):
fp, mode = txnutil.trypending(self._root, self._opener, self._filename)
if self._pendingmode is not None and self._pendingmode != mode:
@@ -1387,8 +1460,6 @@
# Avoid excess attribute lookups by fast pathing certain checks
self.__contains__ = self._map.__contains__
self.__getitem__ = self._map.__getitem__
- self.__setitem__ = self._map.__setitem__
- self.__delitem__ = self._map.__delitem__
self.get = self._map.get
def write(self, st, now):
@@ -1419,6 +1490,6 @@
def dirfoldmap(self):
f = {}
normcase = util.normcase
- for name in self.dirs:
+ for name in self._dirs:
f[normcase(name)] = name
return f
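
The dirstatemap methods introduced above (addfile, removefile, dropfile, clearambiguoustimes) keep the derived views such as nonnormalset and the directory multisets consistent, but only touch a view when it has already been materialized; otherwise it is rebuilt lazily on first use. A much-reduced sketch of that pattern with a single lazily built tracked-directory counter (tinydirstatemap is a hypothetical name, far simpler than the real class):

    import collections
    import posixpath

    class tinydirstatemap(object):
        """Hypothetical, heavily reduced sketch of the dirstatemap idea."""
        def __init__(self):
            self._map = {}           # path -> (state, mode, size, mtime)
            self._dirs = None        # lazily built {directory: refcount}

        def _adjustdirs(self, path, delta):
            if self._dirs is None:   # view not materialized: nothing to update
                return
            d = posixpath.dirname(path)
            while d:
                self._dirs[d] += delta
                if self._dirs[d] == 0:
                    del self._dirs[d]
                d = posixpath.dirname(d)

        def addfile(self, f, oldstate, state, mode=0, size=0, mtime=0):
            if oldstate in '?r':     # the file becomes tracked
                self._adjustdirs(f, +1)
            self._map[f] = (state, mode, size, mtime)

        def dropfile(self, f, oldstate):
            existed = self._map.pop(f, None) is not None
            if existed and oldstate != 'r':
                self._adjustdirs(f, -1)
            return existed

        def hastrackeddir(self, d):
            if self._dirs is None:   # rebuild the view on first use
                self._dirs = collections.Counter()
                for path, e in self._map.items():
                    if e[0] != 'r':
                        parent = posixpath.dirname(path)
                        while parent:
                            self._dirs[parent] += 1
                            parent = posixpath.dirname(parent)
            return d in self._dirs

    dm = tinydirstatemap()
    dm.addfile('a/b/file.txt', '?', 'n')
    print(dm.hastrackeddir('a/b'), dm.hastrackeddir('c'))   # True False
    dm.dropfile('a/b/file.txt', 'n')
    print(dm.hastrackeddir('a/b'))                          # False
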
--- a/mercurial/discovery.py Mon Jan 08 16:07:51 2018 -0800
+++ b/mercurial/discovery.py Mon Jan 22 17:53:02 2018 -0500
@@ -21,12 +21,13 @@
branchmap,
error,
phases,
+ scmutil,
setdiscovery,
treediscovery,
util,
)
-def findcommonincoming(repo, remote, heads=None, force=False):
+def findcommonincoming(repo, remote, heads=None, force=False, ancestorsof=None):
"""Return a tuple (common, anyincoming, heads) used to identify the common
subset of nodes between repo and remote.
@@ -37,6 +38,9 @@
changegroupsubset. No code except for pull should be relying on this fact
any longer.
"heads" is either the supplied heads, or else the remote's heads.
+ "ancestorsof" if not None, restrict the discovery to a subset defined by
+ these nodes. Changeset outside of this set won't be considered (and
+ won't appears in "common")
If you pass heads and they are all known locally, the response lists just
these heads in "common" and in "heads".
@@ -59,7 +63,8 @@
return (heads, False, heads)
res = setdiscovery.findcommonheads(repo.ui, repo, remote,
- abortwhenunrelated=not force)
+ abortwhenunrelated=not force,
+ ancestorsof=ancestorsof)
common, anyinc, srvheads = res
return (list(common), anyinc, heads or list(srvheads))
@@ -141,7 +146,8 @@
# get common set if not provided
if commoninc is None:
- commoninc = findcommonincoming(repo, other, force=force)
+ commoninc = findcommonincoming(repo, other, force=force,
+ ancestorsof=onlyheads)
og.commonheads, _any, _hds = commoninc
# compute outgoing
@@ -365,11 +371,8 @@
if None in unsyncedheads:
# old remote, no heads data
heads = None
- elif len(unsyncedheads) <= 4 or repo.ui.verbose:
- heads = ' '.join(short(h) for h in unsyncedheads)
else:
- heads = (' '.join(short(h) for h in unsyncedheads[:4]) +
- ' ' + _("and %s others") % (len(unsyncedheads) - 4))
+ heads = scmutil.nodesummaries(repo, unsyncedheads)
if heads is None:
repo.ui.status(_("remote has heads that are "
"not known locally\n"))
--- a/mercurial/dispatch.py Mon Jan 08 16:07:51 2018 -0800
+++ b/mercurial/dispatch.py Mon Jan 22 17:53:02 2018 -0500
@@ -55,7 +55,7 @@
self.fout = fout
self.ferr = ferr
- # remember options pre-parsed by _earlyreqopt*()
+ # remember options pre-parsed by _earlyparseopts()
self.earlyoptions = {}
# reposetups which run before extensions, useful for chg to pre-fill
@@ -96,10 +96,16 @@
err = e
status = -1
if util.safehasattr(req.ui, 'ferr'):
- if err is not None and err.errno != errno.EPIPE:
- req.ui.ferr.write('abort: %s\n' %
- encoding.strtolocal(err.strerror))
- req.ui.ferr.flush()
+ try:
+ if err is not None and err.errno != errno.EPIPE:
+ req.ui.ferr.write('abort: %s\n' %
+ encoding.strtolocal(err.strerror))
+ req.ui.ferr.flush()
+ # There's not much we can do about an I/O error here. So (possibly)
+ # change the status code and move on.
+ except IOError:
+ status = -1
+
sys.exit(status & 255)
def _initstdio():
@@ -150,9 +156,8 @@
try:
if not req.ui:
req.ui = uimod.ui.load()
- if req.ui.plain('strictflags'):
- req.earlyoptions.update(_earlyparseopts(req.args))
- if _earlyreqoptbool(req, 'traceback', ['--traceback']):
+ req.earlyoptions.update(_earlyparseopts(req.ui, req.args))
+ if req.earlyoptions['traceback']:
req.ui.setconfig('ui', 'traceback', 'on', '--traceback')
# set ui streams from the request
@@ -201,7 +206,8 @@
req.ui.flush()
if req.ui.logblockedtimes:
req.ui._blockedtimes['command_duration'] = duration * 1000
- req.ui.log('uiblocked', 'ui blocked ms', **req.ui._blockedtimes)
+ req.ui.log('uiblocked', 'ui blocked ms',
+ **pycompat.strkwargs(req.ui._blockedtimes))
req.ui.log("commandfinish", "%s exited %d after %0.2f seconds\n",
msg, ret or 0, duration)
try:
@@ -266,8 +272,7 @@
# read --config before doing anything else
# (e.g. to change trust settings for reading .hg/hgrc)
- cfgs = _parseconfig(req.ui,
- _earlyreqopt(req, 'config', ['--config']))
+ cfgs = _parseconfig(req.ui, req.earlyoptions['config'])
if req.repo:
# copy configs that were passed on the cmdline (--config) to
@@ -281,7 +286,7 @@
if not debugger or ui.plain():
# if we are in HGPLAIN mode, then disable custom debugging
debugger = 'pdb'
- elif _earlyreqoptbool(req, 'debugger', ['--debugger']):
+ elif req.earlyoptions['debugger']:
# This import can be slow for fancy debuggers, so only
# do it when absolutely necessary, i.e. when actual
# debugging has been requested
@@ -295,7 +300,7 @@
debugmortem[debugger] = debugmod.post_mortem
# enter the debugger before command execution
- if _earlyreqoptbool(req, 'debugger', ['--debugger']):
+ if req.earlyoptions['debugger']:
ui.warn(_("entering debugger - "
"type c to continue starting hg or h for help\n"))
@@ -311,7 +316,7 @@
ui.flush()
except: # re-raises
# enter the debugger when we hit an exception
- if _earlyreqoptbool(req, 'debugger', ['--debugger']):
+ if req.earlyoptions['debugger']:
traceback.print_exc()
debugmortem[debugger](sys.exc_info()[2])
raise
@@ -410,7 +415,7 @@
# tokenize each argument into exactly one word.
replacemap['"$@"'] = ' '.join(util.shellquote(arg) for arg in args)
# escape '\$' for regex
- regex = '|'.join(replacemap.keys()).replace('$', r'\$')
+ regex = '|'.join(replacemap.keys()).replace('$', br'\$')
r = re.compile(regex)
return r.sub(lambda x: replacemap[x.group()], cmd)
@@ -452,10 +457,10 @@
return m.group()
else:
ui.debug("No argument found for substitution "
- "of %i variable in alias '%s' definition."
+ "of %i variable in alias '%s' definition.\n"
% (int(m.groups()[0]), self.name))
return ''
- cmd = re.sub(r'\$(\d+|\$)', _checkvar, self.definition[1:])
+ cmd = re.sub(br'\$(\d+|\$)', _checkvar, self.definition[1:])
cmd = aliasinterpolate(self.name, args, cmd)
return ui.system(cmd, environ=env,
blockedtag='alias_%s' % self.name)
@@ -468,16 +473,15 @@
self.badalias = (_("error in definition for alias '%s': %s")
% (self.name, inst))
return
+ earlyopts, args = _earlysplitopts(args)
+ if earlyopts:
+ self.badalias = (_("error in definition for alias '%s': %s may "
+ "only be given on the command line")
+ % (self.name, '/'.join(zip(*earlyopts)[0])))
+ return
self.cmdname = cmd = args.pop(0)
self.givenargs = args
- for invalidarg in commands.earlyoptflags:
- if _earlygetopt([invalidarg], args):
- self.badalias = (_("error in definition for alias '%s': %s may "
- "only be given on the command line")
- % (self.name, invalidarg))
- return
-
try:
tableentry = cmdutil.findcmd(cmd, cmdtable, False)[1]
if len(tableentry) > 2:
@@ -646,139 +650,20 @@
return configs
-def _earlyparseopts(args):
+def _earlyparseopts(ui, args):
options = {}
fancyopts.fancyopts(args, commands.globalopts, options,
- gnu=False, early=True)
+ gnu=not ui.plain('strictflags'), early=True,
+ optaliases={'repository': ['repo']})
return options
-def _earlygetopt(aliases, args, strip=True):
- """Return list of values for an option (or aliases).
-
- The values are listed in the order they appear in args.
- The options and values are removed from args if strip=True.
-
- >>> args = [b'x', b'--cwd', b'foo', b'y']
- >>> _earlygetopt([b'--cwd'], args), args
- (['foo'], ['x', 'y'])
-
- >>> args = [b'x', b'--cwd=bar', b'y']
- >>> _earlygetopt([b'--cwd'], args), args
- (['bar'], ['x', 'y'])
-
- >>> args = [b'x', b'--cwd=bar', b'y']
- >>> _earlygetopt([b'--cwd'], args, strip=False), args
- (['bar'], ['x', '--cwd=bar', 'y'])
-
- >>> args = [b'x', b'-R', b'foo', b'y']
- >>> _earlygetopt([b'-R'], args), args
- (['foo'], ['x', 'y'])
-
- >>> args = [b'x', b'-R', b'foo', b'y']
- >>> _earlygetopt([b'-R'], args, strip=False), args
- (['foo'], ['x', '-R', 'foo', 'y'])
-
- >>> args = [b'x', b'-Rbar', b'y']
- >>> _earlygetopt([b'-R'], args), args
- (['bar'], ['x', 'y'])
-
- >>> args = [b'x', b'-Rbar', b'y']
- >>> _earlygetopt([b'-R'], args, strip=False), args
- (['bar'], ['x', '-Rbar', 'y'])
-
- >>> args = [b'x', b'-R=bar', b'y']
- >>> _earlygetopt([b'-R'], args), args
- (['=bar'], ['x', 'y'])
-
- >>> args = [b'x', b'-R', b'--', b'y']
- >>> _earlygetopt([b'-R'], args), args
- ([], ['x', '-R', '--', 'y'])
- """
- try:
- argcount = args.index("--")
- except ValueError:
- argcount = len(args)
- shortopts = [opt for opt in aliases if len(opt) == 2]
- values = []
- pos = 0
- while pos < argcount:
- fullarg = arg = args[pos]
- equals = -1
- if arg.startswith('--'):
- equals = arg.find('=')
- if equals > -1:
- arg = arg[:equals]
- if arg in aliases:
- if equals > -1:
- values.append(fullarg[equals + 1:])
- if strip:
- del args[pos]
- argcount -= 1
- else:
- pos += 1
- else:
- if pos + 1 >= argcount:
- # ignore and let getopt report an error if there is no value
- break
- values.append(args[pos + 1])
- if strip:
- del args[pos:pos + 2]
- argcount -= 2
- else:
- pos += 2
- elif arg[:2] in shortopts:
- # short option can have no following space, e.g. hg log -Rfoo
- values.append(args[pos][2:])
- if strip:
- del args[pos]
- argcount -= 1
- else:
- pos += 1
- else:
- pos += 1
- return values
-
-def _earlyreqopt(req, name, aliases):
- """Peek a list option without using a full options table"""
- if req.ui.plain('strictflags'):
- return req.earlyoptions[name]
- values = _earlygetopt(aliases, req.args, strip=False)
- req.earlyoptions[name] = values
- return values
-
-def _earlyreqoptstr(req, name, aliases):
- """Peek a string option without using a full options table"""
- if req.ui.plain('strictflags'):
- return req.earlyoptions[name]
- value = (_earlygetopt(aliases, req.args, strip=False) or [''])[-1]
- req.earlyoptions[name] = value
- return value
-
-def _earlyreqoptbool(req, name, aliases):
- """Peek a boolean option without using a full options table
-
- >>> req = request([b'x', b'--debugger'], uimod.ui())
- >>> _earlyreqoptbool(req, b'debugger', [b'--debugger'])
- True
-
- >>> req = request([b'x', b'--', b'--debugger'], uimod.ui())
- >>> _earlyreqoptbool(req, b'debugger', [b'--debugger'])
- """
- if req.ui.plain('strictflags'):
- return req.earlyoptions[name]
- try:
- argcount = req.args.index("--")
- except ValueError:
- argcount = len(req.args)
- value = None
- pos = 0
- while pos < argcount:
- arg = req.args[pos]
- if arg in aliases:
- value = True
- pos += 1
- req.earlyoptions[name] = value
- return value
+def _earlysplitopts(args):
+ """Split args into a list of possible early options and remainder args"""
+ shortoptions = 'R:'
+ # TODO: perhaps 'debugger' should be included
+ longoptions = ['cwd=', 'repository=', 'repo=', 'config=']
+ return fancyopts.earlygetopt(args, shortoptions, longoptions,
+ gnu=True, keepsep=True)
def runcommand(lui, repo, cmd, fullargs, ui, options, d, cmdpats, cmdoptions):
# run pre-hook, and abort if it fails
@@ -847,8 +732,7 @@
if cmd and util.safehasattr(fn, 'shell'):
# shell alias shouldn't receive early options which are consumed by hg
- args = args[:]
- _earlygetopt(commands.earlyoptflags, args, strip=True)
+ _earlyopts, args = _earlysplitopts(args)
d = lambda: fn(ui, *args[1:])
return lambda: runcommand(lui, None, cmd, args[:1], ui, options, d,
[], {})
@@ -858,11 +742,11 @@
ui = req.ui
# check for cwd
- cwd = _earlyreqoptstr(req, 'cwd', ['--cwd'])
+ cwd = req.earlyoptions['cwd']
if cwd:
os.chdir(cwd)
- rpath = _earlyreqoptstr(req, 'repository', ["-R", "--repository", "--repo"])
+ rpath = req.earlyoptions['repository']
path, lui = _getlocal(ui, rpath)
uis = {ui, lui}
@@ -870,7 +754,7 @@
if req.repo:
uis.add(req.repo.ui)
- if _earlyreqoptbool(req, 'profile', ['--profile']):
+ if req.earlyoptions['profile']:
for ui_ in uis:
ui_.setconfig('profiling', 'enabled', 'true', '--profile')
@@ -1006,10 +890,11 @@
if not func.optionalrepo:
if func.inferrepo and args and not path:
# try to infer -R from command args
- repos = map(cmdutil.findrepo, args)
+ repos = pycompat.maplist(cmdutil.findrepo, args)
guess = repos[0]
if guess and repos.count(guess) == len(repos):
req.args = ['--repository', guess] + fullargs
+ req.earlyoptions['repository'] = guess
return _dispatch(req)
if not path:
raise error.RepoError(_("no repository found in"
--- a/mercurial/error.py Mon Jan 08 16:07:51 2018 -0800
+++ b/mercurial/error.py Mon Jan 22 17:53:02 2018 -0500
@@ -301,3 +301,7 @@
class PeerTransportError(Abort):
"""Transport-level I/O error when communicating with a peer repo."""
+
+class InMemoryMergeConflictsError(Exception):
+ """Exception raised when merge conflicts arose during an in-memory merge."""
+ __bytes__ = _tobytes
--- a/mercurial/exchange.py Mon Jan 08 16:07:51 2018 -0800
+++ b/mercurial/exchange.py Mon Jan 22 17:53:02 2018 -0500
@@ -13,6 +13,7 @@
from .i18n import _
from .node import (
+ bin,
hex,
nullid,
)
@@ -23,6 +24,7 @@
discovery,
error,
lock as lockmod,
+ logexchange,
obsolete,
phases,
pushkey,
@@ -512,7 +514,11 @@
def _pushdiscoverychangeset(pushop):
"""discover the changeset that need to be pushed"""
fci = discovery.findcommonincoming
- commoninc = fci(pushop.repo, pushop.remote, force=pushop.force)
+ if pushop.revs:
+ commoninc = fci(pushop.repo, pushop.remote, force=pushop.force,
+ ancestorsof=pushop.revs)
+ else:
+ commoninc = fci(pushop.repo, pushop.remote, force=pushop.force)
common, inc, remoteheads = commoninc
fco = discovery.findcommonoutgoing
outgoing = fco(pushop.repo, pushop.remote, onlyheads=pushop.revs,
@@ -742,6 +748,22 @@
or pushop.outobsmarkers
or pushop.outbookmarks)
+@b2partsgenerator('check-bookmarks')
+def _pushb2checkbookmarks(pushop, bundler):
+ """insert bookmark move checking"""
+ if not _pushing(pushop) or pushop.force:
+ return
+ b2caps = bundle2.bundle2caps(pushop.remote)
+ hasbookmarkcheck = 'bookmarks' in b2caps
+ if not (pushop.outbookmarks and hasbookmarkcheck):
+ return
+ data = []
+ for book, old, new in pushop.outbookmarks:
+ old = bin(old)
+ data.append((book, old))
+ checkdata = bookmod.binaryencode(data)
+ bundler.newpart('check:bookmarks', data=checkdata)
+
@b2partsgenerator('check-phases')
def _pushb2checkphases(pushop, bundler):
"""insert phase move checking"""
@@ -879,8 +901,46 @@
if 'bookmarks' in pushop.stepsdone:
return
b2caps = bundle2.bundle2caps(pushop.remote)
- if 'pushkey' not in b2caps:
+
+ legacy = pushop.repo.ui.configlist('devel', 'legacy.exchange')
+ legacybooks = 'bookmarks' in legacy
+
+ if not legacybooks and 'bookmarks' in b2caps:
+ return _pushb2bookmarkspart(pushop, bundler)
+ elif 'pushkey' in b2caps:
+ return _pushb2bookmarkspushkey(pushop, bundler)
+
+def _bmaction(old, new):
+ """small utility for bookmark pushing"""
+ if not old:
+ return 'export'
+ elif not new:
+ return 'delete'
+ return 'update'
+
+def _pushb2bookmarkspart(pushop, bundler):
+ pushop.stepsdone.add('bookmarks')
+ if not pushop.outbookmarks:
return
+
+ allactions = []
+ data = []
+ for book, old, new in pushop.outbookmarks:
+ new = bin(new)
+ data.append((book, new))
+ allactions.append((book, _bmaction(old, new)))
+ checkdata = bookmod.binaryencode(data)
+ bundler.newpart('bookmarks', data=checkdata)
+
+ def handlereply(op):
+ ui = pushop.ui
+ # if success
+ for book, action in allactions:
+ ui.status(bookmsgmap[action][0] % book)
+
+ return handlereply
+
+def _pushb2bookmarkspushkey(pushop, bundler):
pushop.stepsdone.add('bookmarks')
part2book = []
enc = pushkey.encode
@@ -955,7 +1015,8 @@
# create reply capability
capsblob = bundle2.encodecaps(bundle2.getrepocaps(pushop.repo,
- allowpushback=pushback))
+ allowpushback=pushback,
+ role='client'))
bundler.newpart('replycaps', data=capsblob)
replyhandlers = []
for partgenname in b2partsgenorder:
@@ -1273,7 +1334,8 @@
if opargs is None:
opargs = {}
pullop = pulloperation(repo, remote, heads, force, bookmarks=bookmarks,
- streamclonerequested=streamclonerequested, **opargs)
+ streamclonerequested=streamclonerequested,
+ **pycompat.strkwargs(opargs))
peerlocal = pullop.remote.local()
if peerlocal:
@@ -1284,11 +1346,8 @@
" %s") % (', '.join(sorted(missing)))
raise error.Abort(msg)
- wlock = lock = None
- try:
- wlock = pullop.repo.wlock()
- lock = pullop.repo.lock()
- pullop.trmanager = transactionmanager(repo, 'pull', remote.url())
+ pullop.trmanager = transactionmanager(repo, 'pull', remote.url())
+ with repo.wlock(), repo.lock(), pullop.trmanager:
# This should ideally be in _pullbundle2(). However, it needs to run
# before discovery to avoid extra work.
_maybeapplyclonebundle(pullop)
@@ -1300,9 +1359,10 @@
_pullphase(pullop)
_pullbookmarks(pullop)
_pullobsolete(pullop)
- pullop.trmanager.close()
- finally:
- lockmod.release(pullop.trmanager, lock, wlock)
+
+ # storing remotenames
+ if repo.ui.configbool('experimental', 'remotenames'):
+ logexchange.pullremotenames(repo, remote)
return pullop
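The remote-name bookkeeping added above only kicks in when the (at this point experimental) option is enabled; a minimal hgrc sketch::

    [experimental]
    remotenames = True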
@@ -1348,7 +1408,8 @@
# all known bundle2 servers now support listkeys, but lets be nice with
# new implementation.
return
- pullop.remotebookmarks = pullop.remote.listkeys('bookmarks')
+ books = pullop.remote.listkeys('bookmarks')
+ pullop.remotebookmarks = bookmod.unhexlifybookmarks(books)
@pulldiscovery('changegroup')
@@ -1388,32 +1449,59 @@
"""pull data using bundle2
For now, the only supported data are changegroup."""
- kwargs = {'bundlecaps': caps20to10(pullop.repo)}
+ kwargs = {'bundlecaps': caps20to10(pullop.repo, role='client')}
+
+ # make ui easier to access
+ ui = pullop.repo.ui
# At the moment we don't do stream clones over bundle2. If that is
# implemented then here's where the check for that will go.
- streaming = False
+ streaming = streamclone.canperformstreamclone(pullop, bundle2=True)[0]
- # pulling changegroup
- pullop.stepsdone.add('changegroup')
-
+ # declare pull perimeters
kwargs['common'] = pullop.common
kwargs['heads'] = pullop.heads or pullop.rheads
- kwargs['cg'] = pullop.fetch
- ui = pullop.repo.ui
- legacyphase = 'phases' in ui.configlist('devel', 'legacy.exchange')
- hasbinaryphase = 'heads' in pullop.remotebundle2caps.get('phases', ())
- if (not legacyphase and hasbinaryphase):
- kwargs['phases'] = True
+ if streaming:
+ kwargs['cg'] = False
+ kwargs['stream'] = True
+ pullop.stepsdone.add('changegroup')
pullop.stepsdone.add('phases')
+ else:
+ # pulling changegroup
+ pullop.stepsdone.add('changegroup')
+
+ kwargs['cg'] = pullop.fetch
+
+ legacyphase = 'phases' in ui.configlist('devel', 'legacy.exchange')
+ hasbinaryphase = 'heads' in pullop.remotebundle2caps.get('phases', ())
+ if (not legacyphase and hasbinaryphase):
+ kwargs['phases'] = True
+ pullop.stepsdone.add('phases')
+
+ if 'listkeys' in pullop.remotebundle2caps:
+ if 'phases' not in pullop.stepsdone:
+ kwargs['listkeys'] = ['phases']
+
+ bookmarksrequested = False
+ legacybookmark = 'bookmarks' in ui.configlist('devel', 'legacy.exchange')
+ hasbinarybook = 'bookmarks' in pullop.remotebundle2caps
+
+ if pullop.remotebookmarks is not None:
+ pullop.stepsdone.add('request-bookmarks')
+
+ if ('request-bookmarks' not in pullop.stepsdone
+ and pullop.remotebookmarks is None
+ and not legacybookmark and hasbinarybook):
+ kwargs['bookmarks'] = True
+ bookmarksrequested = True
+
if 'listkeys' in pullop.remotebundle2caps:
- if 'phases' not in pullop.stepsdone:
- kwargs['listkeys'] = ['phases']
- if pullop.remotebookmarks is None:
+ if 'request-bookmarks' not in pullop.stepsdone:
# make sure to always includes bookmark data when migrating
# `hg incoming --bundle` to using this function.
+ pullop.stepsdone.add('request-bookmarks')
kwargs.setdefault('listkeys', []).append('bookmarks')
# If this is a full pull / clone and the server supports the clone bundles
@@ -1441,7 +1529,9 @@
_pullbundle2extraprepare(pullop, kwargs)
bundle = pullop.remote.getbundle('pull', **pycompat.strkwargs(kwargs))
try:
- op = bundle2.processbundle(pullop.repo, bundle, pullop.gettransaction)
+ op = bundle2.bundleoperation(pullop.repo, pullop.gettransaction)
+ op.modes['bookmarks'] = 'records'
+ bundle2.processbundle(pullop.repo, bundle, op=op)
except bundle2.AbortFromPart as exc:
pullop.repo.ui.status(_('remote: abort: %s\n') % exc)
raise error.Abort(_('pull failed on remote'), hint=exc.hint)
@@ -1457,9 +1547,15 @@
_pullapplyphases(pullop, value)
# processing bookmark update
- for namespace, value in op.records['listkeys']:
- if namespace == 'bookmarks':
- pullop.remotebookmarks = value
+ if bookmarksrequested:
+ books = {}
+ for record in op.records['bookmarks']:
+ books[record['bookmark']] = record["node"]
+ pullop.remotebookmarks = books
+ else:
+ for namespace, value in op.records['listkeys']:
+ if namespace == 'bookmarks':
+ pullop.remotebookmarks = bookmod.unhexlifybookmarks(value)
# bookmark data were either already there or pulled in the bundle
if pullop.remotebookmarks is not None:
@@ -1552,7 +1648,6 @@
pullop.stepsdone.add('bookmarks')
repo = pullop.repo
remotebookmarks = pullop.remotebookmarks
- remotebookmarks = bookmod.unhexlifybookmarks(remotebookmarks)
bookmod.updatefromremote(repo.ui, repo, remotebookmarks,
pullop.remote.url(),
pullop.gettransaction,
@@ -1586,10 +1681,10 @@
pullop.repo.invalidatevolatilesets()
return tr
-def caps20to10(repo):
+def caps20to10(repo, role):
"""return a set with appropriate options to use bundle20 during getbundle"""
caps = {'HG20'}
- capsblob = bundle2.encodecaps(bundle2.getrepocaps(repo))
+ capsblob = bundle2.encodecaps(bundle2.getrepocaps(repo, role=role))
caps.add('bundle2=' + urlreq.quote(capsblob))
return caps
@@ -1632,9 +1727,11 @@
Could be a bundle HG10 or a bundle HG20 depending on bundlecaps
passed.
- Returns an iterator over raw chunks (of varying sizes).
+ Returns a 2-tuple of a dict with metadata about the generated bundle
+ and an iterator over raw chunks (of varying sizes).
"""
kwargs = pycompat.byteskwargs(kwargs)
+ info = {}
usebundle2 = bundle2requested(bundlecaps)
# bundle10 case
if not usebundle2:
@@ -1645,10 +1742,12 @@
raise ValueError(_('unsupported getbundle arguments: %s')
% ', '.join(sorted(kwargs.keys())))
outgoing = _computeoutgoing(repo, heads, common)
- return changegroup.makestream(repo, outgoing, '01', source,
- bundlecaps=bundlecaps)
+ info['bundleversion'] = 1
+ return info, changegroup.makestream(repo, outgoing, '01', source,
+ bundlecaps=bundlecaps)
# bundle20 case
+ info['bundleversion'] = 2
b2caps = {}
for bcaps in bundlecaps:
if bcaps.startswith('bundle2='):
@@ -1664,14 +1763,41 @@
func(bundler, repo, source, bundlecaps=bundlecaps, b2caps=b2caps,
**pycompat.strkwargs(kwargs))
- return bundler.getchunks()
+ info['prefercompressed'] = bundler.prefercompressed
+
+ return info, bundler.getchunks()
+
+@getbundle2partsgenerator('stream2')
+def _getbundlestream2(bundler, repo, source, bundlecaps=None,
+ b2caps=None, heads=None, common=None, **kwargs):
+ if not kwargs.get('stream', False):
+ return
+
+ if not streamclone.allowservergeneration(repo):
+ raise error.Abort(_('stream data requested but server does not allow '
+ 'this feature'),
+ hint=_('well-behaved clients should not be '
+ 'requesting stream data from servers not '
+ 'advertising it; the client may be buggy'))
+
+ # Stream clones don't compress well. And compression undermines a
+ # goal of stream clones, which is to be fast. Communicate the desire
+ # to avoid compression to consumers of the bundle.
+ bundler.prefercompressed = False
+
+ filecount, bytecount, it = streamclone.generatev2(repo)
+ requirements = ' '.join(sorted(repo.requirements))
+ part = bundler.newpart('stream2', data=it)
+ part.addparam('bytecount', '%d' % bytecount, mandatory=True)
+ part.addparam('filecount', '%d' % filecount, mandatory=True)
+ part.addparam('requirements', requirements, mandatory=True)
@getbundle2partsgenerator('changegroup')
def _getbundlechangegrouppart(bundler, repo, source, bundlecaps=None,
b2caps=None, heads=None, common=None, **kwargs):
"""add a changegroup part to the requested bundle"""
cgstream = None
- if kwargs.get('cg', True):
+ if kwargs.get(r'cg', True):
# build changegroup bundle here.
version = '01'
cgversions = b2caps.get('changegroup')
@@ -1695,11 +1821,24 @@
if 'treemanifest' in repo.requirements:
part.addparam('treemanifest', '1')
+@getbundle2partsgenerator('bookmarks')
+def _getbundlebookmarkpart(bundler, repo, source, bundlecaps=None,
+ b2caps=None, **kwargs):
+ """add a bookmark part to the requested bundle"""
+ if not kwargs.get(r'bookmarks', False):
+ return
+ if 'bookmarks' not in b2caps:
+ raise ValueError(_('no common bookmarks exchange method'))
+ books = bookmod.listbinbookmarks(repo)
+ data = bookmod.binaryencode(books)
+ if data:
+ bundler.newpart('bookmarks', data=data)
+
@getbundle2partsgenerator('listkeys')
def _getbundlelistkeysparts(bundler, repo, source, bundlecaps=None,
b2caps=None, **kwargs):
"""add parts containing listkeys namespaces to the requested bundle"""
- listkeys = kwargs.get('listkeys', ())
+ listkeys = kwargs.get(r'listkeys', ())
for namespace in listkeys:
part = bundler.newpart('listkeys')
part.addparam('namespace', namespace)
@@ -1710,7 +1849,7 @@
def _getbundleobsmarkerpart(bundler, repo, source, bundlecaps=None,
b2caps=None, heads=None, **kwargs):
"""add an obsolescence markers part to the requested bundle"""
- if kwargs.get('obsmarkers', False):
+ if kwargs.get(r'obsmarkers', False):
if heads is None:
heads = repo.heads()
subset = [c.node() for c in repo.set('::%ln', heads)]
@@ -1722,7 +1861,7 @@
def _getbundlephasespart(bundler, repo, source, bundlecaps=None,
b2caps=None, heads=None, **kwargs):
"""add phase heads part to the requested bundle"""
- if kwargs.get('phases', False):
+ if kwargs.get(r'phases', False):
if not 'heads' in b2caps.get('phases'):
raise ValueError(_('no common phases exchange method'))
if heads is None:
@@ -1779,23 +1918,12 @@
# Don't send unless:
# - changeset are being exchanged,
# - the client supports it.
- if not (kwargs.get('cg', True) and 'hgtagsfnodes' in b2caps):
+ if not (kwargs.get(r'cg', True) and 'hgtagsfnodes' in b2caps):
return
outgoing = _computeoutgoing(repo, heads, common)
bundle2.addparttagsfnodescache(repo, bundler, outgoing)
-def _getbookmarks(repo, **kwargs):
- """Returns bookmark to node mapping.
-
- This function is primarily used to generate `bookmarks` bundle2 part.
- It is a separate function in order to make it easy to wrap it
- in extensions. Passing `kwargs` to the function makes it easy to
- add new parameters in extensions.
- """
-
- return dict(bookmod.listbinbookmarks(repo))
-
def check_heads(repo, their_heads, context):
"""check if the heads of a repo have been modified
--- a/mercurial/fancyopts.py Mon Jan 08 16:07:51 2018 -0800
+++ b/mercurial/fancyopts.py Mon Jan 22 17:53:02 2018 -0500
@@ -119,7 +119,7 @@
>>> get([b'--cwd=foo', b'x', b'y', b'-R', b'bar', b'--debugger'], gnu=False)
([('--cwd', 'foo')], ['x', 'y', '-R', 'bar', '--debugger'])
>>> get([b'--unknown', b'--cwd=foo', b'--', '--debugger'], gnu=False)
- ([], ['--unknown', '--cwd=foo', '--debugger'])
+ ([], ['--unknown', '--cwd=foo', '--', '--debugger'])
stripping early options (without loosing '--'):
@@ -141,6 +141,13 @@
>>> get([b'-q', b'--'])
([('-q', '')], [])
+ '--' may be a value:
+
+ >>> get([b'-R', b'--', b'x'])
+ ([('-R', '--')], ['x'])
+ >>> get([b'--cwd', b'--', b'x'])
+ ([('--cwd', '--')], ['x'])
+
value passed to bool options:
>>> get([b'--debugger=foo', b'x'])
@@ -163,20 +170,16 @@
>>> get([b'-', b'y'])
([], ['-', 'y'])
"""
- # ignoring everything just after '--' isn't correct as '--' may be an
- # option value (e.g. ['-R', '--']), but we do that consistently.
- try:
- argcount = args.index('--')
- except ValueError:
- argcount = len(args)
-
parsedopts = []
parsedargs = []
pos = 0
- while pos < argcount:
+ while pos < len(args):
arg = args[pos]
+ if arg == '--':
+ pos += not keepsep
+ break
flag, hasval, val, takeval = _earlyoptarg(arg, shortlist, namelist)
- if not hasval and takeval and pos + 1 >= argcount:
+ if not hasval and takeval and pos + 1 >= len(args):
# missing last argument
break
if not flag or hasval and not takeval:
@@ -195,38 +198,10 @@
parsedopts.append((flag, args[pos + 1]))
pos += 2
- parsedargs.extend(args[pos:argcount])
- parsedargs.extend(args[argcount + (not keepsep):])
+ parsedargs.extend(args[pos:])
return parsedopts, parsedargs
-def gnugetopt(args, options, longoptions):
- """Parse options mostly like getopt.gnu_getopt.
-
- This is different from getopt.gnu_getopt in that an argument of - will
- become an argument of - instead of vanishing completely.
- """
- extraargs = []
- if '--' in args:
- stopindex = args.index('--')
- extraargs = args[stopindex + 1:]
- args = args[:stopindex]
- opts, parseargs = pycompat.getoptb(args, options, longoptions)
- args = []
- while parseargs:
- arg = parseargs.pop(0)
- if arg and arg[0:1] == '-' and len(arg) > 1:
- parseargs.insert(0, arg)
- topts, newparseargs = pycompat.getoptb(parseargs,\
- options, longoptions)
- opts = opts + topts
- parseargs = newparseargs
- else:
- args.append(arg)
- args.extend(extraargs)
- return opts, args
-
-
-def fancyopts(args, options, state, gnu=False, early=False):
+def fancyopts(args, options, state, gnu=False, early=False, optaliases=None):
"""
read args, parse options, and store options in state
@@ -246,8 +221,15 @@
integer - parameter strings is stored as int
function - call function with parameter
+ optaliases is a mapping from a canonical option name to a list of
+ additional long options. This exists for preserving backward compatibility
+ of early options. If we want to use it extensively, please consider moving
+ the functionality to the options table (e.g. separate long options by '|').
+
non-option args are returned
"""
+ if optaliases is None:
+ optaliases = {}
namelist = []
shortlist = ''
argmap = {}
@@ -261,10 +243,13 @@
else:
short, name, default, comment = option
# convert opts to getopt format
- oname = name
+ onames = [name]
+ onames.extend(optaliases.get(name, []))
name = name.replace('-', '_')
- argmap['-' + short] = argmap['--' + oname] = name
+ argmap['-' + short] = name
+ for n in onames:
+ argmap['--' + n] = name
defmap[name] = default
# copy defaults to state
@@ -279,30 +264,30 @@
if not (default is None or default is True or default is False):
if short:
short += ':'
- if oname:
- oname += '='
- elif oname not in nevernegate:
- if oname.startswith('no-'):
- insert = oname[3:]
- else:
- insert = 'no-' + oname
- # backout (as a practical example) has both --commit and
- # --no-commit options, so we don't want to allow the
- # negations of those flags.
- if insert not in alllong:
- assert ('--' + oname) not in negations
- negations['--' + insert] = '--' + oname
- namelist.append(insert)
+ onames = [n + '=' for n in onames]
+ elif name not in nevernegate:
+ for n in onames:
+ if n.startswith('no-'):
+ insert = n[3:]
+ else:
+ insert = 'no-' + n
+ # backout (as a practical example) has both --commit and
+ # --no-commit options, so we don't want to allow the
+ # negations of those flags.
+ if insert not in alllong:
+ assert ('--' + n) not in negations
+ negations['--' + insert] = '--' + n
+ namelist.append(insert)
if short:
shortlist += short
if name:
- namelist.append(oname)
+ namelist.extend(onames)
# parse arguments
if early:
parse = functools.partial(earlygetopt, gnu=gnu)
elif gnu:
- parse = gnugetopt
+ parse = pycompat.gnugetoptb
else:
parse = pycompat.getoptb
opts, args = parse(args, shortlist, namelist)
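The ``optaliases`` machinery above folds several long spellings into one canonical destination before parsing. A rough standalone sketch of the idea, using plain ``getopt`` and hypothetical names rather than fancyopts itself::

    import getopt

    def parse_with_aliases(args, table, optaliases):
        """table: {canonical_name: default}; optaliases: {canonical: [aliases]}"""
        argmap, longnames = {}, []
        for name, default in table.items():
            for n in [name] + optaliases.get(name, []):
                suffix = '' if isinstance(default, bool) else '='
                longnames.append(n + suffix)
                argmap['--' + n] = name      # every alias maps to the canonical key
        state = dict(table)
        opts, rest = getopt.gnu_getopt(args, '', longnames)
        for flag, val in opts:
            name = argmap[flag]
            state[name] = True if isinstance(state[name], bool) else val
        return state, rest

    # parse_with_aliases(['--repo', 'foo', 'log'],
    #                    {'repository': ''}, {'repository': ['repo']})
    # -> ({'repository': 'foo'}, ['log'])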
--- a/mercurial/filelog.py Mon Jan 08 16:07:51 2018 -0800
+++ b/mercurial/filelog.py Mon Jan 22 17:53:02 2018 -0500
@@ -43,6 +43,8 @@
def __init__(self, opener, path):
super(filelog, self).__init__(opener,
"/".join(("data", path + ".i")))
+ # full name of the user visible file, relative to the repository root
+ self.filename = path
def read(self, node):
t = self.revision(node)
--- a/mercurial/filemerge.py Mon Jan 08 16:07:51 2018 -0800
+++ b/mercurial/filemerge.py Mon Jan 22 17:53:02 2018 -0500
@@ -241,6 +241,12 @@
ui = repo.ui
fd = fcd.path()
+ # Avoid prompting during an in-memory merge since it doesn't support merge
+ # conflicts.
+ if fcd.changectx().isinmemory():
+ raise error.InMemoryMergeConflictsError('in-memory merge does not '
+ 'support file conflicts')
+
prompts = partextras(labels)
prompts['fd'] = fd
try:
@@ -465,11 +471,10 @@
a = _workingpath(repo, fcd)
fd = fcd.path()
- # Run ``flushall()`` to make any missing folders the following wwrite
- # calls might be depending on.
from . import context
if isinstance(fcd, context.overlayworkingfilectx):
- fcd.ctx().flushall()
+ raise error.InMemoryMergeConflictsError('in-memory merge does not '
+ 'support the :dump tool.')
util.writefile(a + ".local", fcd.decodeddata())
repo.wwrite(fd + ".other", fco.data(), fco.flags())
@@ -485,6 +490,18 @@
return _idump(repo, mynode, orig, fcd, fco, fca, toolconf, files,
labels=labels)
+def _xmergeimm(repo, mynode, orig, fcd, fco, fca, toolconf, files, labels=None):
+ # In-memory merge simply raises an exception on all external merge tools,
+ # for now.
+ #
+ # It would be possible to run most tools with temporary files, but this
+ # raises the question of what to do if the user only partially resolves the
+ # file -- we can't leave a merge state. (Copy to somewhere in the .hg/
+ # directory and tell the user how to get it is my best idea, but it's
+ # clunky.)
+ raise error.InMemoryMergeConflictsError('in-memory merge does not support '
+ 'external merge tools')
+
def _xmerge(repo, mynode, orig, fcd, fco, fca, toolconf, files, labels=None):
tool, toolpath, binary, symlink = toolconf
if fcd.isabsent() or fco.isabsent():
@@ -526,7 +543,7 @@
util.unlink(b)
util.unlink(c)
-def _formatconflictmarker(repo, ctx, template, label, pad):
+def _formatconflictmarker(ctx, template, label, pad):
"""Applies the given template to the ctx, prefixed by the label.
Pad is the minimum width of the label prefix, so that multiple markers
@@ -535,10 +552,7 @@
if ctx.node() is None:
ctx = ctx.p1()
- props = templatekw.keywords.copy()
- props['templ'] = template
- props['ctx'] = ctx
- props['repo'] = repo
+ props = {'ctx': ctx}
templateresult = template.render(props)
label = ('%s:' % label).ljust(pad + 1)
@@ -564,14 +578,16 @@
ui = repo.ui
template = ui.config('ui', 'mergemarkertemplate')
template = templater.unquotestring(template)
- tmpl = formatter.maketemplater(ui, template)
+ tres = formatter.templateresources(ui, repo)
+ tmpl = formatter.maketemplater(ui, template, defaults=templatekw.keywords,
+ resources=tres)
pad = max(len(l) for l in labels)
- newlabels = [_formatconflictmarker(repo, cd, tmpl, labels[0], pad),
- _formatconflictmarker(repo, co, tmpl, labels[1], pad)]
+ newlabels = [_formatconflictmarker(cd, tmpl, labels[0], pad),
+ _formatconflictmarker(co, tmpl, labels[1], pad)]
if len(labels) > 2:
- newlabels.append(_formatconflictmarker(repo, ca, tmpl, labels[2], pad))
+ newlabels.append(_formatconflictmarker(ca, tmpl, labels[2], pad))
return newlabels
def partextras(labels):
@@ -602,6 +618,9 @@
(if any), the backup is used to undo certain premerges, confirm whether a
merge changed anything, and determine what line endings the new file should
have.
+
+ Backups only need to be written once (right before the premerge) since their
+ content doesn't change afterwards.
"""
if fcd.isabsent():
return None
@@ -612,21 +631,26 @@
back = scmutil.origpath(ui, repo, a)
inworkingdir = (back.startswith(repo.wvfs.base) and not
back.startswith(repo.vfs.base))
-
if isinstance(fcd, context.overlayworkingfilectx) and inworkingdir:
# If the backup file is to be in the working directory, and we're
# merging in-memory, we must redirect the backup to the memory context
# so we don't disturb the working directory.
relpath = back[len(repo.wvfs.base) + 1:]
- wctx[relpath].write(fcd.data(), fcd.flags())
+ if premerge:
+ wctx[relpath].write(fcd.data(), fcd.flags())
return wctx[relpath]
else:
- # Otherwise, write to wherever the user specified the backups should go.
- #
+ if premerge:
+ # Otherwise, write to whatever path the user specified the backups
+ # should go. We still need to switch based on whether the source is
+ # in-memory so we can use the fast path of ``util.copy`` if both are
+ # on disk.
+ if isinstance(fcd, context.overlayworkingfilectx):
+ util.writefile(back, fcd.data())
+ else:
+ util.copyfile(a, back)
# A arbitraryfilectx is returned, so we can run the same functions on
# the backup context regardless of where it lives.
- if premerge:
- util.copyfile(a, back)
return context.arbitraryfilectx(back, repo=repo)
def _maketempfiles(repo, fco, fca):
@@ -683,16 +707,14 @@
onfailure = func.onfailure
precheck = func.precheck
else:
- func = _xmerge
+ if wctx.isinmemory():
+ func = _xmergeimm
+ else:
+ func = _xmerge
mergetype = fullmerge
onfailure = _("merging %s failed!\n")
precheck = None
- # If using deferred writes, must flush any deferred contents if running
- # an external merge tool since it has arbitrary access to the working
- # copy.
- wctx.flushall()
-
toolconf = tool, toolpath, binary, symlink
if mergetype == nomerge:
@@ -710,6 +732,10 @@
if precheck and not precheck(repo, mynode, orig, fcd, fco, fca,
toolconf):
if onfailure:
+ if wctx.isinmemory():
+ raise error.InMemoryMergeConflictsError('in-memory merge does '
+ 'not support merge '
+ 'conflicts')
ui.warn(onfailure % fd)
return True, 1, False
@@ -736,6 +762,10 @@
if r:
if onfailure:
+ if wctx.isinmemory():
+ raise error.InMemoryMergeConflictsError('in-memory merge '
+ 'does not support '
+ 'merge conflicts')
ui.warn(onfailure % fd)
_onfilemergefailure(ui)
--- a/mercurial/fileset.py Mon Jan 08 16:07:51 2018 -0800
+++ b/mercurial/fileset.py Mon Jan 22 17:53:02 2018 -0500
@@ -12,6 +12,7 @@
from .i18n import _
from . import (
error,
+ match as matchmod,
merge,
parser,
pycompat,
@@ -23,6 +24,7 @@
elements = {
# token-type: binding-strength, primary, prefix, infix, suffix
"(": (20, None, ("group", 1, ")"), ("func", 1, ")"), None),
+ ":": (15, None, None, ("kindpat", 15), None),
"-": (5, None, ("negate", 19), ("minus", 5), None),
"not": (10, None, ("not", 10), None, None),
"!": (10, None, ("not", 10), None, None),
@@ -49,7 +51,7 @@
c = program[pos]
if c.isspace(): # skip inter-token whitespace
pass
- elif c in "(),-|&+!": # handle simple operators
+ elif c in "(),-:|&+!": # handle simple operators
yield (c, None, pos)
elif (c in '"\'' or c == 'r' and
program[pos:pos + 2] in ("r'", 'r"')): # handle quoted strings
@@ -99,11 +101,28 @@
raise error.ParseError(_("invalid token"), pos)
return tree
+def getsymbol(x):
+ if x and x[0] == 'symbol':
+ return x[1]
+ raise error.ParseError(_('not a symbol'))
+
def getstring(x, err):
if x and (x[0] == 'string' or x[0] == 'symbol'):
return x[1]
raise error.ParseError(err)
+def _getkindpat(x, y, allkinds, err):
+ kind = getsymbol(x)
+ pat = getstring(y, err)
+ if kind not in allkinds:
+ raise error.ParseError(_("invalid pattern kind: %s") % kind)
+ return '%s:%s' % (kind, pat)
+
+def getpattern(x, allkinds, err):
+ if x and x[0] == 'kindpat':
+ return _getkindpat(x[1], x[2], allkinds, err)
+ return getstring(x, err)
+
def getset(mctx, x):
if not x:
raise error.ParseError(_("missing argument"))
@@ -113,6 +132,10 @@
m = mctx.matcher([x])
return [f for f in mctx.subset if m(f)]
+def kindpatset(mctx, x, y):
+ return stringset(mctx, _getkindpat(x, y, matchmod.allpatternkinds,
+ _("pattern must be a string")))
+
def andset(mctx, x, y):
return getset(mctx.narrow(getset(mctx, x)), y)
@@ -131,6 +154,9 @@
yl = set(getset(mctx, y))
return [f for f in xl if f not in yl]
+def negateset(mctx, x):
+ raise error.ParseError(_("can't use negate operator in this context"))
+
def listset(mctx, a, b):
raise error.ParseError(_("can't use a list in this context"),
hint=_('see hg help "filesets.x or y"'))
@@ -225,8 +251,8 @@
return [f for f in mctx.subset if f in s]
def func(mctx, a, b):
- if a[0] == 'symbol' and a[1] in symbols:
- funcname = a[1]
+ funcname = getsymbol(a)
+ if funcname in symbols:
enabled = mctx._existingenabled
mctx._existingenabled = funcname in _existingcallers
try:
@@ -237,7 +263,7 @@
keep = lambda fn: getattr(fn, '__doc__', None) is not None
syms = [s for (s, fn) in symbols.items() if keep(fn)]
- raise error.UnknownIdentifier(a[1], syms)
+ raise error.UnknownIdentifier(funcname, syms)
def getlist(x):
if not x:
@@ -344,6 +370,34 @@
except ValueError:
raise error.ParseError(_("couldn't parse size: %s") % s)
+def sizematcher(x):
+ """Return a function(size) -> bool from the ``size()`` expression"""
+
+ # i18n: "size" is a keyword
+ expr = getstring(x, _("size requires an expression")).strip()
+ if '-' in expr: # do we have a range?
+ a, b = expr.split('-', 1)
+ a = util.sizetoint(a)
+ b = util.sizetoint(b)
+ return lambda x: x >= a and x <= b
+ elif expr.startswith("<="):
+ a = util.sizetoint(expr[2:])
+ return lambda x: x <= a
+ elif expr.startswith("<"):
+ a = util.sizetoint(expr[1:])
+ return lambda x: x < a
+ elif expr.startswith(">="):
+ a = util.sizetoint(expr[2:])
+ return lambda x: x >= a
+ elif expr.startswith(">"):
+ a = util.sizetoint(expr[1:])
+ return lambda x: x > a
+ elif expr[0].isdigit() or expr[0] == '.':
+ a = util.sizetoint(expr)
+ b = _sizetomax(expr)
+ return lambda x: x >= a and x <= b
+ raise error.ParseError(_("couldn't parse size: %s") % expr)
+
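For reference, a self-contained sketch of the kind of predicate ``sizematcher()`` returns; ``tobytes`` below is a simplified stand-in for ``util.sizetoint``/``_sizetomax``, and the bare-number case is collapsed to an exact match::

    def tobytes(s):
        units = {'k': 1 << 10, 'm': 1 << 20, 'g': 1 << 30}
        s = s.strip().lower().rstrip('b')
        mult = units.get(s[-1:], 1)
        if mult != 1:
            s = s[:-1]
        return int(float(s) * mult)

    def sizematcher_sketch(expr):
        expr = expr.strip()
        if '-' in expr:                      # "4k - 1MB": inclusive range
            lo, hi = (tobytes(p) for p in expr.split('-', 1))
            return lambda n: lo <= n <= hi
        for op, make in (('<=', lambda a: lambda n: n <= a),
                         ('>=', lambda a: lambda n: n >= a),
                         ('<', lambda a: lambda n: n < a),
                         ('>', lambda a: lambda n: n > a)):
            if expr.startswith(op):
                return make(tobytes(expr[len(op):]))
        return lambda n: n == tobytes(expr)  # simplified exact match

    # sizematcher_sketch('>= .5MB')(524288)  -> True
    # sizematcher_sketch('4k - 1MB')(2048)   -> False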
@predicate('size(expression)', callexisting=True)
def size(mctx, x):
"""File size matches the given expression. Examples:
@@ -353,33 +407,7 @@
- size('>= .5MB') - files at least 524288 bytes
- size('4k - 1MB') - files from 4096 bytes to 1048576 bytes
"""
-
- # i18n: "size" is a keyword
- expr = getstring(x, _("size requires an expression")).strip()
- if '-' in expr: # do we have a range?
- a, b = expr.split('-', 1)
- a = util.sizetoint(a)
- b = util.sizetoint(b)
- m = lambda x: x >= a and x <= b
- elif expr.startswith("<="):
- a = util.sizetoint(expr[2:])
- m = lambda x: x <= a
- elif expr.startswith("<"):
- a = util.sizetoint(expr[1:])
- m = lambda x: x < a
- elif expr.startswith(">="):
- a = util.sizetoint(expr[2:])
- m = lambda x: x >= a
- elif expr.startswith(">"):
- a = util.sizetoint(expr[1:])
- m = lambda x: x > a
- elif expr[0].isdigit or expr[0] == '.':
- a = util.sizetoint(expr)
- b = _sizetomax(expr)
- m = lambda x: x >= a and x <= b
- else:
- raise error.ParseError(_("couldn't parse size: %s") % expr)
-
+ m = sizematcher(x)
return [f for f in mctx.existing() if m(mctx.ctx[f].size())]
@predicate('encoding(name)', callexisting=True)
@@ -496,10 +524,9 @@
ctx = mctx.ctx
sstate = sorted(ctx.substate)
if x:
- # i18n: "subrepo" is a keyword
- pat = getstring(x, _("subrepo requires a pattern or no arguments"))
-
- from . import match as matchmod # avoid circular import issues
+ pat = getpattern(x, matchmod.allpatternkinds,
+ # i18n: "subrepo" is a keyword
+ _("subrepo requires a pattern or no arguments"))
fast = not matchmod.patkind(pat)
if fast:
def m(s):
@@ -513,9 +540,11 @@
methods = {
'string': stringset,
'symbol': stringset,
+ 'kindpat': kindpatset,
'and': andset,
'or': orset,
'minus': minusset,
+ 'negate': negateset,
'list': listset,
'group': getset,
'not': notset,
--- a/mercurial/formatter.py Mon Jan 08 16:07:51 2018 -0800
+++ b/mercurial/formatter.py Mon Jan 22 17:53:02 2018 -0500
@@ -94,14 +94,14 @@
>>> def subrepos(ui, fm):
... fm.startitem()
-... fm.write(b'repo', b'[%s]\\n', b'baz')
+... fm.write(b'reponame', b'[%s]\\n', b'baz')
... files(ui, fm.nested(b'files'))
... fm.end()
>>> show(subrepos)
[baz]
foo
bar
->>> show(subrepos, template=b'{repo}: {join(files % "{path}", ", ")}\\n')
+>>> show(subrepos, template=b'{reponame}: {join(files % "{path}", ", ")}\\n')
baz: foo, bar
"""
@@ -363,11 +363,12 @@
self._out = out
spec = lookuptemplate(ui, topic, opts.get('template', ''))
self._tref = spec.ref
- self._t = loadtemplater(ui, spec, cache=templatekw.defaulttempl)
+ self._t = loadtemplater(ui, spec, defaults=templatekw.keywords,
+ resources=templateresources(ui),
+ cache=templatekw.defaulttempl)
self._parts = templatepartsmap(spec, self._t,
['docheader', 'docfooter', 'separator'])
self._counter = itertools.count()
- self._cache = {} # for templatekw/funcs to store reusable data
self._renderitem('docheader', {})
def _showitem(self):
@@ -386,17 +387,14 @@
# function will have to declare dependent resources. e.g.
# @templatekeyword(..., requires=('ctx',))
props = {}
- if 'ctx' in item:
- props.update(templatekw.keywords)
# explicitly-defined fields precede templatekw
props.update(item)
if 'ctx' in item:
# but template resources must be always available
- props['templ'] = self._t
props['repo'] = props['ctx'].repo()
props['revcache'] = {}
props = pycompat.strkwargs(props)
- g = self._t(ref, ui=self._ui, cache=self._cache, **props)
+ g = self._t(ref, **props)
self._out.write(templater.stringify(g))
def end(self):
@@ -468,24 +466,39 @@
partsmap[part] = ref
return partsmap
-def loadtemplater(ui, spec, cache=None):
+def loadtemplater(ui, spec, defaults=None, resources=None, cache=None):
"""Create a templater from either a literal template or loading from
a map file"""
assert not (spec.tmpl and spec.mapfile)
if spec.mapfile:
- return templater.templater.frommapfile(spec.mapfile, cache=cache)
- return maketemplater(ui, spec.tmpl, cache=cache)
+ frommapfile = templater.templater.frommapfile
+ return frommapfile(spec.mapfile, defaults=defaults, resources=resources,
+ cache=cache)
+ return maketemplater(ui, spec.tmpl, defaults=defaults, resources=resources,
+ cache=cache)
-def maketemplater(ui, tmpl, cache=None):
+def maketemplater(ui, tmpl, defaults=None, resources=None, cache=None):
"""Create a templater from a string template 'tmpl'"""
aliases = ui.configitems('templatealias')
- t = templater.templater(cache=cache, aliases=aliases)
+ t = templater.templater(defaults=defaults, resources=resources,
+ cache=cache, aliases=aliases)
t.cache.update((k, templater.unquotestring(v))
for k, v in ui.configitems('templates'))
if tmpl:
t.cache[''] = tmpl
return t
+def templateresources(ui, repo=None):
+ """Create a dict of template resources designed for the default templatekw
+ and functions"""
+ return {
+ 'cache': {}, # for templatekw/funcs to store reusable data
+ 'ctx': None,
+ 'repo': repo,
+ 'revcache': None, # per-ctx cache; set later
+ 'ui': ui,
+ }
+
def formatter(ui, out, topic, opts):
template = opts.get("template", "")
if template == "json":
--- a/mercurial/graphmod.py Mon Jan 08 16:07:51 2018 -0800
+++ b/mercurial/graphmod.py Mon Jan 22 17:53:02 2018 -0500
@@ -48,9 +48,6 @@
returned.
"""
- if not revs:
- return
-
gpcache = {}
for rev in revs:
--- a/mercurial/hbisect.py Mon Jan 08 16:07:51 2018 -0800
+++ b/mercurial/hbisect.py Mon Jan 22 17:53:02 2018 -0500
@@ -21,7 +21,7 @@
error,
)
-def bisect(changelog, state):
+def bisect(repo, state):
"""find the next node (if any) for testing during a bisect search.
returns a (nodes, number, good) tuple.
@@ -32,33 +32,15 @@
if searching for a first bad one.
"""
+ changelog = repo.changelog
clparents = changelog.parentrevs
skip = set([changelog.rev(n) for n in state['skip']])
def buildancestors(bad, good):
- # only the earliest bad revision matters
badrev = min([changelog.rev(n) for n in bad])
- goodrevs = [changelog.rev(n) for n in good]
- goodrev = min(goodrevs)
- # build visit array
- ancestors = [None] * (len(changelog) + 1) # an extra for [-1]
-
- # set nodes descended from goodrevs
- for rev in goodrevs:
+ ancestors = collections.defaultdict(lambda: None)
+ for rev in repo.revs("descendants(%ln) - ancestors(%ln)", good, good):
ancestors[rev] = []
- for rev in changelog.revs(goodrev + 1):
- for prev in clparents(rev):
- if ancestors[prev] == []:
- ancestors[rev] = []
-
- # clear good revs from array
- for rev in goodrevs:
- ancestors[rev] = None
- for rev in changelog.revs(len(changelog), goodrev):
- if ancestors[rev] is None:
- for prev in clparents(rev):
- ancestors[prev] = None
-
if ancestors[badrev] is None:
return badrev, None
return badrev, ancestors
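The rewritten ``buildancestors`` leans on a revset rather than a hand-rolled parent walk; roughly the same candidate set can be inspected by hand, with GOOD standing in for one or more known-good revisions::

    hg log -r "descendants(GOOD) - ancestors(GOOD)"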
--- a/mercurial/help.py Mon Jan 08 16:07:51 2018 -0800
+++ b/mercurial/help.py Mon Jan 22 17:53:02 2018 -0500
@@ -226,6 +226,7 @@
(['color'], _("Colorizing Outputs"), loaddoc('color')),
(["config", "hgrc"], _("Configuration Files"), loaddoc('config')),
(["dates"], _("Date Formats"), loaddoc('dates')),
+ (["flags"], _("Command-line flags"), loaddoc('flags')),
(["patterns"], _("File Name Patterns"), loaddoc('patterns')),
(['environment', 'env'], _('Environment Variables'),
loaddoc('environment')),
@@ -452,7 +453,7 @@
rst.append(' :%s: %s\n' % (f, h[f]))
ex = opts.get
- anyopts = (ex('keyword') or not (ex('command') or ex('extension')))
+ anyopts = (ex(r'keyword') or not (ex(r'command') or ex(r'extension')))
if not name and anyopts:
exts = listexts(_('enabled extensions:'), extensions.enabled())
if exts:
--- a/mercurial/help/config.txt Mon Jan 08 16:07:51 2018 -0800
+++ b/mercurial/help/config.txt Mon Jan 22 17:53:02 2018 -0500
@@ -1723,6 +1723,14 @@
Controls generic server settings.
+``bookmarks-pushkey-compat``
+ Trigger the pushkey hook when bookmark updates are pushed. This config
+ exists for compatibility purposes. (default: True)
+
+ If you use ``pushkey`` and ``pre-pushkey`` hooks to control bookmark
+ movement, we recommend you migrate them to ``txnclose-bookmark`` and
+ ``pretxnclose-bookmark``.
+
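Once hooks have been migrated, the compatibility shim can be switched off; a minimal sketch, assuming the option sits in the ``[server]`` section documented here::

    [server]
    bookmarks-pushkey-compat = False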
``compressionengines``
List of compression engines and their relative priority to advertise
to clients.
@@ -2176,6 +2184,8 @@
(default: True)
``slash``
+ (Deprecated. Use ``slashpath`` template filter instead.)
+
Display paths using a slash (``/``) as the path separator. This
only makes a difference on systems where the default path
separator is not the slash character (e.g. Windows uses the
@@ -2188,6 +2198,10 @@
``ssh``
Command to use for SSH connections. (default: ``ssh``)
+``ssherrorhint``
+ A hint shown to the user in the case of an SSH error (e.g.
+ ``Please see http://company/internalwiki/ssh.html``)
+
``strict``
Require exact command names, instead of allowing unambiguous
abbreviations. (default: False)
@@ -2211,6 +2225,10 @@
The timeout used when a lock is held (in seconds), a negative value
means no timeout. (default: 600)
+``timeout.warn``
+ Time (in seconds) before a warning is printed about a held lock. A negative
+ value means no warning. (default: 0)
+
``traceback``
Mercurial always prints a traceback when an unknown exception
occurs. Setting this to True will make Mercurial print a traceback
@@ -2260,7 +2278,7 @@
you want it to accept pushes from anybody, you can use the following
command line::
- $ hg --config web.allow_push=* --config web.push_ssl=False serve
+ $ hg --config web.allow-push=* --config web.push_ssl=False serve
Note that this will allow anybody to push anything to the server and
that this should not be used for public servers.
@@ -2287,16 +2305,16 @@
revisions.
(default: False)
-``allowpull``
+``allow-pull``
Whether to allow pulling from the repository. (default: True)
-``allow_push``
+``allow-push``
Whether to allow pushing to the repository. If empty or not set,
pushing is not allowed. If the special value ``*``, any remote
user can push, including unauthenticated users. Otherwise, the
remote user must have been authenticated, and the authenticated
user name must be present in this list. The contents of the
- allow_push list are examined after the deny_push list.
+ allow-push list are examined after the deny_push list.
``allow_read``
If the user has not already been denied repository access due to
@@ -2390,7 +2408,7 @@
push is not denied. If the special value ``*``, all remote users are
denied push. Otherwise, unauthenticated users are all denied, and
any authenticated user name present in this list is also denied. The
- contents of the deny_push list are examined before the allow_push list.
+ contents of the deny_push list are examined before the allow-push list.
``deny_read``
Whether to deny reading/viewing of the repository. If this list is
@@ -2547,6 +2565,10 @@
directory updates in parallel on Unix-like systems, which greatly
helps performance.
+``enabled``
+ Whether to enable use of the worker code.
+ (default: true)
+
``numcpus``
Number of CPUs to use for parallel operations. A zero or
negative value is treated as ``use the default``.
--- a/mercurial/help/environment.txt Mon Jan 08 16:07:51 2018 -0800
+++ b/mercurial/help/environment.txt Mon Jan 22 17:53:02 2018 -0500
@@ -73,6 +73,8 @@
``alias``
Don't remove aliases.
+ ``color``
+ Don't disable colored output.
``i18n``
Preserve internationalization.
``revsetalias``
--- a/mercurial/help/filesets.txt Mon Jan 08 16:07:51 2018 -0800
+++ b/mercurial/help/filesets.txt Mon Jan 22 17:53:02 2018 -0500
@@ -9,7 +9,8 @@
or double quotes if they contain characters outside of
``[.*{}[]?/\_a-zA-Z0-9\x80-\xff]`` or if they match one of the
predefined predicates. This generally applies to file patterns other
-than globs and arguments for predicates.
+than globs and arguments for predicates. Pattern prefixes such as
+``path:`` may be specified without quoting.
Special characters can be used in quoted identifiers by escaping them,
e.g., ``\n`` is interpreted as a newline. To prevent them from being
@@ -75,4 +76,4 @@
- Remove files listed in foo.lst that contain the letter a or b::
- hg remove "set: 'listfile:foo.lst' and (**a* or **b*)"
+ hg remove "set: listfile:foo.lst and (**a* or **b*)"
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/mercurial/help/flags.txt Mon Jan 22 17:53:02 2018 -0500
@@ -0,0 +1,104 @@
+Most Mercurial commands accept various flags.
+
+Flag names
+==========
+
+Flags for each command are listed in :hg:`help` for that command.
+Additionally, some flags, such as --repository, are global and can be used with
+any command - those are seen in :hg:`help -v`, and can be specified before or
+after the command.
+
+Every flag has at least a long name, such as --repository. Some flags may also
+have a short one-letter name, such as the equivalent -R. Using either the short
+or the long name has the same effect.
+
+Flags that have a short name can also be bundled together - for instance, to
+specify both --edit (short -e) and --interactive (short -i), one could use::
+
+ hg commit -ei
+
+If any of the bundled flags takes a value (i.e. is not a boolean), it must be
+last, followed by the value::
+
+ hg commit -im 'Message'
+
+Flag types
+==========
+
+Mercurial command-line flags can be strings, numbers, booleans, or lists of
+strings.
+
+Specifying flag values
+======================
+
+The following syntaxes are allowed, assuming a flag 'flagname' with short name
+'f'::
+
+ --flagname=foo
+ --flagname foo
+ -f foo
+ -ffoo
+
+This syntax applies to all non-boolean flags (strings, numbers or lists).
+
+Specifying boolean flags
+========================
+
+Boolean flags do not take a value parameter. To specify a boolean, use the flag
+name to set it to true, or the same name prefixed with 'no-' to set it to
+false::
+
+ hg commit --interactive
+ hg commit --no-interactive
+
+Specifying list flags
+=====================
+
+List flags take multiple values. To specify them, pass the flag multiple times::
+
+ hg files --include mercurial --include tests
+
+Setting flag defaults
+=====================
+
+In order to set a default value for a flag in an hgrc file, it is recommended to
+use aliases::
+
+ [alias]
+ commit = commit --interactive
+
+For more information on hgrc files, see :hg:`help config`.
+
+Overriding flags on the command line
+====================================
+
+If the same non-list flag is specified multiple times on the command line, the
+latest specification is used::
+
+ hg commit -m "Ignored value" -m "Used value"
+
+This includes the use of aliases - e.g., if one has::
+
+ [alias]
+ committemp = commit -m "Ignored value"
+
+then the following command will override that -m::
+
+ hg committemp -m "Used value"
+
+Overriding flag defaults
+========================
+
+Every flag has a default value, and you may also set your own defaults in hgrc
+as described above.
+Except for list flags, defaults can be overridden on the command line simply by
+specifying the flag in that location.
+
+Hidden flags
+============
+
+Some flags are not shown in a command's help by default - specifically, those
+that are deemed to be experimental, deprecated or advanced. To show all flags,
+add the --verbose flag for the help command::
+
+ hg help --verbose commit
--- a/mercurial/help/hg.1.txt Mon Jan 08 16:07:51 2018 -0800
+++ b/mercurial/help/hg.1.txt Mon Jan 22 17:53:02 2018 -0500
@@ -112,7 +112,7 @@
Copying
"""""""
-Copyright (C) 2005-2017 Matt Mackall.
+Copyright (C) 2005-2018 Matt Mackall.
Free use of this software is granted under the terms of the GNU General
Public License version 2 or any later version.
--- a/mercurial/help/hgignore.5.txt Mon Jan 08 16:07:51 2018 -0800
+++ b/mercurial/help/hgignore.5.txt Mon Jan 22 17:53:02 2018 -0500
@@ -26,7 +26,7 @@
Copying
=======
This manual page is copyright 2006 Vadim Gelfer.
-Mercurial is copyright 2005-2017 Matt Mackall.
+Mercurial is copyright 2005-2018 Matt Mackall.
Free use of this software is granted under the terms of the GNU General
Public License version 2 or any later version.
--- a/mercurial/help/hgrc.5.txt Mon Jan 08 16:07:51 2018 -0800
+++ b/mercurial/help/hgrc.5.txt Mon Jan 22 17:53:02 2018 -0500
@@ -34,7 +34,7 @@
Copying
=======
This manual page is copyright 2005 Bryan O'Sullivan.
-Mercurial is copyright 2005-2017 Matt Mackall.
+Mercurial is copyright 2005-2018 Matt Mackall.
Free use of this software is granted under the terms of the GNU General
Public License version 2 or any later version.
--- a/mercurial/help/internals/wireprotocol.txt Mon Jan 08 16:07:51 2018 -0800
+++ b/mercurial/help/internals/wireprotocol.txt Mon Jan 22 17:53:02 2018 -0500
@@ -731,6 +731,8 @@
cbattempted
Boolean indicating whether the client attempted to use the *clone bundles*
feature before performing this request.
+bookmarks
+ Boolean indicating whether bookmark data is requested.
phases
Boolean indicating whether phases data is requested.
--- a/mercurial/hg.py Mon Jan 08 16:07:51 2018 -0800
+++ b/mercurial/hg.py Mon Jan 22 17:53:02 2018 -0500
@@ -14,11 +14,14 @@
import shutil
from .i18n import _
-from .node import nullid
+from .node import (
+ nullid,
+)
from . import (
bookmarks,
bundlerepo,
+ cacheutil,
cmdutil,
destutil,
discovery,
@@ -28,10 +31,10 @@
httppeer,
localrepo,
lock,
+ logexchange,
merge as mergemod,
node,
phases,
- repoview,
scmutil,
sshpeer,
statichttprepo,
@@ -306,16 +309,13 @@
"""
default = defaultpath or sourcerepo.ui.config('paths', 'default')
if default:
- fp = destrepo.vfs("hgrc", "w", text=True)
- fp.write("[paths]\n")
- fp.write("default = %s\n" % default)
- fp.close()
+ template = ('[paths]\n'
+ 'default = %s\n')
+ destrepo.vfs.write('hgrc', util.tonativeeol(template % default))
with destrepo.wlock():
if bookmarks:
- fp = destrepo.vfs('shared', 'w')
- fp.write(sharedbookmarks + '\n')
- fp.close()
+ destrepo.vfs.write('shared', sharedbookmarks + '\n')
def _postshareupdate(repo, update, checkout=None):
"""Maybe perform a working directory update after a shared repo is created.
@@ -459,18 +459,6 @@
os.mkdir(dstcachedir)
util.copyfile(srcbranchcache, dstbranchcache)
-def _cachetocopy(srcrepo):
- """return the list of cache file valuable to copy during a clone"""
- # In local clones we're copying all nodes, not just served
- # ones. Therefore copy all branch caches over.
- cachefiles = ['branch2']
- cachefiles += ['branch2-%s' % f for f in repoview.filtertable]
- cachefiles += ['rbc-names-v1', 'rbc-revs-v1']
- cachefiles += ['tags2']
- cachefiles += ['tags2-%s' % f for f in repoview.filtertable]
- cachefiles += ['hgtagsfnodes1']
- return cachefiles
-
def clone(ui, peeropts, source, dest=None, pull=False, rev=None,
update=True, stream=False, branch=None, shareopts=None):
"""Make a copy of an existing repository.
@@ -568,7 +556,7 @@
'unable to resolve identity of remote)\n'))
elif sharenamemode == 'remote':
sharepath = os.path.join(
- sharepool, hashlib.sha1(source).hexdigest())
+ sharepool, node.hex(hashlib.sha1(source).digest()))
else:
raise error.Abort(_('unknown share naming mode: %s') %
sharenamemode)
@@ -629,7 +617,7 @@
util.copyfile(srcbookmarks, dstbookmarks)
dstcachedir = os.path.join(destpath, 'cache')
- for cache in _cachetocopy(srcrepo):
+ for cache in cacheutil.cachetocopy(srcrepo):
_copycache(srcrepo, dstcachedir, cache)
# we need to re-init the repo after manually copying the data
@@ -658,6 +646,9 @@
checkout = revs[0]
local = destpeer.local()
if local:
+ u = util.url(abspath)
+ defaulturl = bytes(u)
+ local.ui.setconfig('paths', 'default', defaulturl, 'clone')
if not stream:
if pull:
stream = False
@@ -680,14 +671,14 @@
destrepo = destpeer.local()
if destrepo:
template = uimod.samplehgrcs['cloned']
- fp = destrepo.vfs("hgrc", "wb")
u = util.url(abspath)
u.passwd = None
defaulturl = bytes(u)
- fp.write(util.tonativeeol(template % defaulturl))
- fp.close()
+ destrepo.vfs.write('hgrc', util.tonativeeol(template % defaulturl))
+ destrepo.ui.setconfig('paths', 'default', defaulturl, 'clone')
- destrepo.ui.setconfig('paths', 'default', defaulturl, 'clone')
+ if ui.configbool('experimental', 'remotenames'):
+ logexchange.pullremotenames(destrepo, srcpeer)
if update:
if update is not True:
@@ -843,16 +834,32 @@
return ret
-def merge(repo, node, force=None, remind=True, mergeforce=False, labels=None):
+def merge(repo, node, force=None, remind=True, mergeforce=False, labels=None,
+ abort=False):
"""Branch merge with node, resolving changes. Return true if any
unresolved conflicts."""
- stats = mergemod.update(repo, node, True, force, mergeforce=mergeforce,
- labels=labels)
+ if not abort:
+ stats = mergemod.update(repo, node, True, force, mergeforce=mergeforce,
+ labels=labels)
+ else:
+ ms = mergemod.mergestate.read(repo)
+ if ms.active():
+ # there were conflicts
+ node = ms.localctx.hex()
+ else:
+ # there were no conflicts, mergestate was not stored
+ node = repo['.'].hex()
+
+ repo.ui.status(_("aborting the merge, updating back to"
+ " %s\n") % node[:12])
+ stats = mergemod.update(repo, node, branchmerge=False, force=True,
+ labels=labels)
+
_showstats(repo, stats)
if stats[3]:
repo.ui.status(_("use 'hg resolve' to retry unresolved file merges "
- "or 'hg update -C .' to abandon\n"))
- elif remind:
+ "or 'hg merge --abort' to abandon\n"))
+ elif remind and not abort:
repo.ui.status(_("(branch merge, don't forget to commit)\n"))
return stats[3] > 0
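The abort branch above is what an ``hg merge --abort`` style invocation exercises (the flag is already referenced in the hint text; the command-table wiring is outside this hunk)::

    $ hg merge --abort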
@@ -912,8 +919,13 @@
return _incoming(display, subreporecurse, ui, repo, source, opts)
def _outgoing(ui, repo, dest, opts):
- dest = ui.expandpath(dest or 'default-push', dest or 'default')
- dest, branches = parseurl(dest, opts.get('branch'))
+ path = ui.paths.getpath(dest, default=('default-push', 'default'))
+ if not path:
+ raise error.Abort(_('default repository not configured!'),
+ hint=_("see 'hg help config.paths'"))
+ dest = path.pushloc or path.loc
+ branches = path.branch, opts.get('branch') or []
+
ui.status(_('comparing with %s\n') % util.hidepassword(dest))
revs, checkout = addbranchrevs(repo, repo, branches, opts.get('rev'))
if revs:
--- a/mercurial/hgweb/common.py Mon Jan 08 16:07:51 2018 -0800
+++ b/mercurial/hgweb/common.py Mon Jan 22 17:53:02 2018 -0500
@@ -75,7 +75,7 @@
if deny and (not user or ismember(hgweb.repo.ui, user, deny)):
raise ErrorResponse(HTTP_UNAUTHORIZED, 'push not authorized')
- allow = hgweb.configlist('web', 'allow_push')
+ allow = hgweb.configlist('web', 'allow-push')
if not (allow and ismember(hgweb.repo.ui, user, allow)):
raise ErrorResponse(HTTP_UNAUTHORIZED, 'push not authorized')
--- a/mercurial/hgweb/hgweb_mod.py Mon Jan 08 16:07:51 2018 -0800
+++ b/mercurial/hgweb/hgweb_mod.py Mon Jan 22 17:53:02 2018 -0500
@@ -114,7 +114,7 @@
self.stripecount = self.configint('web', 'stripes')
self.maxshortchanges = self.configint('web', 'maxshortchanges')
self.maxfiles = self.configint('web', 'maxfiles')
- self.allowpull = self.configbool('web', 'allowpull')
+ self.allowpull = self.configbool('web', 'allow-pull')
# we use untrusted=False to prevent a repo owner from using
# web.templates in .hg/hgrc to get access to any file readable
--- a/mercurial/hgweb/protocol.py Mon Jan 08 16:07:51 2018 -0800
+++ b/mercurial/hgweb/protocol.py Mon Jan 22 17:53:02 2018 -0500
@@ -102,25 +102,20 @@
urlreq.quote(self.req.env.get('REMOTE_HOST', '')),
urlreq.quote(self.req.env.get('REMOTE_USER', '')))
- def responsetype(self, v1compressible=False):
+ def responsetype(self, prefer_uncompressed):
"""Determine the appropriate response type and compression settings.
- The ``v1compressible`` argument states whether the response with
- application/mercurial-0.1 media types should be zlib compressed.
-
Returns a tuple of (mediatype, compengine, engineopts).
"""
- # For now, if it isn't compressible in the old world, it's never
- # compressible. We can change this to send uncompressed 0.2 payloads
- # later.
- if not v1compressible:
- return HGTYPE, None, None
-
# Determine the response media type and compression engine based
# on the request parameters.
protocaps = decodevaluefromheaders(self.req, r'X-HgProto').split(' ')
if '0.2' in protocaps:
+ # All clients are expected to support uncompressed data.
+ if prefer_uncompressed:
+ return HGTYPE2, util._noopengine(), {}
+
# Default as defined by wire protocol spec.
compformats = ['zlib', 'none']
for cap in protocaps:
@@ -155,7 +150,7 @@
def call(repo, req, cmd):
p = webproto(req, repo.ui)
- def genversion2(gen, compress, engine, engineopts):
+ def genversion2(gen, engine, engineopts):
# application/mercurial-0.2 always sends a payload header
# identifying the compression engine.
name = engine.wireprotosupport().name
@@ -163,31 +158,27 @@
yield struct.pack('B', len(name))
yield name
- if compress:
- for chunk in engine.compressstream(gen, opts=engineopts):
- yield chunk
- else:
- for chunk in gen:
- yield chunk
+ for chunk in gen:
+ yield chunk
rsp = wireproto.dispatch(repo, p, cmd)
if isinstance(rsp, bytes):
req.respond(HTTP_OK, HGTYPE, body=rsp)
return []
+ elif isinstance(rsp, wireproto.streamres_legacy):
+ gen = rsp.gen
+ req.respond(HTTP_OK, HGTYPE)
+ return gen
elif isinstance(rsp, wireproto.streamres):
- if rsp.reader:
- gen = iter(lambda: rsp.reader.read(32768), '')
- else:
- gen = rsp.gen
+ gen = rsp.gen
# This code for compression should not be streamres specific. It
# is here because we only compress streamres at the moment.
- mediatype, engine, engineopts = p.responsetype(rsp.v1compressible)
+ mediatype, engine, engineopts = p.responsetype(rsp.prefer_uncompressed)
+ gen = engine.compressstream(gen, engineopts)
- if mediatype == HGTYPE and rsp.v1compressible:
- gen = engine.compressstream(gen, engineopts)
- elif mediatype == HGTYPE2:
- gen = genversion2(gen, rsp.v1compressible, engine, engineopts)
+ if mediatype == HGTYPE2:
+ gen = genversion2(gen, engine, engineopts)
req.respond(HTTP_OK, mediatype)
return gen
--- a/mercurial/hgweb/webcommands.py Mon Jan 08 16:07:51 2018 -0800
+++ b/mercurial/hgweb/webcommands.py Mon Jan 22 17:53:02 2018 -0500
@@ -13,7 +13,7 @@
import re
from ..i18n import _
-from ..node import hex, short
+from ..node import hex, nullid, short
from .common import (
ErrorResponse,
@@ -36,9 +36,7 @@
revsetlang,
scmutil,
smartset,
- templatefilters,
templater,
- url,
util,
)
@@ -415,7 +413,7 @@
else:
nextentry = []
- return tmpl(shortlog and 'shortlog' or 'changelog', changenav=changenav,
+ return tmpl('shortlog' if shortlog else 'changelog', changenav=changenav,
node=ctx.hex(), rev=pos, symrev=symrev, changesets=count,
entries=entries,
latestentry=latestentry, nextentry=nextentry,
@@ -1178,11 +1176,16 @@
Information rendered by this handler can be used to create visual
representations of repository topology.
- The ``revision`` URL parameter controls the starting changeset.
+ The ``revision`` URL parameter controls the starting changeset. If it's
+ absent, the default is ``tip``.
The ``revcount`` query string argument can define the number of changesets
to show information for.
+ The ``graphtop`` query string argument can specify the starting changeset
+ for producing the ``jsdata`` variable that is used for rendering the graph
+ in JavaScript. By default it has the same value as ``revision``.
+
This handler will render the ``graph`` template.
"""
@@ -1209,6 +1212,10 @@
morevars = copy.copy(tmpl.defaults['sessionvars'])
morevars['revcount'] = revcount * 2
+ graphtop = req.form.get('graphtop', [ctx.hex()])[0]
+ graphvars = copy.copy(tmpl.defaults['sessionvars'])
+ graphvars['graphtop'] = graphtop
+
count = len(web.repo)
pos = rev
@@ -1217,94 +1224,97 @@
changenav = webutil.revnav(web.repo).gen(pos, revcount, count)
tree = []
+ nextentry = []
+ lastrev = 0
if pos != -1:
allrevs = web.repo.changelog.revs(pos, 0)
revs = []
for i in allrevs:
revs.append(i)
- if len(revs) >= revcount:
+ if len(revs) >= revcount + 1:
break
+ if len(revs) > revcount:
+ nextentry = [webutil.commonentry(web.repo, web.repo[revs[-1]])]
+ revs = revs[:-1]
+
+ lastrev = revs[-1]
+
# We have to feed a baseset to dagwalker as it is expecting smartset
# object. This does not have a big impact on hgweb performance itself
# since hgweb graphing code is not itself lazy yet.
dag = graphmod.dagwalker(web.repo, smartset.baseset(revs))
# As we said one line above... not lazy.
- tree = list(graphmod.colored(dag, web.repo))
-
- def getcolumns(tree):
- cols = 0
- for (id, type, ctx, vtx, edges) in tree:
- if type != graphmod.CHANGESET:
- continue
- cols = max(cols, max([edge[0] for edge in edges] or [0]),
- max([edge[1] for edge in edges] or [0]))
- return cols
-
- def graphdata(usetuples, encodestr):
- data = []
+ tree = list(item for item in graphmod.colored(dag, web.repo)
+ if item[1] == graphmod.CHANGESET)
- row = 0
- for (id, type, ctx, vtx, edges) in tree:
- if type != graphmod.CHANGESET:
- continue
- node = pycompat.bytestr(ctx)
- age = encodestr(templatefilters.age(ctx.date()))
- desc = templatefilters.firstline(encodestr(ctx.description()))
- desc = url.escape(templatefilters.nonempty(desc))
- user = url.escape(templatefilters.person(encodestr(ctx.user())))
- branch = url.escape(encodestr(ctx.branch()))
- try:
- branchnode = web.repo.branchtip(branch)
- except error.RepoLookupError:
- branchnode = None
- branch = branch, branchnode == ctx.node()
+ def nodecurrent(ctx):
+ wpnodes = web.repo.dirstate.parents()
+ if wpnodes[1] == nullid:
+ wpnodes = wpnodes[:1]
+ if ctx.node() in wpnodes:
+ return '@'
+ return ''
+
+ def nodesymbol(ctx):
+ if ctx.obsolete():
+ return 'x'
+ elif ctx.isunstable():
+ return '*'
+ elif ctx.closesbranch():
+ return '_'
+ else:
+ return 'o'
- if usetuples:
- data.append((node, vtx, edges, desc, user, age, branch,
- [url.escape(encodestr(x)) for x in ctx.tags()],
- [url.escape(encodestr(x))
- for x in ctx.bookmarks()]))
- else:
- edgedata = [{'col': edge[0], 'nextcol': edge[1],
- 'color': (edge[2] - 1) % 6 + 1,
- 'width': edge[3], 'bcolor': edge[4]}
- for edge in edges]
+ def fulltree():
+ pos = web.repo[graphtop].rev()
+ tree = []
+ if pos != -1:
+ revs = web.repo.changelog.revs(pos, lastrev)
+ dag = graphmod.dagwalker(web.repo, smartset.baseset(revs))
+ tree = list(item for item in graphmod.colored(dag, web.repo)
+ if item[1] == graphmod.CHANGESET)
+ return tree
+
+ def jsdata():
+ return [{'node': pycompat.bytestr(ctx),
+ 'graphnode': nodecurrent(ctx) + nodesymbol(ctx),
+ 'vertex': vtx,
+ 'edges': edges}
+ for (id, type, ctx, vtx, edges) in fulltree()]
- data.append(
- {'node': node,
- 'col': vtx[0],
- 'color': (vtx[1] - 1) % 6 + 1,
- 'edges': edgedata,
- 'row': row,
- 'nextrow': row + 1,
- 'desc': desc,
- 'user': user,
- 'age': age,
- 'bookmarks': webutil.nodebookmarksdict(
- web.repo, ctx.node()),
- 'branches': webutil.nodebranchdict(web.repo, ctx),
- 'inbranch': webutil.nodeinbranch(web.repo, ctx),
- 'tags': webutil.nodetagsdict(web.repo, ctx.node())})
+ def nodes():
+ parity = paritygen(web.stripecount)
+ for row, (id, type, ctx, vtx, edges) in enumerate(tree):
+ entry = webutil.commonentry(web.repo, ctx)
+ edgedata = [{'col': edge[0],
+ 'nextcol': edge[1],
+ 'color': (edge[2] - 1) % 6 + 1,
+ 'width': edge[3],
+ 'bcolor': edge[4]}
+ for edge in edges]
- row += 1
-
- return data
+ entry.update({'col': vtx[0],
+ 'color': (vtx[1] - 1) % 6 + 1,
+ 'parity': next(parity),
+ 'edges': edgedata,
+ 'row': row,
+ 'nextrow': row + 1})
- cols = getcolumns(tree)
+ yield entry
+
rows = len(tree)
- canvasheight = (rows + 1) * bg_height - 27
return tmpl('graph', rev=rev, symrev=symrev, revcount=revcount,
uprev=uprev,
lessvars=lessvars, morevars=morevars, downrev=downrev,
- cols=cols, rows=rows,
- canvaswidth=(cols + 1) * bg_height,
- truecanvasheight=rows * bg_height,
- canvasheight=canvasheight, bg_height=bg_height,
- # {jsdata} will be passed to |json, so it must be in utf-8
- jsdata=lambda **x: graphdata(True, encoding.fromlocal),
- nodes=lambda **x: graphdata(False, pycompat.bytestr),
+ graphvars=graphvars,
+ rows=rows,
+ bg_height=bg_height,
+ changesets=count,
+ nextentry=nextentry,
+ jsdata=lambda **x: jsdata(),
+ nodes=lambda **x: nodes(),
node=ctx.hex(), changenav=changenav)
def _getdoc(e):
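
For orientation, the refactored graph handler above now emits one compact record per changeset for the JavaScript renderer. The sketch below is not Mercurial API; it only mirrors the symbol logic and the field names of the jsdata() records, with made-up values.

def graphnode(iswdirparent, obsolete, unstable, closesbranch):
    current = '@' if iswdirparent else ''
    if obsolete:
        symbol = 'x'
    elif unstable:
        symbol = '*'
    elif closesbranch:
        symbol = '_'
    else:
        symbol = 'o'
    return current + symbol

entry = {
    'node': '1d22f65f027b',                 # short hex, made up
    'graphnode': graphnode(True, False, False, False),
    'vertex': (0, 1),                       # column and color index
    'edges': [(0, 0, 1, 1, '')],            # col, nextcol, color, width, bcolor
}
assert entry['graphnode'] == '@o'
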
--- a/mercurial/hgweb/webutil.py Mon Jan 08 16:07:51 2018 -0800
+++ b/mercurial/hgweb/webutil.py Mon Jan 22 17:53:02 2018 -0500
@@ -32,6 +32,7 @@
pathutil,
pycompat,
templatefilters,
+ templatekw,
ui as uimod,
util,
)
@@ -351,6 +352,12 @@
def formatlinerange(fromline, toline):
return '%d:%d' % (fromline + 1, toline)
+def succsandmarkers(repo, ctx):
+ for item in templatekw.showsuccsandmarkers(repo, ctx):
+ item['successors'] = _siblings(repo[successor]
+ for successor in item['successors'])
+ yield item
+
def commonentry(repo, ctx):
node = ctx.node()
return {
@@ -361,6 +368,9 @@
'date': ctx.date(),
'extra': ctx.extra(),
'phase': ctx.phasestr(),
+ 'obsolete': ctx.obsolete(),
+ 'succsandmarkers': lambda **x: succsandmarkers(repo, ctx),
+ 'instabilities': [{"instability": i} for i in ctx.instabilities()],
'branch': nodebranchnodefault(ctx),
'inbranch': nodeinbranch(repo, ctx),
'branches': nodebranchdict(repo, ctx),
@@ -409,7 +419,7 @@
files = []
parity = paritygen(web.stripecount)
for blockno, f in enumerate(ctx.files()):
- template = f in ctx and 'filenodelink' or 'filenolink'
+ template = 'filenodelink' if f in ctx else 'filenolink'
files.append(tmpl(template,
node=ctx.hex(), file=f, blockno=blockno + 1,
parity=next(parity)))
@@ -571,7 +581,7 @@
fileno = 0
for filename, adds, removes, isbinary in stats:
- template = filename in files and 'diffstatlink' or 'diffstatnolink'
+ template = 'diffstatlink' if filename in files else 'diffstatnolink'
total = adds + removes
fileno += 1
yield tmpl(template, node=ctx.hex(), file=filename, fileno=fileno,
--- a/mercurial/hook.py Mon Jan 08 16:07:51 2018 -0800
+++ b/mercurial/hook.py Mon Jan 22 17:53:02 2018 -0500
@@ -91,7 +91,7 @@
starttime = util.timer()
try:
- r = obj(ui=ui, repo=repo, hooktype=htype, **args)
+ r = obj(ui=ui, repo=repo, hooktype=htype, **pycompat.strkwargs(args))
except Exception as exc:
if isinstance(exc, error.Abort):
ui.warn(_('error: %s hook failed: %s\n') %
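
The **pycompat.strkwargs(args) change above matters on Python 3, where keyword expansion requires str keys while hook arguments are kept as bytes internally. A minimal stand-in for the helper (the real one lives in pycompat) behaves like this:

def strkwargs(dic):
    # decode bytes keys to native str so they can be used as keyword names
    return {k.decode('latin-1') if isinstance(k, bytes) else k: v
            for k, v in dic.items()}

def samplehook(ui=None, repo=None, hooktype=None, node=None):
    return hooktype, node

args = {b'node': b'abc123'}
# samplehook(..., **args) would raise "keywords must be strings" on Python 3;
# the converted form works:
assert samplehook(ui=None, repo=None, hooktype=b'pretxncommit',
                  **strkwargs(args)) == (b'pretxncommit', b'abc123')
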
--- a/mercurial/httpconnection.py Mon Jan 08 16:07:51 2018 -0800
+++ b/mercurial/httpconnection.py Mon Jan 22 17:53:02 2018 -0500
@@ -248,7 +248,7 @@
return self.https_open(req)
def makehttpcon(*args, **kwargs):
k2 = dict(kwargs)
- k2['use_ssl'] = False
+ k2[r'use_ssl'] = False
return HTTPConnection(*args, **k2)
return self.do_open(makehttpcon, req, False)
@@ -288,8 +288,8 @@
if '[' in host:
host = host[1:-1]
- kwargs['keyfile'] = keyfile
- kwargs['certfile'] = certfile
+ kwargs[r'keyfile'] = keyfile
+ kwargs[r'certfile'] = certfile
con = HTTPConnection(host, port, use_ssl=True,
ssl_wrap_socket=sslutil.wrapsocket,
--- a/mercurial/httppeer.py Mon Jan 08 16:07:51 2018 -0800
+++ b/mercurial/httppeer.py Mon Jan 22 17:53:02 2018 -0500
@@ -161,6 +161,41 @@
h.close()
getattr(h, "close_all", lambda: None)()
+ def _openurl(self, req):
+ if (self._ui.debugflag
+ and self._ui.configbool('devel', 'debug.peer-request')):
+ dbg = self._ui.debug
+ line = 'devel-peer-request: %s\n'
+ dbg(line % '%s %s' % (req.get_method(), req.get_full_url()))
+ hgargssize = None
+
+ for header, value in sorted(req.header_items()):
+ if header.startswith('X-hgarg-'):
+ if hgargssize is None:
+ hgargssize = 0
+ hgargssize += len(value)
+ else:
+ dbg(line % ' %s %s' % (header, value))
+
+ if hgargssize is not None:
+ dbg(line % ' %d bytes of commands arguments in headers'
+ % hgargssize)
+
+ if req.has_data():
+ data = req.get_data()
+ length = getattr(data, 'length', None)
+ if length is None:
+ length = len(data)
+ dbg(line % ' %d bytes of data' % length)
+
+ start = util.timer()
+
+ ret = self._urlopener.open(req)
+ if self._ui.configbool('devel', 'debug.peer-request'):
+ dbg(line % ' finished in %.4f seconds (%s)'
+ % (util.timer() - start, ret.code))
+ return ret
+
# Begin of _basepeer interface.
@util.propertycache
@@ -204,6 +239,7 @@
self._caps = set(self._call('capabilities').split())
def _callstream(self, cmd, _compressible=False, **args):
+ args = pycompat.byteskwargs(args)
if cmd == 'pushkey':
args['data'] = ''
data = args.pop('data', None)
@@ -222,7 +258,7 @@
if not data:
data = strargs
else:
- if isinstance(data, basestring):
+ if isinstance(data, bytes):
i = io.BytesIO(data)
i.length = len(data)
data = i
@@ -297,7 +333,7 @@
self.ui.debug("sending %s bytes\n" % size)
req.add_unredirected_header('Content-Length', '%d' % size)
try:
- resp = self._urlopener.open(req)
+ resp = self._openurl(req)
except urlerr.httperror as inst:
if inst.code == 401:
raise error.Abort(_('authorization failed'))
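
The new _openurl() wrapper above only logs when both --debug and the devel.debug.peer-request option are active; X-hgarg-* headers (which carry encoded command arguments) are summarized as a byte count rather than dumped. A self-contained sketch of that summarization, with invented header values:

headers = [
    ('Accept', 'application/mercurial-0.1'),
    ('X-hgarg-1', 'cmds=heads+%3Bknown'),
    ('X-hgarg-2', 'nodes%3Dabcdef0123456789'),
]
line = 'devel-peer-request: %s\n'
hgargssize = None
for header, value in sorted(headers):
    if header.startswith('X-hgarg-'):
        hgargssize = (hgargssize or 0) + len(value)
    else:
        print(line % ('  %s %s' % (header, value)), end='')
if hgargssize is not None:
    print(line % ('  %d bytes of commands arguments in headers' % hgargssize),
          end='')
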
--- a/mercurial/keepalive.py Mon Jan 08 16:07:51 2018 -0800
+++ b/mercurial/keepalive.py Mon Jan 22 17:53:02 2018 -0500
@@ -92,6 +92,7 @@
from .i18n import _
from . import (
+ node,
pycompat,
urllibcompat,
util,
@@ -322,7 +323,7 @@
data = urllibcompat.getdata(req)
h.putrequest(
req.get_method(), urllibcompat.getselector(req),
- **skipheaders)
+ **pycompat.strkwargs(skipheaders))
if 'content-type' not in headers:
h.putheader('Content-type',
'application/x-www-form-urlencoded')
@@ -331,7 +332,7 @@
else:
h.putrequest(
req.get_method(), urllibcompat.getselector(req),
- **skipheaders)
+ **pycompat.strkwargs(skipheaders))
except socket.error as err:
raise urlerr.urlerror(err)
for k, v in headers.items():
@@ -366,8 +367,8 @@
def __init__(self, sock, debuglevel=0, strict=0, method=None):
extrakw = {}
if not pycompat.ispy3:
- extrakw['strict'] = True
- extrakw['buffering'] = True
+ extrakw[r'strict'] = True
+ extrakw[r'buffering'] = True
httplib.HTTPResponse.__init__(self, sock, debuglevel=debuglevel,
method=method, **extrakw)
self.fileno = sock.fileno
@@ -607,7 +608,7 @@
foo = fo.read()
fo.close()
m = md5(foo)
- print(format % ('normal urllib', m.hexdigest()))
+ print(format % ('normal urllib', node.hex(m.digest())))
# now install the keepalive handler and try again
opener = urlreq.buildopener(HTTPHandler())
@@ -617,7 +618,7 @@
foo = fo.read()
fo.close()
m = md5(foo)
- print(format % ('keepalive read', m.hexdigest()))
+ print(format % ('keepalive read', node.hex(m.digest())))
fo = urlreq.urlopen(url)
foo = ''
@@ -629,7 +630,7 @@
break
fo.close()
m = md5(foo)
- print(format % ('keepalive readline', m.hexdigest()))
+ print(format % ('keepalive readline', node.hex(m.digest())))
def comp(N, url):
print(' making %i connections to:\n %s' % (N, url))
--- a/mercurial/localrepo.py Mon Jan 08 16:07:51 2018 -0800
+++ b/mercurial/localrepo.py Mon Jan 22 17:53:02 2018 -0500
@@ -197,7 +197,7 @@
**kwargs):
chunks = exchange.getbundlechunks(self._repo, source, heads=heads,
common=common, bundlecaps=bundlecaps,
- **kwargs)
+ **kwargs)[1]
cb = util.chunkbuffer(chunks)
if exchange.bundle2requested(bundlecaps):
@@ -364,11 +364,14 @@
self.root = self.wvfs.base
self.path = self.wvfs.join(".hg")
self.origroot = path
- # These auditor are not used by the vfs,
- # only used when writing this comment: basectx.match
- self.auditor = pathutil.pathauditor(self.root, self._checknested)
- self.nofsauditor = pathutil.pathauditor(self.root, self._checknested,
- realfs=False, cached=True)
+ # This is only used by context.workingctx.match in order to
+ # detect files in subrepos.
+ self.auditor = pathutil.pathauditor(
+ self.root, callback=self._checknested)
+ # This is only used by context.basectx.match in order to detect
+ # files in subrepos.
+ self.nofsauditor = pathutil.pathauditor(
+ self.root, callback=self._checknested, realfs=False, cached=True)
self.baseui = baseui
self.ui = baseui.copy()
self.ui.copy = baseui.copy # prevent copying repo configuration
@@ -499,9 +502,6 @@
# post-dirstate-status hooks
self._postdsstatus = []
- # Cache of types representing filtered repos.
- self._filteredrepotypes = weakref.WeakKeyDictionary()
-
# generic mapping between names and nodes
self.names = namespaces.namespaces()
@@ -577,7 +577,8 @@
def _restrictcapabilities(self, caps):
if self.ui.configbool('experimental', 'bundle2-advertise'):
caps = set(caps)
- capsblob = bundle2.encodecaps(bundle2.getrepocaps(self))
+ capsblob = bundle2.encodecaps(bundle2.getrepocaps(self,
+ role='client'))
caps.add('bundle2=' + urlreq.quote(capsblob))
return caps
@@ -675,23 +676,10 @@
Intended to be overwritten by filtered repo."""
return self
- def filtered(self, name):
+ def filtered(self, name, visibilityexceptions=None):
"""Return a filtered version of a repository"""
- # Python <3.4 easily leaks types via __mro__. See
- # https://bugs.python.org/issue17950. We cache dynamically
- # created types so this method doesn't leak on every
- # invocation.
-
- key = self.unfiltered().__class__
- if key not in self._filteredrepotypes:
- # Build a new type with the repoview mixin and the base
- # class of this repo. Give it a name containing the
- # filter name to aid debugging.
- bases = (repoview.repoview, key)
- cls = type(r'%sfilteredrepo' % name, bases, {})
- self._filteredrepotypes[key] = cls
-
- return self._filteredrepotypes[key](self, name)
+ cls = repoview.newtype(self.unfiltered().__class__)
+ return cls(self, name, visibilityexceptions)
@repofilecache('bookmarks', 'bookmarks.current')
def _bookmarks(self):
@@ -701,8 +689,8 @@
def _activebookmark(self):
return self._bookmarks.active
- # _phaserevs and _phasesets depend on changelog. what we need is to
- # call _phasecache.invalidate() if '00changelog.i' was changed, but it
+ # _phasesets depend on changelog. what we need is to call
+ # _phasecache.invalidate() if '00changelog.i' was changed, but it
# can't be easily expressed in filecache mechanism.
@storecache('phaseroots', '00changelog.i')
def _phasecache(self):
@@ -775,7 +763,9 @@
__bool__ = __nonzero__
def __len__(self):
- return len(self.changelog)
+ # no need to pay the cost of repoview.changelog
+ unfi = self.unfiltered()
+ return len(unfi.changelog)
def __iter__(self):
return iter(self.changelog)
@@ -1112,7 +1102,7 @@
data = self.wvfs.read(filename)
return self._filter(self._encodefilterpats, filename, data)
- def wwrite(self, filename, data, flags, backgroundclose=False):
+ def wwrite(self, filename, data, flags, backgroundclose=False, **kwargs):
"""write ``data`` into ``filename`` in the working directory
This returns length of written (maybe decoded) data.
@@ -1121,9 +1111,12 @@
if 'l' in flags:
self.wvfs.symlink(data, filename)
else:
- self.wvfs.write(filename, data, backgroundclose=backgroundclose)
+ self.wvfs.write(filename, data, backgroundclose=backgroundclose,
+ **kwargs)
if 'x' in flags:
self.wvfs.setflags(filename, False, True)
+ else:
+ self.wvfs.setflags(filename, False, False)
return len(data)
def wwritedata(self, filename, data):
@@ -1147,7 +1140,6 @@
raise error.ProgrammingError('transaction requires locking')
tr = self.currenttransaction()
if tr is not None:
- scmutil.registersummarycallback(self, tr, desc)
return tr.nest()
# abort here if the journal already exists
@@ -1244,6 +1236,8 @@
# gating.
tracktags(tr2)
repo = reporef()
+ if repo.ui.configbool('experimental', 'single-head-per-branch'):
+ scmutil.enforcesinglehead(repo, tr2, desc)
if hook.hashook(repo.ui, 'pretxnclose-bookmark'):
for name, (old, new) in sorted(tr.changes['bookmarks'].items()):
args = tr.hookargs.copy()
@@ -1286,7 +1280,7 @@
validator=validate,
releasefn=releasefn,
checkambigfiles=_cachedfiles)
- tr.changes['revs'] = set()
+ tr.changes['revs'] = xrange(0, 0)
tr.changes['obsmarkers'] = set()
tr.changes['phases'] = {}
tr.changes['bookmarks'] = {}
@@ -1329,7 +1323,11 @@
**pycompat.strkwargs(hookargs))
reporef()._afterlock(hookfunc)
tr.addfinalize('txnclose-hook', txnclosehook)
- tr.addpostclose('warms-cache', self._buildcacheupdater(tr))
+ # Include a leading "-" to make it happen before the transaction summary
+ # reports registered via scmutil.registersummarycallback() whose names
+ # are 00-txnreport etc. That way, the caches will be warm when the
+ # callbacks run.
+ tr.addpostclose('-warm-cache', self._buildcacheupdater(tr))
def txnaborthook(tr2):
"""To be run if transaction is aborted
"""
@@ -1587,29 +1585,18 @@
# determine whether it can be inherited
if parentenvvar is not None:
parentlock = encoding.environ.get(parentenvvar)
- try:
- l = lockmod.lock(vfs, lockname, 0, releasefn=releasefn,
- acquirefn=acquirefn, desc=desc,
- inheritchecker=inheritchecker,
- parentlock=parentlock)
- except error.LockHeld as inst:
- if not wait:
- raise
- # show more details for new-style locks
- if ':' in inst.locker:
- host, pid = inst.locker.split(":", 1)
- self.ui.warn(
- _("waiting for lock on %s held by process %r "
- "on host %r\n") % (desc, pid, host))
- else:
- self.ui.warn(_("waiting for lock on %s held by %r\n") %
- (desc, inst.locker))
- # default to 600 seconds timeout
- l = lockmod.lock(vfs, lockname,
- int(self.ui.config("ui", "timeout")),
- releasefn=releasefn, acquirefn=acquirefn,
- desc=desc)
- self.ui.warn(_("got lock after %s seconds\n") % l.delay)
+
+ timeout = 0
+ warntimeout = 0
+ if wait:
+ timeout = self.ui.configint("ui", "timeout")
+ warntimeout = self.ui.configint("ui", "timeout.warn")
+
+ l = lockmod.trylock(self.ui, vfs, lockname, timeout, warntimeout,
+ releasefn=releasefn,
+ acquirefn=acquirefn, desc=desc,
+ inheritchecker=inheritchecker,
+ parentlock=parentlock)
return l
def _afterlock(self, callback):
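
The filtered() rewrite above moves the dynamic-subclass construction into repoview.newtype(), which is expected to handle caching of the generated types itself. The sketch below is only a stand-in for that pattern (the class bodies are invented), showing how a view class is combined with the concrete repository class:

class repoview(object):
    def __init__(self, repo, name, visibilityexceptions=None):
        self._unfiltered = repo
        self.filtername = name
        self._visibilityexceptions = visibilityexceptions

def newtype(base):
    # build "<repoview, base>" once per base class; the real helper caches it
    class filteredrepo(repoview, base):
        pass
    return filteredrepo

class localrepository(object):
    pass

cls = newtype(localrepository)
view = cls(localrepository(), 'visible')
assert isinstance(view, localrepository) and view.filtername == 'visible'
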
--- a/mercurial/lock.py Mon Jan 08 16:07:51 2018 -0800
+++ b/mercurial/lock.py Mon Jan 22 17:53:02 2018 -0500
@@ -14,6 +14,8 @@
import time
import warnings
+from .i18n import _
+
from . import (
encoding,
error,
@@ -39,6 +41,58 @@
raise
return result
+def trylock(ui, vfs, lockname, timeout, warntimeout, *args, **kwargs):
+ """return an acquired lock or raise an a LockHeld exception
+
+ This function is responsible to issue warnings and or debug messages about
+ the held lock while trying to acquires it."""
+
+ def printwarning(printer, locker):
+ """issue the usual "waiting on lock" message through any channel"""
+ # show more details for new-style locks
+ if ':' in locker:
+ host, pid = locker.split(":", 1)
+ msg = _("waiting for lock on %s held by process %r "
+ "on host %r\n") % (l.desc, pid, host)
+ else:
+ msg = _("waiting for lock on %s held by %r\n") % (l.desc, locker)
+ printer(msg)
+
+ l = lock(vfs, lockname, 0, *args, dolock=False, **kwargs)
+
+ debugidx = 0 if (warntimeout and timeout) else -1
+ warningidx = 0
+ if not timeout:
+ warningidx = -1
+ elif warntimeout:
+ warningidx = warntimeout
+
+ delay = 0
+ while True:
+ try:
+ l._trylock()
+ break
+ except error.LockHeld as inst:
+ if delay == debugidx:
+ printwarning(ui.debug, inst.locker)
+ if delay == warningidx:
+ printwarning(ui.warn, inst.locker)
+ if timeout <= delay:
+ raise error.LockHeld(errno.ETIMEDOUT, inst.filename,
+ l.desc, inst.locker)
+ time.sleep(1)
+ delay += 1
+
+ l.delay = delay
+ if l.delay:
+ if 0 <= warningidx <= l.delay:
+ ui.warn(_("got lock after %s seconds\n") % l.delay)
+ else:
+ ui.debug("got lock after %s seconds\n" % l.delay)
+ if l.acquirefn:
+ l.acquirefn()
+ return l
+
class lock(object):
'''An advisory lock held by one process to control access to a set
of files. Non-cooperating processes or incorrectly written scripts
@@ -60,7 +114,8 @@
_host = None
def __init__(self, vfs, file, timeout=-1, releasefn=None, acquirefn=None,
- desc=None, inheritchecker=None, parentlock=None):
+ desc=None, inheritchecker=None, parentlock=None,
+ dolock=True):
self.vfs = vfs
self.f = file
self.held = 0
@@ -74,9 +129,10 @@
self._inherited = False
self.postrelease = []
self.pid = self._getpid()
- self.delay = self.lock()
- if self.acquirefn:
- self.acquirefn()
+ if dolock:
+ self.delay = self.lock()
+ if self.acquirefn:
+ self.acquirefn()
def __enter__(self):
return self
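
To make the retry policy of the new trylock() above easier to follow: with ui.timeout and ui.timeout.warn both set, a debug line is printed on the first failed attempt, a user-visible warning once the warn threshold is reached, and LockHeld is raised when the full timeout expires. This toy model reproduces just that schedule (values invented):

def schedule(timeout, warntimeout, attempts):
    debugidx = 0 if (warntimeout and timeout) else -1
    warningidx = -1 if not timeout else (warntimeout or 0)
    events = []
    for delay in range(attempts):      # one failed _trylock() per second
        if delay == debugidx:
            events.append((delay, 'debug'))
        if delay == warningidx:
            events.append((delay, 'warn'))
        if timeout <= delay:
            events.append((delay, 'LockHeld'))
            break
    return events

assert schedule(timeout=600, warntimeout=10, attempts=15) == \
    [(0, 'debug'), (10, 'warn')]
assert schedule(timeout=0, warntimeout=0, attempts=3) == [(0, 'LockHeld')]
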
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/mercurial/logexchange.py Mon Jan 22 17:53:02 2018 -0500
@@ -0,0 +1,118 @@
+# logexchange.py
+#
+# Copyright 2017 Augie Fackler <raf@durin42.com>
+# Copyright 2017 Sean Farley <sean@farley.io>
+#
+# This software may be used and distributed according to the terms of the
+# GNU General Public License version 2 or any later version.
+
+from __future__ import absolute_import
+
+from .node import hex
+
+from . import (
+ vfs as vfsmod,
+)
+
+# directory name in .hg/ in which remotenames files will be present
+remotenamedir = 'logexchange'
+
+def readremotenamefile(repo, filename):
+ """
+ reads a file from the .hg/logexchange/ directory and yields its content
+ filename: the file to be read
+ yields a tuple (node, remotepath, name)
+ """
+
+ vfs = vfsmod.vfs(repo.vfs.join(remotenamedir))
+ if not vfs.exists(filename):
+ return
+ f = vfs(filename)
+ lineno = 0
+ for line in f:
+ line = line.strip()
+ if not line:
+ continue
+ # contains the version number
+ if lineno == 0:
+ lineno += 1
+ try:
+ node, remote, rname = line.split('\0')
+ yield node, remote, rname
+ except ValueError:
+ pass
+
+ f.close()
+
+def readremotenames(repo):
+ """
+ reads the details about the remotenames stored in .hg/logexchange/ and
+ yields a tuple (node, remotepath, name). It does not yield information
+ about whether a yielded entry is a branch or a bookmark. To get that
+ information, call the respective functions.
+ """
+
+ for bmentry in readremotenamefile(repo, 'bookmarks'):
+ yield bmentry
+ for branchentry in readremotenamefile(repo, 'branches'):
+ yield branchentry
+
+def writeremotenamefile(repo, remotepath, names, nametype):
+ vfs = vfsmod.vfs(repo.vfs.join(remotenamedir))
+ f = vfs(nametype, 'w', atomictemp=True)
+ # write the storage version info on top of file
+ # version '0' represents the very initial version of the storage format
+ f.write('0\n\n')
+
+ olddata = set(readremotenamefile(repo, nametype))
+ # re-save the data for remotes other than this one.
+ for node, oldpath, rname in sorted(olddata):
+ if oldpath != remotepath:
+ f.write('%s\0%s\0%s\n' % (node, oldpath, rname))
+
+ for name, node in sorted(names.iteritems()):
+ if nametype == "branches":
+ for n in node:
+ f.write('%s\0%s\0%s\n' % (n, remotepath, name))
+ elif nametype == "bookmarks":
+ if node:
+ f.write('%s\0%s\0%s\n' % (node, remotepath, name))
+
+ f.close()
+
+def saveremotenames(repo, remotepath, branches=None, bookmarks=None):
+ """
+ save remotenames, i.e. remote bookmarks and remote branches, in their
+ respective files under the ".hg/logexchange/" directory.
+ """
+ wlock = repo.wlock()
+ try:
+ if bookmarks:
+ writeremotenamefile(repo, remotepath, bookmarks, 'bookmarks')
+ if branches:
+ writeremotenamefile(repo, remotepath, branches, 'branches')
+ finally:
+ wlock.release()
+
+def pullremotenames(localrepo, remoterepo):
+ """
+ pulls bookmark and branch information from the remote repo during a
+ pull or clone operation.
+ localrepo is our local repository
+ remoterepo is the peer instance
+ """
+ remotepath = remoterepo.url()
+ bookmarks = remoterepo.listkeys('bookmarks')
+ # on a push, we don't want to keep obsolete heads since
+ # they won't show up as heads on the next pull, so we
+ # remove them here otherwise we would require the user
+ # to issue a pull to refresh the storage
+ bmap = {}
+ repo = localrepo.unfiltered()
+ for branch, nodes in remoterepo.branchmap().iteritems():
+ bmap[branch] = []
+ for node in nodes:
+ if node in repo and not repo[node].obsolete():
+ bmap[branch].append(hex(node))
+
+ saveremotenames(localrepo, remotepath, bmap, bookmarks)
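
For reference, each file under .hg/logexchange/ starts with a version line ('0') and a blank separator line, followed by one "node\0remotepath\0name" record per line. A round-trip of that layout with made-up values:

records = [
    ('a' * 40, 'https://example.com/repo', '@'),        # a bookmark
    ('b' * 40, 'https://example.com/repo', 'default'),  # a branch head
]
content = '0\n\n' + ''.join('%s\0%s\0%s\n' % r for r in records)

parsed = []
for lineno, line in enumerate(content.splitlines()):
    if lineno == 0 or not line:    # skip version line and blank separator
        continue
    node, remote, rname = line.split('\0')
    parsed.append((node, remote, rname))
assert parsed == records
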
--- a/mercurial/mail.py Mon Jan 08 16:07:51 2018 -0800
+++ b/mercurial/mail.py Mon Jan 22 17:53:02 2018 -0500
@@ -152,7 +152,7 @@
fp = open(mbox, 'ab+')
# Should be time.asctime(), but Windows prints 2-characters day
# of month instead of one. Make them print the same thing.
- date = time.strftime('%a %b %d %H:%M:%S %Y', time.localtime())
+ date = time.strftime(r'%a %b %d %H:%M:%S %Y', time.localtime())
fp.write('From %s %s\n' % (sender, date))
fp.write(msg)
fp.write('\n\n')
--- a/mercurial/manifest.py Mon Jan 08 16:07:51 2018 -0800
+++ b/mercurial/manifest.py Mon Jan 22 17:53:02 2018 -0500
@@ -810,7 +810,7 @@
if p in self._files:
yield self._subpath(p)
else:
- for f in self._dirs[p].iterkeys():
+ for f in self._dirs[p]:
yield f
def keys(self):
--- a/mercurial/match.py Mon Jan 08 16:07:51 2018 -0800
+++ b/mercurial/match.py Mon Jan 22 17:53:02 2018 -0500
@@ -305,9 +305,6 @@
Returns the string 'all' if the given directory and all subdirectories
should be visited. Otherwise returns True or False indicating whether
the given directory should be visited.
-
- This function's behavior is undefined if it has returned False for
- one of the dir's parent directories.
'''
return True
@@ -460,17 +457,10 @@
class differencematcher(basematcher):
'''Composes two matchers by matching if the first matches and the second
- does not. Well, almost... If the user provides a pattern like "-X foo foo",
- Mercurial actually does match "foo" against that. That's because exact
- matches are treated specially. So, since this differencematcher is used for
- excludes, it needs to special-case exact matching.
+ does not.
The second matcher's non-matching-attributes (root, cwd, bad, explicitdir,
traversedir) are ignored.
-
- TODO: If we want to keep the behavior described above for exact matches, we
- should consider instead treating the above case something like this:
- union(exact(foo), difference(pattern(foo), include(foo)))
'''
def __init__(self, m1, m2):
super(differencematcher, self).__init__(m1._root, m1._cwd)
@@ -481,7 +471,7 @@
self.traversedir = m1.traversedir
def matchfn(self, f):
- return self._m1(f) and (not self._m2(f) or self._m1.exact(f))
+ return self._m1(f) and not self._m2(f)
@propertycache
def _files(self):
@@ -496,9 +486,6 @@
def visitdir(self, dir):
if self._m2.visitdir(dir) == 'all':
- # There's a bug here: If m1 matches file 'dir/file' and m2 excludes
- # 'dir' (recursively), we should still visit 'dir' due to the
- # exception we have for exact matches.
return False
return bool(self._m1.visitdir(dir))
--- a/mercurial/mdiff.py Mon Jan 08 16:07:51 2018 -0800
+++ b/mercurial/mdiff.py Mon Jan 22 17:53:02 2018 -0500
@@ -67,6 +67,7 @@
'ignoreblanklines': False,
'upgrade': False,
'showsimilarity': False,
+ 'worddiff': False,
}
def __init__(self, **opts):
@@ -99,7 +100,7 @@
if blank and opts.ignoreblanklines:
text = re.sub('\n+', '\n', text).strip('\n')
if opts.ignorewseol:
- text = re.sub(r'[ \t\r\f]+\n', r'\n', text)
+ text = re.sub(br'[ \t\r\f]+\n', r'\n', text)
return text
def splitblock(base1, lines1, base2, lines2, opts):
@@ -355,7 +356,7 @@
# the previous hunk context until we find a line starting with an
# alphanumeric char.
for i in xrange(astart - 1, lastpos - 1, -1):
- if l1[i][0].isalnum():
+ if l1[i][0:1].isalnum():
func = ' ' + l1[i].rstrip()[:40]
lastfunc[1] = func
break
--- a/mercurial/merge.py Mon Jan 08 16:07:51 2018 -0800
+++ b/mercurial/merge.py Mon Jan 22 17:53:02 2018 -0500
@@ -646,6 +646,14 @@
return config
def _checkunknownfile(repo, wctx, mctx, f, f2=None):
+ if wctx.isinmemory():
+ # Nothing to do in IMM because nothing in the "working copy" can be an
+ # unknown file.
+ #
+ # Note that we should bail out here, not in ``_checkunknownfiles()``,
+ # because that function does other useful work.
+ return False
+
if f2 is None:
f2 = f
return (repo.wvfs.audit.check(f)
@@ -674,7 +682,11 @@
# updated with any new dirs that are checked and found to be absent.
self._missingdircache = set()
- def __call__(self, repo, f):
+ def __call__(self, repo, wctx, f):
+ if wctx.isinmemory():
+ # Nothing to do in IMM for the same reason as ``_checkunknownfile``.
+ return False
+
# Check for path prefixes that exist as unknown files.
for p in reversed(list(util.finddirs(f))):
if p in self._missingdircache:
@@ -726,7 +738,7 @@
if _checkunknownfile(repo, wctx, mctx, f):
fileconflicts.add(f)
elif pathconfig and f not in wctx:
- path = checkunknowndirs(repo, f)
+ path = checkunknowndirs(repo, wctx, f)
if path is not None:
pathconflicts.add(path)
elif m == 'dg':
@@ -1333,10 +1345,6 @@
repo.ui.warn(_("current directory was removed\n"
"(consider changing to repo root: %s)\n") % repo.root)
- # It's necessary to flush here in case we're inside a worker fork and will
- # quit after this function.
- wctx.flushall()
-
def batchget(repo, mctx, wctx, actions):
"""apply gets to the working directory
@@ -1368,7 +1376,9 @@
if repo.wvfs.lexists(absf):
util.rename(absf, orig)
wctx[f].clearunknown()
- wctx[f].write(fctx(f).data(), flags, backgroundclose=True)
+ atomictemp = ui.configbool("experimental", "update.atomic-file")
+ wctx[f].write(fctx(f).data(), flags, backgroundclose=True,
+ atomictemp=atomictemp)
if i == 100:
yield i, f
i = 0
@@ -1376,9 +1386,6 @@
if i > 0:
yield i, f
- # It's necessary to flush here in case we're inside a worker fork and will
- # quit after this function.
- wctx.flushall()
def applyupdates(repo, actions, wctx, mctx, overwrite, labels=None):
"""apply the merge action list to the working directory
@@ -1479,10 +1486,6 @@
z += 1
progress(_updating, z, item=f, total=numupdates, unit=_files)
- # We should flush before forking into worker processes, since those workers
- # flush when they complete, and we don't want to duplicate work.
- wctx.flushall()
-
# get in parallel
prog = worker.worker(repo.ui, cost, batchget, (repo, mctx, wctx),
actions['g'])
@@ -1555,6 +1558,9 @@
usemergedriver = not overwrite and mergeactions and ms.mergedriver
if usemergedriver:
+ if wctx.isinmemory():
+ raise error.InMemoryMergeConflictsError("in-memory merge does not "
+ "support mergedriver")
ms.commit()
proceed = driverpreprocess(repo, ms, wctx, labels=labels)
# the driver might leave some files unresolved
@@ -1850,8 +1856,9 @@
if not force and (wc.files() or wc.deleted()):
raise error.Abort(_("uncommitted changes"),
hint=_("use 'hg status' to list changes"))
- for s in sorted(wc.substate):
- wc.sub(s).bailifchanged()
+ if not wc.isinmemory():
+ for s in sorted(wc.substate):
+ wc.sub(s).bailifchanged()
elif not overwrite:
if p1 == p2: # no-op update
@@ -1966,7 +1973,7 @@
### apply phase
if not branchmerge: # just jump to the new rev
fp1, fp2, xp1, xp2 = fp2, nullid, xp2, ''
- if not partial:
+ if not partial and not wc.isinmemory():
repo.hook('preupdate', throw=True, parent1=xp1, parent2=xp2)
# note that we're in the middle of an update
repo.vfs.write('updatestate', p2.hex())
@@ -2004,9 +2011,8 @@
'see "hg help -e fsmonitor")\n'))
stats = applyupdates(repo, actions, wc, p2, overwrite, labels=labels)
- wc.flushall()
- if not partial:
+ if not partial and not wc.isinmemory():
with repo.dirstate.parentchange():
repo.setparents(fp1, fp2)
recordupdates(repo, actions, branchmerge)
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/mercurial/minifileset.py Mon Jan 22 17:53:02 2018 -0500
@@ -0,0 +1,85 @@
+# minifileset.py - a simple language to select files
+#
+# Copyright 2017 Facebook, Inc.
+#
+# This software may be used and distributed according to the terms of the
+# GNU General Public License version 2 or any later version.
+
+from __future__ import absolute_import
+
+from .i18n import _
+from . import (
+ error,
+ fileset,
+)
+
+def _compile(tree):
+ if not tree:
+ raise error.ParseError(_("missing argument"))
+ op = tree[0]
+ if op in {'symbol', 'string', 'kindpat'}:
+ name = fileset.getpattern(tree, {'path'}, _('invalid file pattern'))
+ if name.startswith('**'): # file extension test, ex. "**.tar.gz"
+ ext = name[2:]
+ for c in ext:
+ if c in '*{}[]?/\\':
+ raise error.ParseError(_('reserved character: %s') % c)
+ return lambda n, s: n.endswith(ext)
+ elif name.startswith('path:'): # directory or full path test
+ p = name[5:] # prefix
+ pl = len(p)
+ f = lambda n, s: n.startswith(p) and (len(n) == pl or n[pl] == '/')
+ return f
+ raise error.ParseError(_("unsupported file pattern"),
+ hint=_('paths must be prefixed with "path:"'))
+ elif op == 'or':
+ func1 = _compile(tree[1])
+ func2 = _compile(tree[2])
+ return lambda n, s: func1(n, s) or func2(n, s)
+ elif op == 'and':
+ func1 = _compile(tree[1])
+ func2 = _compile(tree[2])
+ return lambda n, s: func1(n, s) and func2(n, s)
+ elif op == 'not':
+ return lambda n, s: not _compile(tree[1])(n, s)
+ elif op == 'group':
+ return _compile(tree[1])
+ elif op == 'func':
+ symbols = {
+ 'all': lambda n, s: True,
+ 'none': lambda n, s: False,
+ 'size': lambda n, s: fileset.sizematcher(tree[2])(s),
+ }
+
+ name = fileset.getsymbol(tree[1])
+ if name in symbols:
+ return symbols[name]
+
+ raise error.UnknownIdentifier(name, symbols.keys())
+ elif op == 'minus': # equivalent to 'x and not y'
+ func1 = _compile(tree[1])
+ func2 = _compile(tree[2])
+ return lambda n, s: func1(n, s) and not func2(n, s)
+ elif op == 'negate':
+ raise error.ParseError(_("can't use negate operator in this context"))
+ elif op == 'list':
+ raise error.ParseError(_("can't use a list in this context"),
+ hint=_('see hg help "filesets.x or y"'))
+ raise error.ProgrammingError('illegal tree: %r' % (tree,))
+
+def compile(text):
+ """generate a function (path, size) -> bool from filter specification.
+
+ "text" could contain the operators defined by the fileset language for
+ common logic operations, and parenthesis for grouping. The supported path
+ tests are '**.extname' for file extension test, and '"path:dir/subdir"'
+ for prefix test. The ``size()`` predicate is borrowed from filesets to test
+ file size. The predicates ``all()`` and ``none()`` are also supported.
+
+ '(**.php & size(">10MB")) | **.zip | (path:bin & !path:bin/README)' for
+ example, will catch all php files whose size is greater than 10 MB, all
+ files whose name ends with ".zip", and all files under "bin" in the repo
+ root except for "bin/README".
+ """
+ tree = fileset.parse(text)
+ return _compile(tree)
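
As a behavioral illustration of compile() above (written without Mercurial's parser, and assuming size(">10MB") means more than 10*1024*1024 bytes), here is a hand-written predicate equivalent to the docstring's example filter:

TEN_MB = 10 * 1024 * 1024

def pathprefix(prefix):
    plen = len(prefix)
    return lambda n, s: n.startswith(prefix) and (len(n) == plen
                                                  or n[plen] == '/')

def matcher(n, s):
    return ((n.endswith('.php') and s > TEN_MB)
            or n.endswith('.zip')
            or (pathprefix('bin')(n, s)
                and not pathprefix('bin/README')(n, s)))

assert matcher('src/big.php', 20 * 1024 * 1024)
assert matcher('bin/hg', 10) and not matcher('bin/README', 10)
assert not matcher('src/small.php', 10)
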
--- a/mercurial/namespaces.py Mon Jan 08 16:07:51 2018 -0800
+++ b/mercurial/namespaces.py Mon Jan 22 17:53:02 2018 -0500
@@ -25,6 +25,7 @@
def __init__(self):
self._names = util.sortdict()
+ columns = templatekw.getlogcolumns()
# we need current mercurial named objects (bookmarks, tags, and
# branches) to be initialized somewhere, so that place is here
@@ -32,8 +33,7 @@
bmknamemap = lambda repo, name: tolist(repo._bookmarks.get(name))
bmknodemap = lambda repo, node: repo.nodebookmarks(node)
n = namespace("bookmarks", templatename="bookmark",
- # i18n: column positioning for "hg log"
- logfmt=_("bookmark: %s\n"),
+ logfmt=columns['bookmark'],
listnames=bmknames,
namemap=bmknamemap, nodemap=bmknodemap,
builtin=True)
@@ -43,8 +43,7 @@
tagnamemap = lambda repo, name: tolist(repo._tagscache.tags.get(name))
tagnodemap = lambda repo, node: repo.nodetags(node)
n = namespace("tags", templatename="tag",
- # i18n: column positioning for "hg log"
- logfmt=_("tag: %s\n"),
+ logfmt=columns['tag'],
listnames=tagnames,
namemap=tagnamemap, nodemap=tagnodemap,
deprecated={'tip'},
@@ -55,8 +54,7 @@
bnamemap = lambda repo, name: tolist(repo.branchtip(name, True))
bnodemap = lambda repo, node: [repo[node].branch()]
n = namespace("branches", templatename="branch",
- # i18n: column positioning for "hg log"
- logfmt=_("branch: %s\n"),
+ logfmt=columns['branch'],
listnames=bnames,
namemap=bnamemap, nodemap=bnodemap,
builtin=True)
--- a/mercurial/obsolete.py Mon Jan 08 16:07:51 2018 -0800
+++ b/mercurial/obsolete.py Mon Jan 22 17:53:02 2018 -0500
@@ -776,7 +776,7 @@
# rely on obsstore class default when possible.
kwargs = {}
if defaultformat is not None:
- kwargs['defaultformat'] = defaultformat
+ kwargs[r'defaultformat'] = defaultformat
readonly = not isenabled(repo, createmarkersopt)
store = obsstore(repo.svfs, readonly=readonly, **kwargs)
if store and readonly:
@@ -838,18 +838,10 @@
repo.ui.warn(_('unexpected old value for %r') % key)
return False
data = util.b85decode(new)
- lock = repo.lock()
- try:
- tr = repo.transaction('pushkey: obsolete markers')
- try:
- repo.obsstore.mergemarkers(tr, data)
- repo.invalidatevolatilesets()
- tr.close()
- return True
- finally:
- tr.release()
- finally:
- lock.release()
+ with repo.lock(), repo.transaction('pushkey: obsolete markers') as tr:
+ repo.obsstore.mergemarkers(tr, data)
+ repo.invalidatevolatilesets()
+ return True
# keep compatibility for the 4.3 cycle
def allprecursors(obsstore, nodes, ignoreflags=0):
@@ -994,10 +986,10 @@
public = phases.public
cl = repo.changelog
torev = cl.nodemap.get
- for ctx in repo.set('(not public()) and (not obsolete())'):
- rev = ctx.rev()
+ tonode = cl.node
+ for rev in repo.revs('(not public()) and (not obsolete())'):
# We only evaluate mutable, non-obsolete revision
- node = ctx.node()
+ node = tonode(rev)
# (future) A cache of predecessors may worth if split is very common
for pnode in obsutil.allpredecessors(repo.obsstore, [node],
ignoreflags=bumpedfix):
@@ -1023,8 +1015,10 @@
divergent = set()
obsstore = repo.obsstore
newermap = {}
- for ctx in repo.set('(not public()) - obsolete()'):
- mark = obsstore.predecessors.get(ctx.node(), ())
+ tonode = repo.changelog.node
+ for rev in repo.revs('(not public()) - obsolete()'):
+ node = tonode(rev)
+ mark = obsstore.predecessors.get(node, ())
toprocess = set(mark)
seen = set()
while toprocess:
@@ -1036,7 +1030,7 @@
obsutil.successorssets(repo, prec, cache=newermap)
newer = [n for n in newermap[prec] if n]
if len(newer) > 1:
- divergent.add(ctx.rev())
+ divergent.add(rev)
break
toprocess.update(obsstore.predecessors.get(prec, ()))
return divergent
@@ -1079,8 +1073,7 @@
saveeffectflag = repo.ui.configbool('experimental',
'evolution.effect-flags')
- tr = repo.transaction('add-obsolescence-marker')
- try:
+ with repo.transaction('add-obsolescence-marker') as tr:
markerargs = []
for rel in relations:
prec = rel[0]
@@ -1121,6 +1114,3 @@
date=date, metadata=localmetadata,
ui=repo.ui)
repo.filteredrevcache.clear()
- tr.close()
- finally:
- tr.release()
--- a/mercurial/obsutil.py Mon Jan 08 16:07:51 2018 -0800
+++ b/mercurial/obsutil.py Mon Jan 22 17:53:02 2018 -0500
@@ -9,9 +9,11 @@
import re
+from .i18n import _
from . import (
+ node as nodemod,
phases,
- util
+ util,
)
class marker(object):
@@ -441,12 +443,12 @@
public = phases.public
addedmarkers = tr.changes.get('obsmarkers')
addedrevs = tr.changes.get('revs')
- seenrevs = set(addedrevs)
+ seenrevs = set()
obsoleted = set()
for mark in addedmarkers:
node = mark[0]
rev = torev(node)
- if rev is None or rev in seenrevs:
+ if rev is None or rev in seenrevs or rev in addedrevs:
continue
seenrevs.add(rev)
if phase(repo, rev) == public:
@@ -751,8 +753,35 @@
return values
-def successorsetverb(successorset):
- """ Return the verb summarizing the successorset
+def _getobsfate(successorssets):
+ """ Compute a changeset obsolescence fate based on its successorssets.
+ Successors can be the tipmost ones or the immediate ones. This function's
+ return values are not meant to be shown directly to users; they are meant
+ to be used by internal functions only.
+ Returns one fate from the following values:
+ - pruned
+ - diverged
+ - superseded
+ - superseded_split
+ """
+
+ if len(successorssets) == 0:
+ # The commit has been pruned
+ return 'pruned'
+ elif len(successorssets) > 1:
+ return 'diverged'
+ else:
+ # No divergence, only one set of successors
+ successors = successorssets[0]
+
+ if len(successors) == 1:
+ return 'superseded'
+ else:
+ return 'superseded_split'
+
+def obsfateverb(successorset, markers):
+ """ Return the verb summarizing the successorset and potentially using
+ information from the markers
"""
if not successorset:
verb = 'pruned'
@@ -795,7 +824,7 @@
line = []
# Verb
- line.append(successorsetverb(successors))
+ line.append(obsfateverb(successors, markers))
# Operations
operations = markersoperations(markers)
@@ -835,3 +864,43 @@
line.append(" (between %s and %s)" % (fmtmin_date, fmtmax_date))
return "".join(line)
+
+
+filteredmsgtable = {
+ "pruned": _("hidden revision '%s' is pruned"),
+ "diverged": _("hidden revision '%s' has diverged"),
+ "superseded": _("hidden revision '%s' was rewritten as: %s"),
+ "superseded_split": _("hidden revision '%s' was split as: %s"),
+ "superseded_split_several": _("hidden revision '%s' was split as: %s and "
+ "%d more"),
+}
+
+def _getfilteredreason(repo, changeid, ctx):
+ """return a human-friendly string on why a obsolete changeset is hidden
+ """
+ successors = successorssets(repo, ctx.node())
+ fate = _getobsfate(successors)
+
+ # Be more precise in case the revision is superseded
+ if fate == 'pruned':
+ return filteredmsgtable['pruned'] % changeid
+ elif fate == 'diverged':
+ return filteredmsgtable['diverged'] % changeid
+ elif fate == 'superseded':
+ single_successor = nodemod.short(successors[0][0])
+ return filteredmsgtable['superseded'] % (changeid, single_successor)
+ elif fate == 'superseded_split':
+
+ succs = []
+ for node_id in successors[0]:
+ succs.append(nodemod.short(node_id))
+
+ if len(succs) <= 2:
+ fmtsuccs = ', '.join(succs)
+ return filteredmsgtable['superseded_split'] % (changeid, fmtsuccs)
+ else:
+ firstsuccessors = ', '.join(succs[:2])
+ remainingnumber = len(succs) - 2
+
+ args = (changeid, firstsuccessors, remainingnumber)
+ return filteredmsgtable['superseded_split_several'] % args
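
A toy walk-through of the message assembly in _getfilteredreason() above for the split case with more than two successors; the changeset id and short hashes are made up:

succs = ['1d22f65f027b', '24c1a6b0e0f2', '3aa94d7f0c11']
changeid = 'feature-x'
if len(succs) <= 2:
    msg = "hidden revision '%s' was split as: %s" % (changeid, ', '.join(succs))
else:
    msg = ("hidden revision '%s' was split as: %s and %d more"
           % (changeid, ', '.join(succs[:2]), len(succs) - 2))
assert msg == ("hidden revision 'feature-x' was split as: "
               "1d22f65f027b, 24c1a6b0e0f2 and 1 more")
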
--- a/mercurial/patch.py Mon Jan 08 16:07:51 2018 -0800
+++ b/mercurial/patch.py Mon Jan 22 17:53:02 2018 -0500
@@ -10,7 +10,9 @@
import collections
import copy
+import difflib
import email
+import email.parser as emailparser
import errno
import hashlib
import os
@@ -45,6 +47,7 @@
gitre = re.compile(br'diff --git a/(.*) b/(.*)')
tabsplitter = re.compile(br'(\t+|[^\t]+)')
+_nonwordre = re.compile(br'([^a-zA-Z0-9_\x80-\xff])')
PatchError = error.PatchError
@@ -106,7 +109,7 @@
cur.append(line)
c = chunk(cur)
- m = email.Parser.Parser().parse(c)
+ m = emailparser.Parser().parse(c)
if not m.is_multipart():
yield msgfp(m)
else:
@@ -149,6 +152,8 @@
raise StopIteration
return l
+ __next__ = next
+
inheader = False
cur = []
@@ -203,7 +208,7 @@
# attempt to detect the start of a patch
# (this heuristic is borrowed from quilt)
- diffre = re.compile(br'^(?:Index:[ \t]|diff[ \t]|RCS file: |'
+ diffre = re.compile(br'^(?:Index:[ \t]|diff[ \t]-|RCS file: |'
br'retrieving revision [0-9]+(\.[0-9]+)*$|'
br'---[ \t].*?^\+\+\+[ \t]|'
br'\*\*\*[ \t].*?^---[ \t])',
@@ -213,7 +218,7 @@
fd, tmpname = tempfile.mkstemp(prefix='hg-patch-')
tmpfp = os.fdopen(fd, pycompat.sysstr('w'))
try:
- msg = email.Parser.Parser().parse(fileobj)
+ msg = emailparser.Parser().parse(fileobj)
subject = msg['Subject'] and mail.headdecode(msg['Subject'])
data['user'] = msg['From'] and mail.headdecode(msg['From'])
@@ -997,16 +1002,26 @@
def getmessages():
return {
'multiple': {
+ 'apply': _("apply change %d/%d to '%s'?"),
'discard': _("discard change %d/%d to '%s'?"),
'record': _("record change %d/%d to '%s'?"),
- 'revert': _("revert change %d/%d to '%s'?"),
},
'single': {
+ 'apply': _("apply this change to '%s'?"),
'discard': _("discard this change to '%s'?"),
'record': _("record this change to '%s'?"),
- 'revert': _("revert this change to '%s'?"),
},
'help': {
+ 'apply': _('[Ynesfdaq?]'
+ '$$ &Yes, apply this change'
+ '$$ &No, skip this change'
+ '$$ &Edit this change manually'
+ '$$ &Skip remaining changes to this file'
+ '$$ Apply remaining changes to this &file'
+ '$$ &Done, skip remaining changes and files'
+ '$$ Apply &all changes to all remaining files'
+ '$$ &Quit, applying no changes'
+ '$$ &? (display help)'),
'discard': _('[Ynesfdaq?]'
'$$ &Yes, discard this change'
'$$ &No, skip this change'
@@ -1027,16 +1042,6 @@
'$$ Record &all changes to all remaining files'
'$$ &Quit, recording no changes'
'$$ &? (display help)'),
- 'revert': _('[Ynesfdaq?]'
- '$$ &Yes, revert this change'
- '$$ &No, skip this change'
- '$$ &Edit this change manually'
- '$$ &Skip remaining changes to this file'
- '$$ Revert remaining changes to this &file'
- '$$ &Done, skip remaining changes and files'
- '$$ Revert &all changes to all remaining files'
- '$$ &Quit, reverting no changes'
- '$$ &? (display help)')
}
}
@@ -1990,14 +1995,16 @@
return _applydiff(ui, fp, patchfile, backend, store, strip=strip,
prefix=prefix, eolmode=eolmode)
+def _canonprefix(repo, prefix):
+ if prefix:
+ prefix = pathutil.canonpath(repo.root, repo.getcwd(), prefix)
+ if prefix != '':
+ prefix += '/'
+ return prefix
+
def _applydiff(ui, fp, patcher, backend, store, strip=1, prefix='',
eolmode='strict'):
-
- if prefix:
- prefix = pathutil.canonpath(backend.repo.root, backend.repo.getcwd(),
- prefix)
- if prefix != '':
- prefix += '/'
+ prefix = _canonprefix(backend.repo, prefix)
def pstrip(p):
return pathtransform(p, strip - 1, prefix)[1]
@@ -2183,20 +2190,22 @@
return internalpatch(ui, repo, patchname, strip, prefix, files, eolmode,
similarity)
-def changedfiles(ui, repo, patchpath, strip=1):
+def changedfiles(ui, repo, patchpath, strip=1, prefix=''):
backend = fsbackend(ui, repo.root)
+ prefix = _canonprefix(repo, prefix)
with open(patchpath, 'rb') as fp:
changed = set()
for state, values in iterhunks(fp):
if state == 'file':
afile, bfile, first_hunk, gp = values
if gp:
- gp.path = pathtransform(gp.path, strip - 1, '')[1]
+ gp.path = pathtransform(gp.path, strip - 1, prefix)[1]
if gp.oldpath:
- gp.oldpath = pathtransform(gp.oldpath, strip - 1, '')[1]
+ gp.oldpath = pathtransform(gp.oldpath, strip - 1,
+ prefix)[1]
else:
gp = makepatchmeta(backend, afile, bfile, first_hunk, strip,
- '')
+ prefix)
changed.add(gp.path)
if gp.op == 'RENAME':
changed.add(gp.oldpath)
@@ -2246,6 +2255,7 @@
'showfunc': get('show_function', 'showfunc'),
'context': get('unified', getter=ui.config),
}
+ buildopts['worddiff'] = ui.configbool('experimental', 'worddiff')
if git:
buildopts['git'] = get('git')
@@ -2434,7 +2444,7 @@
modified = sorted(modifiedset)
added = sorted(addedset)
removed = sorted(removedset)
- for dst, src in copy.items():
+ for dst, src in list(copy.items()):
if src not in ctx1:
# Files merged in during a merge and then copied/renamed are
# reported as copies. We want to show them in the diff as additions.
@@ -2457,6 +2467,9 @@
def difflabel(func, *args, **kw):
'''yields 2-tuples of (output, label) based on the output of func()'''
+ inlinecolor = False
+ if kw.get(r'opts'):
+ inlinecolor = kw[r'opts'].worddiff
headprefixes = [('diff', 'diff.diffline'),
('copy', 'diff.extended'),
('rename', 'diff.extended'),
@@ -2473,6 +2486,9 @@
head = False
for chunk in func(*args, **kw):
lines = chunk.split('\n')
+ matches = {}
+ if inlinecolor:
+ matches = _findmatches(lines)
for i, line in enumerate(lines):
if i != 0:
yield ('\n', '')
@@ -2496,11 +2512,17 @@
for prefix, label in prefixes:
if stripline.startswith(prefix):
if diffline:
- for token in tabsplitter.findall(stripline):
- if '\t' == token[0]:
- yield (token, 'diff.tab')
- else:
- yield (token, label)
+ if i in matches:
+ for t, l in _inlinediff(lines[i].rstrip(),
+ lines[matches[i]].rstrip(),
+ label):
+ yield (t, l)
+ else:
+ for token in tabsplitter.findall(stripline):
+ if '\t' == token[0]:
+ yield (token, 'diff.tab')
+ else:
+ yield (token, label)
else:
yield (stripline, label)
break
@@ -2509,6 +2531,75 @@
if line != stripline:
yield (line[len(stripline):], 'diff.trailingwhitespace')
+def _findmatches(slist):
+ '''Look for insertions that match deletions and return a dict of
+ correspondences.
+ '''
+ lastmatch = 0
+ matches = {}
+ for i, line in enumerate(slist):
+ if line == '':
+ continue
+ if line[0] == '-':
+ lastmatch = max(lastmatch, i)
+ newgroup = False
+ for j, newline in enumerate(slist[lastmatch + 1:]):
+ if newline == '':
+ continue
+ if newline[0] == '-' and newgroup: # too far, no match
+ break
+ if newline[0] == '+': # potential match
+ newgroup = True
+ sim = difflib.SequenceMatcher(None, line, newline).ratio()
+ if sim > 0.7:
+ lastmatch = lastmatch + 1 + j
+ matches[i] = lastmatch
+ matches[lastmatch] = i
+ break
+ return matches
+
+def _inlinediff(s1, s2, operation):
+ '''Perform string diff to highlight specific changes.'''
+ operation_skip = '+?' if operation == 'diff.deleted' else '-?'
+ if operation == 'diff.deleted':
+ s2, s1 = s1, s2
+
+ buff = []
+ # we never want to highlight the leading +-
+ if operation == 'diff.deleted' and s2.startswith('-'):
+ label = operation
+ token = '-'
+ s2 = s2[1:]
+ s1 = s1[1:]
+ elif operation == 'diff.inserted' and s1.startswith('+'):
+ label = operation
+ token = '+'
+ s2 = s2[1:]
+ s1 = s1[1:]
+ else:
+ raise error.ProgrammingError("Case not expected, operation = %s" %
+ operation)
+
+ s = difflib.ndiff(_nonwordre.split(s2), _nonwordre.split(s1))
+ for part in s:
+ if part[0] in operation_skip or len(part) == 2:
+ continue
+ l = operation + '.highlight'
+ if part[0] in ' ':
+ l = operation
+ if part[2:] == '\t':
+ l = 'diff.tab'
+ if l == label: # contiguous token with same label
+ token += part[2:]
+ continue
+ else:
+ buff.append((token, label))
+ label = l
+ token = part[2:]
+ buff.append((token, label))
+
+ return buff
+
def diffui(*args, **kw):
'''like diff(), but yields 2-tuples of (output, label) for ui.write()'''
return difflabel(diff, *args, **kw)
@@ -2564,7 +2655,7 @@
l = len(text)
s = hashlib.sha1('blob %d\0' % l)
s.update(text)
- return s.hexdigest()
+ return hex(s.digest())
if opts.noprefix:
aprefix = bprefix = ''
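
The experimental worddiff support added above pairs a removed line with a nearby added line only when difflib reports them as similar enough (ratio > 0.7); only then is the pair re-split on non-word characters for inline highlighting. A minimal check of that threshold on an invented pair of lines (the split pattern here is a simplified str version of the bytes regex in the patch):

import difflib
import re

old = '-    return s.hexdigest()'
new = '+    return hex(s.digest())'
sim = difflib.SequenceMatcher(None, old, new).ratio()
assert sim > 0.7    # the two lines would be paired for inline highlighting

# the paired lines are then split on non-word characters before diffing
nonword = re.compile(r'([^a-zA-Z0-9_])')
print(nonword.split('hex(s.digest())'))
# ['hex', '(', 's', '.', 'digest', '(', '', ')', '', ')', '']
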
--- a/mercurial/phases.py Mon Jan 08 16:07:51 2018 -0800
+++ b/mercurial/phases.py Mon Jan 22 17:53:02 2018 -0500
@@ -115,6 +115,7 @@
)
from . import (
error,
+ pycompat,
smartset,
txnutil,
util,
@@ -202,31 +203,43 @@
if _load:
# Cheap trick to allow shallow-copy without copy module
self.phaseroots, self.dirty = _readroots(repo, phasedefaults)
- self._phaserevs = None
+ self._loadedrevslen = 0
self._phasesets = None
self.filterunknown(repo)
self.opener = repo.svfs
- def getrevset(self, repo, phases):
+ def getrevset(self, repo, phases, subset=None):
"""return a smartset for the given phases"""
self.loadphaserevs(repo) # ensure phase's sets are loaded
-
- if self._phasesets and all(self._phasesets[p] is not None
- for p in phases):
- # fast path - use _phasesets
- revs = self._phasesets[phases[0]]
- if len(phases) > 1:
- revs = revs.copy() # only copy when needed
- for p in phases[1:]:
- revs.update(self._phasesets[p])
+ phases = set(phases)
+ if public not in phases:
+ # fast path: _phasesets contains the interesting sets,
+ # so we might only need a union and post-filtering.
+ if len(phases) == 1:
+ [p] = phases
+ revs = self._phasesets[p]
+ else:
+ revs = set.union(*[self._phasesets[p] for p in phases])
if repo.changelog.filteredrevs:
revs = revs - repo.changelog.filteredrevs
- return smartset.baseset(revs)
+ if subset is None:
+ return smartset.baseset(revs)
+ else:
+ return subset & smartset.baseset(revs)
else:
- # slow path - enumerate all revisions
- phase = self.phase
- revs = (r for r in repo if phase(repo, r) in phases)
- return smartset.generatorset(revs, iterasc=True)
+ phases = set(allphases).difference(phases)
+ if not phases:
+ return smartset.fullreposet(repo)
+ if len(phases) == 1:
+ [p] = phases
+ revs = self._phasesets[p]
+ else:
+ revs = set.union(*[self._phasesets[p] for p in phases])
+ if subset is None:
+ subset = smartset.fullreposet(repo)
+ if not revs:
+ return subset
+ return subset.filter(lambda r: r not in revs)
def copy(self):
# Shallow copy meant to ensure isolation in
@@ -235,13 +248,14 @@
ph.phaseroots = self.phaseroots[:]
ph.dirty = self.dirty
ph.opener = self.opener
- ph._phaserevs = self._phaserevs
+ ph._loadedrevslen = self._loadedrevslen
ph._phasesets = self._phasesets
return ph
def replace(self, phcache):
"""replace all values in 'self' with content of phcache"""
- for a in ('phaseroots', 'dirty', 'opener', '_phaserevs', '_phasesets'):
+ for a in ('phaseroots', 'dirty', 'opener', '_loadedrevslen',
+ '_phasesets'):
setattr(self, a, getattr(phcache, a))
def _getphaserevsnative(self, repo):
@@ -253,42 +267,38 @@
def _computephaserevspure(self, repo):
repo = repo.unfiltered()
- revs = [public] * len(repo.changelog)
- self._phaserevs = revs
- self._populatephaseroots(repo)
- for phase in trackedphases:
- roots = list(map(repo.changelog.rev, self.phaseroots[phase]))
- if roots:
- for rev in roots:
- revs[rev] = phase
- for rev in repo.changelog.descendants(roots):
- revs[rev] = phase
+ cl = repo.changelog
+ self._phasesets = [set() for phase in allphases]
+ roots = pycompat.maplist(cl.rev, self.phaseroots[secret])
+ if roots:
+ ps = set(cl.descendants(roots))
+ for root in roots:
+ ps.add(root)
+ self._phasesets[secret] = ps
+ roots = pycompat.maplist(cl.rev, self.phaseroots[draft])
+ if roots:
+ ps = set(cl.descendants(roots))
+ for root in roots:
+ ps.add(root)
+ ps.difference_update(self._phasesets[secret])
+ self._phasesets[draft] = ps
+ self._loadedrevslen = len(cl)
def loadphaserevs(self, repo):
"""ensure phase information is loaded in the object"""
- if self._phaserevs is None:
+ if self._phasesets is None:
try:
res = self._getphaserevsnative(repo)
- self._phaserevs, self._phasesets = res
+ self._loadedrevslen, self._phasesets = res
except AttributeError:
self._computephaserevspure(repo)
def invalidate(self):
- self._phaserevs = None
+ self._loadedrevslen = 0
self._phasesets = None
- def _populatephaseroots(self, repo):
- """Fills the _phaserevs cache with phases for the roots.
- """
- cl = repo.changelog
- phaserevs = self._phaserevs
- for phase in trackedphases:
- roots = map(cl.rev, self.phaseroots[phase])
- for root in roots:
- phaserevs[root] = phase
-
def phase(self, repo, rev):
- # We need a repo argument here to be able to build _phaserevs
+ # We need a repo argument here to be able to build _phasesets
# if necessary. The repository instance is not stored in
# phasecache to avoid reference cycles. The changelog instance
# is not stored because it is a filecache() property and can
@@ -297,10 +307,13 @@
return public
if rev < nullrev:
raise ValueError(_('cannot lookup negative revision'))
- if self._phaserevs is None or rev >= len(self._phaserevs):
+ if rev >= self._loadedrevslen:
self.invalidate()
self.loadphaserevs(repo)
- return self._phaserevs[rev]
+ for phase in trackedphases:
+ if rev in self._phasesets[phase]:
+ return phase
+ return public
def write(self):
if not self.dirty:
@@ -455,10 +468,10 @@
if filtered:
self.dirty = True
# filterunknown is called by repo.destroyed, we may have no changes in
- # root but phaserevs contents is certainly invalid (or at least we
+ # root but _phasesets contents are certainly invalid (or at least we
# have not proper way to check that). related to issue 3858.
#
+ # The other caller is __init__, which has no _phasesets initialized
+ # The other caller is __init__ that have no _phasesets initialized
# anyway. If this change we should consider adding a dedicated
# "destroyed" function to phasecache or a proper cache key mechanism
# (see branchmap one)
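
A reduced model of the getrevset() fast path above: when 'public' is not among the requested phases, the result is just a union of the tracked per-phase sets, optionally intersected with a caller-provided subset. The revision numbers here are invented:

public, draft, secret = 0, 1, 2
phasesets = {draft: {5, 7, 9}, secret: {10, 11}}

def getrevset(phases, subset=None):
    assert public not in phases         # model the fast path only
    revs = set().union(*(phasesets[p] for p in phases))
    return revs if subset is None else revs & subset

assert sorted(getrevset({draft, secret})) == [5, 7, 9, 10, 11]
assert getrevset({draft}, subset={7, 8}) == {7}
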
--- a/mercurial/policy.py Mon Jan 08 16:07:51 2018 -0800
+++ b/mercurial/policy.py Mon Jan 22 17:53:02 2018 -0500
@@ -74,8 +74,8 @@
(r'cext', r'bdiff'): 1,
(r'cext', r'diffhelpers'): 1,
(r'cext', r'mpatch'): 1,
- (r'cext', r'osutil'): 1,
- (r'cext', r'parsers'): 3,
+ (r'cext', r'osutil'): 3,
+ (r'cext', r'parsers'): 4,
}
# map import request to other package or module
--- a/mercurial/posix.py Mon Jan 08 16:07:51 2018 -0800
+++ b/mercurial/posix.py Mon Jan 22 17:53:02 2018 -0500
@@ -24,9 +24,12 @@
from . import (
encoding,
error,
+ policy,
pycompat,
)
+osutil = policy.importmod(r'osutil')
+
posixfile = open
normpath = os.path.normpath
samestat = os.path.samestat
@@ -302,6 +305,20 @@
Returns None if the path is ok, or a UI string describing the problem.'''
return None # on posix platforms, every path is ok
+def getfsmountpoint(dirpath):
+ '''Get the filesystem mount point from a directory (best-effort)
+
+ Returns None if we are unsure. Raises OSError on ENOENT, EPERM, etc.
+ '''
+ return getattr(osutil, 'getfsmountpoint', lambda x: None)(dirpath)
+
+def getfstype(dirpath):
+ '''Get the filesystem type name from a directory (best-effort)
+
+ Returns None if we are unsure. Raises OSError on ENOENT, EPERM, etc.
+ '''
+ return getattr(osutil, 'getfstype', lambda x: None)(dirpath)
+
def setbinary(fd):
pass
--- a/mercurial/pycompat.py Mon Jan 08 16:07:51 2018 -0800
+++ b/mercurial/pycompat.py Mon Jan 22 17:53:02 2018 -0500
@@ -63,6 +63,7 @@
sysexecutable = os.fsencode(sysexecutable)
stringio = io.BytesIO
maplist = lambda *args: list(map(*args))
+ ziplist = lambda *args: list(zip(*args))
rawinput = input
# TODO: .buffer might not exist if std streams were replaced; we'll need
@@ -214,7 +215,7 @@
def open(name, mode='r', buffering=-1):
return builtins.open(name, sysstr(mode), buffering)
- def getoptb(args, shortlist, namelist):
+ def _getoptbwrapper(orig, args, shortlist, namelist):
"""
Takes bytes arguments, converts them to unicode, pass them to
getopt.getopt(), convert the returned values back to bytes and then
@@ -224,7 +225,7 @@
args = [a.decode('latin-1') for a in args]
shortlist = shortlist.decode('latin-1')
namelist = [a.decode('latin-1') for a in namelist]
- opts, args = getopt.getopt(args, shortlist, namelist)
+ opts, args = orig(args, shortlist, namelist)
opts = [(a[0].encode('latin-1'), a[1].encode('latin-1'))
for a in opts]
args = [a.encode('latin-1') for a in args]
@@ -291,8 +292,8 @@
def getdoc(obj):
return getattr(obj, '__doc__', None)
- def getoptb(args, shortlist, namelist):
- return getopt.getopt(args, shortlist, namelist)
+ def _getoptbwrapper(orig, args, shortlist, namelist):
+ return orig(args, shortlist, namelist)
strkwargs = identity
byteskwargs = identity
@@ -313,6 +314,7 @@
shlexsplit = shlex.split
stringio = cStringIO.StringIO
maplist = map
+ ziplist = zip
rawinput = raw_input
isjython = sysplatform.startswith('java')
@@ -320,3 +322,9 @@
isdarwin = sysplatform == 'darwin'
isposix = osname == 'posix'
iswindows = osname == 'nt'
+
+def getoptb(args, shortlist, namelist):
+ return _getoptbwrapper(getopt.getopt, args, shortlist, namelist)
+
+def gnugetoptb(args, shortlist, namelist):
+ return _getoptbwrapper(getopt.gnu_getopt, args, shortlist, namelist)
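
A hedged, standard-library-only sketch of what the shared wrapper buys: the same byte-oriented front end can now drive either getopt.getopt or getopt.gnu_getopt. Only the Python 3 branch is shown and the argument values are placeholders.

    import getopt

    def _getoptbwrapper(orig, args, shortlist, namelist):
        # decode bytes for getopt, then re-encode its results
        args = [a.decode('latin-1') for a in args]
        shortlist = shortlist.decode('latin-1')
        namelist = [a.decode('latin-1') for a in namelist]
        opts, args = orig(args, shortlist, namelist)
        opts = [(o.encode('latin-1'), v.encode('latin-1')) for o, v in opts]
        return opts, [a.encode('latin-1') for a in args]

    # gnu_getopt keeps parsing options after the first positional argument:
    opts, rest = _getoptbwrapper(getopt.gnu_getopt,
                                 [b'file.txt', b'-v'], b'v', [b'verbose'])
    print(opts, rest)  # [(b'-v', b'')] [b'file.txt']
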
--- a/mercurial/registrar.py Mon Jan 08 16:07:51 2018 -0800
+++ b/mercurial/registrar.py Mon Jan 22 17:53:02 2018 -0500
@@ -112,35 +112,53 @@
The created object can be used as a decorator for adding commands to
that command table. This accepts multiple arguments to define a command.
- The first argument is the command name.
+ The first argument is the command name (as bytes).
- The options argument is an iterable of tuples defining command arguments.
- See ``mercurial.fancyopts.fancyopts()`` for the format of each tuple.
+ The `options` keyword argument is an iterable of tuples defining command
+ arguments. See ``mercurial.fancyopts.fancyopts()`` for the format of each
+ tuple.
- The synopsis argument defines a short, one line summary of how to use the
+ The `synopsis` argument defines a short, one line summary of how to use the
command. This shows up in the help output.
- The norepo argument defines whether the command does not require a
+ There are three arguments that control what repository (if any) is found
+ and passed to the decorated function: `norepo`, `optionalrepo`, and
+ `inferrepo`.
+
+ The `norepo` argument defines whether the command does not require a
local repository. Most commands operate against a repository, thus the
- default is False.
+ default is False. When True, no repository will be passed.
- The optionalrepo argument defines whether the command optionally requires
- a local repository.
+ The `optionalrepo` argument defines whether the command optionally requires
+ a local repository. If no repository can be found, None will be passed
+ to the decorated function.
- The inferrepo argument defines whether to try to find a repository from the
- command line arguments. If True, arguments will be examined for potential
- repository locations. See ``findrepo()``. If a repository is found, it
- will be used.
+ The `inferrepo` argument defines whether to try to find a repository from
+ the command line arguments. If True, arguments will be examined for
+ potential repository locations. See ``findrepo()``. If a repository is
+ found, it will be used and passed to the decorated function.
There are three constants in the class which tells what type of the command
that is. That information will be helpful at various places. It will be also
be used to decide what level of access the command has on hidden commits.
The constants are:
- unrecoverablewrite is for those write commands which can't be recovered like
- push.
- recoverablewrite is for write commands which can be recovered like commit.
- readonly is for commands which are read only.
+ `unrecoverablewrite` is for those write commands which can't be recovered
+ like push.
+ `recoverablewrite` is for write commands which can be recovered like commit.
+ `readonly` is for commands which are read only.
+
+ The signature of the decorated function looks like this:
+ def cmd(ui[, repo] [, <args>] [, <options>])
+
+ `repo` is required if `norepo` is False.
+ `<args>` are positional arguments (or `*args`) taken from the non-option
+ arguments on the command line.
+ `<options>` are keyword arguments (or `**options`) built from the option
+ arguments on the command line.
+
+ See the WritingExtensions and MercurialApi documentation for more exhaustive
+ descriptions and examples.
"""
unrecoverablewrite = "unrecoverable"
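
A hedged extension-side sketch of the decorator described above; the command name, option tuple, and synopsis are illustrative, and exact keyword handling varies slightly between Mercurial versions.

    from mercurial import registrar

    cmdtable = {}
    command = registrar.command(cmdtable)

    @command(b'hello',
             [(b'g', b'greeting', b'hello', b'word to open with')],
             b'hg hello [-g TEXT] [NAME]...',
             norepo=True)                  # no repo argument will be passed
    def hello(ui, *names, **opts):
        # names: non-option arguments; opts: option arguments, per the docstring
        greeting = opts.get(r'greeting') or b'hello'
        ui.write(b'%s %s\n' % (greeting, b' '.join(names) or b'world'))
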
--- a/mercurial/repair.py Mon Jan 08 16:07:51 2018 -0800
+++ b/mercurial/repair.py Mon Jan 22 17:53:02 2018 -0500
@@ -203,8 +203,9 @@
deleteobsmarkers(repo.obsstore, stripobsidx)
del repo.obsstore
+ repo.invalidatevolatilesets()
+ repo._phasecache.filterunknown(repo)
- repo._phasecache.filterunknown(repo)
if tmpbundlefile:
ui.note(_("adding branch\n"))
f = vfs.open(tmpbundlefile, "rb")
@@ -222,8 +223,6 @@
if not repo.ui.verbose:
repo.ui.popbuffer()
f.close()
- repo._phasecache.invalidate()
-
with repo.transaction('repair') as tr:
bmchanges = [(m, repo[newbmtarget].node()) for m in updatebm]
--- a/mercurial/repoview.py Mon Jan 08 16:07:51 2018 -0800
+++ b/mercurial/repoview.py Mon Jan 22 17:53:02 2018 -0500
@@ -9,11 +9,13 @@
from __future__ import absolute_import
import copy
+import weakref
from .node import nullrev
from . import (
obsolete,
phases,
+ pycompat,
tags as tagsmod,
)
@@ -63,7 +65,7 @@
hidden.remove(p)
stack.append(p)
-def computehidden(repo):
+def computehidden(repo, visibilityexceptions=None):
"""compute the set of hidden revision to filter
During most operation hidden should be filtered."""
@@ -72,6 +74,8 @@
hidden = hideablerevs(repo)
if hidden:
hidden = set(hidden - pinnedrevs(repo))
+ if visibilityexceptions:
+ hidden -= visibilityexceptions
pfunc = repo.changelog.parentrevs
mutablephases = (phases.draft, phases.secret)
mutable = repo._phasecache.getrevset(repo, mutablephases)
@@ -80,7 +84,7 @@
_revealancestors(pfunc, hidden, visible)
return frozenset(hidden)
-def computeunserved(repo):
+def computeunserved(repo, visibilityexceptions=None):
"""compute the set of revision that should be filtered when used a server
Secret and hidden changeset should not pretend to be here."""
@@ -98,7 +102,7 @@
else:
return hiddens
-def computemutable(repo):
+def computemutable(repo, visibilityexceptions=None):
assert not repo.changelog.filteredrevs
# fast check to avoid revset call on huge repo
if any(repo._phasecache.phaseroots[1:]):
@@ -107,7 +111,7 @@
return frozenset(r for r in maymutable if getphase(repo, r))
return frozenset()
-def computeimpactable(repo):
+def computeimpactable(repo, visibilityexceptions=None):
"""Everything impactable by mutable revision
The immutable filter still have some chance to get invalidated. This will
@@ -139,14 +143,21 @@
# Otherwise your filter will have to recompute all its branches cache
# from scratch (very slow).
filtertable = {'visible': computehidden,
+ 'visible-hidden': computehidden,
'served': computeunserved,
'immutable': computemutable,
'base': computeimpactable}
-def filterrevs(repo, filtername):
- """returns set of filtered revision for this filter name"""
+def filterrevs(repo, filtername, visibilityexceptions=None):
+ """returns set of filtered revision for this filter name
+
+ visibilityexceptions is a set of revs which must are exceptions for
+ hidden-state and must be visible. They are dynamic and hence we should not
+ cache it's result"""
if filtername not in repo.filteredrevcache:
func = filtertable[filtername]
+ if visibilityexceptions:
+ return func(repo.unfiltered(), visibilityexceptions)
repo.filteredrevcache[filtername] = func(repo.unfiltered())
return repo.filteredrevcache[filtername]
@@ -185,11 +196,14 @@
subclasses of `localrepo`. Eg: `bundlerepo` or `statichttprepo`.
"""
- def __init__(self, repo, filtername):
+ def __init__(self, repo, filtername, visibilityexceptions=None):
object.__setattr__(self, r'_unfilteredrepo', repo)
object.__setattr__(self, r'filtername', filtername)
object.__setattr__(self, r'_clcachekey', None)
object.__setattr__(self, r'_clcache', None)
+ # revs which are exceptions and must not be hidden
+ object.__setattr__(self, r'_visibilityexceptions',
+ visibilityexceptions)
# not a propertycache on purpose we shall implement a proper cache later
@property
@@ -205,7 +219,7 @@
unfilen = len(unfiindex) - 1
unfinode = unfiindex[unfilen - 1][7]
- revs = filterrevs(unfi, self.filtername)
+ revs = filterrevs(unfi, self.filtername, self._visibilityexceptions)
cl = self._clcache
newkey = (unfilen, unfinode, hash(revs), unfichangelog._delayed)
# if cl.index is not unfiindex, unfi.changelog would be
@@ -225,11 +239,16 @@
"""Return an unfiltered version of a repo"""
return self._unfilteredrepo
- def filtered(self, name):
+ def filtered(self, name, visibilityexceptions=None):
"""Return a filtered version of a repository"""
- if name == self.filtername:
+ if name == self.filtername and not visibilityexceptions:
return self
- return self.unfiltered().filtered(name)
+ return self.unfiltered().filtered(name, visibilityexceptions)
+
+ def __repr__(self):
+ return r'<%s:%s %r>' % (self.__class__.__name__,
+ pycompat.sysstr(self.filtername),
+ self.unfiltered())
# everything access are forwarded to the proxied repo
def __getattr__(self, attr):
@@ -240,3 +259,16 @@
def __delattr__(self, attr):
return delattr(self._unfilteredrepo, attr)
+
+# Python <3.4 easily leaks types via __mro__. See
+# https://bugs.python.org/issue17950. We cache dynamically created types
+# so they won't be leaked on every invocation of repo.filtered().
+_filteredrepotypes = weakref.WeakKeyDictionary()
+
+def newtype(base):
+ """Create a new type with the repoview mixin and the given base class"""
+ if base not in _filteredrepotypes:
+ class filteredrepo(repoview, base):
+ pass
+ _filteredrepotypes[base] = filteredrepo
+ return _filteredrepotypes[base]
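
A standalone sketch of the type-caching trick newtype() relies on; the mixin and base classes below are toys, not Mercurial classes.

    import weakref

    class viewmixin(object):
        def describe(self):
            return '<%s over %s>' % (type(self).__name__, self.payload)

    _typecache = weakref.WeakKeyDictionary()

    def newtype(base):
        # one dynamically created subclass per base class, so repeated calls
        # do not leak a fresh type (and its __mro__) on every invocation
        if base not in _typecache:
            class combined(viewmixin, base):
                pass
            _typecache[base] = combined
        return _typecache[base]

    class somerepo(object):
        payload = 'data'

    assert newtype(somerepo) is newtype(somerepo)
    print(newtype(somerepo)().describe())  # <combined over data>
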
--- a/mercurial/revlog.py Mon Jan 08 16:07:51 2018 -0800
+++ b/mercurial/revlog.py Mon Jan 22 17:53:02 2018 -0500
@@ -33,6 +33,9 @@
wdirrev,
)
from .i18n import _
+from .thirdparty import (
+ attr,
+)
from . import (
ancestor,
error,
@@ -251,6 +254,184 @@
if chunk:
yield chunk
+@attr.s(slots=True, frozen=True)
+class _deltainfo(object):
+ distance = attr.ib()
+ deltalen = attr.ib()
+ data = attr.ib()
+ base = attr.ib()
+ chainbase = attr.ib()
+ chainlen = attr.ib()
+ compresseddeltalen = attr.ib()
+
+class _deltacomputer(object):
+ def __init__(self, revlog):
+ self.revlog = revlog
+
+ def _getcandidaterevs(self, p1, p2, cachedelta):
+ """
+ Provides revisions that present an interest to be diffed against,
+ grouped by level of easiness.
+ """
+ revlog = self.revlog
+ curr = len(revlog)
+ prev = curr - 1
+ p1r, p2r = revlog.rev(p1), revlog.rev(p2)
+
+ # should we try to build a delta?
+ if prev != nullrev and revlog.storedeltachains:
+ tested = set()
+ # This condition is true most of the time when processing
+ # changegroup data into a generaldelta repo. The only time it
+ # isn't true is if this is the first revision in a delta chain
+ # or if ``format.generaldelta=true`` disabled ``lazydeltabase``.
+ if cachedelta and revlog._generaldelta and revlog._lazydeltabase:
+ # Assume what we received from the server is a good choice
+ # build delta will reuse the cache
+ yield (cachedelta[0],)
+ tested.add(cachedelta[0])
+
+ if revlog._generaldelta:
+ # exclude already lazy tested base if any
+ parents = [p for p in (p1r, p2r)
+ if p != nullrev and p not in tested]
+ if parents and not revlog._aggressivemergedeltas:
+ # Pick whichever parent is closer to us (to minimize the
+ # chance of having to build a fulltext).
+ parents = [max(parents)]
+ tested.update(parents)
+ yield parents
+
+ if prev not in tested:
+ # other approach failed try against prev to hopefully save us a
+ # fulltext.
+ yield (prev,)
+
+ def buildtext(self, revinfo, fh):
+ """Builds a fulltext version of a revision
+
+ revinfo: _revisioninfo instance that contains all needed info
+ fh: file handle to either the .i or the .d revlog file,
+ depending on whether it is inlined or not
+ """
+ btext = revinfo.btext
+ if btext[0] is not None:
+ return btext[0]
+
+ revlog = self.revlog
+ cachedelta = revinfo.cachedelta
+ flags = revinfo.flags
+ node = revinfo.node
+
+ baserev = cachedelta[0]
+ delta = cachedelta[1]
+ # special case deltas which replace entire base; no need to decode
+ # base revision. this neatly avoids censored bases, which throw when
+ # they're decoded.
+ hlen = struct.calcsize(">lll")
+ if delta[:hlen] == mdiff.replacediffheader(revlog.rawsize(baserev),
+ len(delta) - hlen):
+ btext[0] = delta[hlen:]
+ else:
+ basetext = revlog.revision(baserev, _df=fh, raw=True)
+ btext[0] = mdiff.patch(basetext, delta)
+
+ try:
+ res = revlog._processflags(btext[0], flags, 'read', raw=True)
+ btext[0], validatehash = res
+ if validatehash:
+ revlog.checkhash(btext[0], node, p1=revinfo.p1, p2=revinfo.p2)
+ if flags & REVIDX_ISCENSORED:
+ raise RevlogError(_('node %s is not censored') % node)
+ except CensoredNodeError:
+ # must pass the censored index flag to add censored revisions
+ if not flags & REVIDX_ISCENSORED:
+ raise
+ return btext[0]
+
+ def _builddeltadiff(self, base, revinfo, fh):
+ revlog = self.revlog
+ t = self.buildtext(revinfo, fh)
+ if revlog.iscensored(base):
+ # deltas based on a censored revision must replace the
+ # full content in one patch, so delta works everywhere
+ header = mdiff.replacediffheader(revlog.rawsize(base), len(t))
+ delta = header + t
+ else:
+ ptext = revlog.revision(base, _df=fh, raw=True)
+ delta = mdiff.textdiff(ptext, t)
+
+ return delta
+
+ def _builddeltainfo(self, revinfo, base, fh):
+ # can we use the cached delta?
+ if revinfo.cachedelta and revinfo.cachedelta[0] == base:
+ delta = revinfo.cachedelta[1]
+ else:
+ delta = self._builddeltadiff(base, revinfo, fh)
+ revlog = self.revlog
+ header, data = revlog.compress(delta)
+ deltalen = len(header) + len(data)
+ chainbase = revlog.chainbase(base)
+ offset = revlog.end(len(revlog) - 1)
+ dist = deltalen + offset - revlog.start(chainbase)
+ if revlog._generaldelta:
+ deltabase = base
+ else:
+ deltabase = chainbase
+ chainlen, compresseddeltalen = revlog._chaininfo(base)
+ chainlen += 1
+ compresseddeltalen += deltalen
+ return _deltainfo(dist, deltalen, (header, data), deltabase,
+ chainbase, chainlen, compresseddeltalen)
+
+ def finddeltainfo(self, revinfo, fh):
+ """Find an acceptable delta against a candidate revision
+
+ revinfo: information about the revision (instance of _revisioninfo)
+ fh: file handle to either the .i or the .d revlog file,
+ depending on whether it is inlined or not
+
+ Returns the first acceptable candidate revision, as ordered by
+ _getcandidaterevs
+ """
+ cachedelta = revinfo.cachedelta
+ p1 = revinfo.p1
+ p2 = revinfo.p2
+ revlog = self.revlog
+
+ deltainfo = None
+ for candidaterevs in self._getcandidaterevs(p1, p2, cachedelta):
+ nominateddeltas = []
+ for candidaterev in candidaterevs:
+ candidatedelta = self._builddeltainfo(revinfo, candidaterev, fh)
+ if revlog._isgooddeltainfo(candidatedelta, revinfo.textlen):
+ nominateddeltas.append(candidatedelta)
+ if nominateddeltas:
+ deltainfo = min(nominateddeltas, key=lambda x: x.deltalen)
+ break
+
+ return deltainfo
+
+@attr.s(slots=True, frozen=True)
+class _revisioninfo(object):
+ """Information about a revision that allows building its fulltext
+ node: expected hash of the revision
+ p1, p2: parent revs of the revision
+ btext: built text cache consisting of a one-element list
+ cachedelta: (baserev, uncompressed_delta) or None
+ flags: flags associated to the revision storage
+
+ One of btext[0] or cachedelta must be set.
+ """
+ node = attr.ib()
+ p1 = attr.ib()
+ p2 = attr.ib()
+ btext = attr.ib()
+ textlen = attr.ib()
+ cachedelta = attr.ib()
+ flags = attr.ib()
+
# index v0:
# 4 bytes: offset
# 4 bytes: compressed length
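
A toy model of the search finddeltainfo() performs: candidate revisions arrive in groups ordered from cheapest to most expensive to try, the first group that yields an acceptable delta wins, and the smallest delta in that group is kept. The sizes and the acceptance rule below are made up for illustration.

    def findbest(candidategroups, deltalen, isgood):
        for group in candidategroups:
            nominated = [(deltalen(rev), rev) for rev in group
                         if isgood(deltalen(rev))]
            if nominated:
                return min(nominated)[1]   # smallest delta among the nominees
        return None                        # caller falls back to a full text

    sizes = {10: 900, 11: 120, 12: 150}    # pretend compressed delta lengths
    best = findbest([(10,), (11, 12)],     # e.g. (cached base,), then (p1, p2)
                    deltalen=lambda r: sizes[r],
                    isgood=lambda l: l < 500)
    assert best == 11
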
@@ -622,12 +803,14 @@
def parentrevs(self, rev):
try:
- return self.index[rev][5:7]
+ entry = self.index[rev]
except IndexError:
if rev == wdirrev:
raise error.WdirUnsupported
raise
+ return entry[5], entry[6]
+
def node(self, rev):
try:
return self.index[rev][7]
@@ -1687,7 +1870,7 @@
self._chunkclear()
def addrevision(self, text, transaction, link, p1, p2, cachedelta=None,
- node=None, flags=REVIDX_DEFAULT_FLAGS):
+ node=None, flags=REVIDX_DEFAULT_FLAGS, deltacomputer=None):
"""add a revision to the log
text - the revision data to add
@@ -1699,6 +1882,8 @@
computed by default as hash(text, p1, p2), however subclasses might
use different hashing method (and override checkhash() in such case)
flags - the known flags to set on the revision
+ deltacomputer - an optional _deltacomputer instance shared between
+ multiple calls
"""
if link == nullrev:
raise RevlogError(_("attempted to add linkrev -1 to %s")
@@ -1727,10 +1912,11 @@
self.checkhash(rawtext, node, p1=p1, p2=p2)
return self.addrawrevision(rawtext, transaction, link, p1, p2, node,
- flags, cachedelta=cachedelta)
+ flags, cachedelta=cachedelta,
+ deltacomputer=deltacomputer)
def addrawrevision(self, rawtext, transaction, link, p1, p2, node, flags,
- cachedelta=None):
+ cachedelta=None, deltacomputer=None):
"""add a raw revision with known flags, node and parents
useful when reusing a revision not stored in this revlog (ex: received
over wire, or read from an external bundle).
@@ -1741,7 +1927,8 @@
ifh = self.opener(self.indexfile, "a+", checkambig=self._checkambig)
try:
return self._addrevision(node, rawtext, transaction, link, p1, p2,
- flags, cachedelta, ifh, dfh)
+ flags, cachedelta, ifh, dfh,
+ deltacomputer=deltacomputer)
finally:
if dfh:
dfh.close()
@@ -1817,39 +2004,42 @@
return compressor.decompress(data)
- def _isgooddelta(self, d, textlen):
+ def _isgooddeltainfo(self, d, textlen):
"""Returns True if the given delta is good. Good means that it is within
the disk span, disk size, and chain length bounds that we know to be
performant."""
if d is None:
return False
- # - 'dist' is the distance from the base revision -- bounding it limits
- # the amount of I/O we need to do.
- # - 'compresseddeltalen' is the sum of the total size of deltas we need
- # to apply -- bounding it limits the amount of CPU we consume.
- dist, l, data, base, chainbase, chainlen, compresseddeltalen = d
+ # - 'd.distance' is the distance from the base revision -- bounding it
+ # limits the amount of I/O we need to do.
+ # - 'd.compresseddeltalen' is the sum of the total size of deltas we
+ # need to apply -- bounding it limits the amount of CPU we consume.
defaultmax = textlen * 4
maxdist = self._maxdeltachainspan
if not maxdist:
- maxdist = dist # ensure the conditional pass
+ maxdist = d.distance # ensure the conditional pass
maxdist = max(maxdist, defaultmax)
- if (dist > maxdist or l > textlen or
- compresseddeltalen > textlen * 2 or
- (self._maxchainlen and chainlen > self._maxchainlen)):
+ if (d.distance > maxdist or d.deltalen > textlen or
+ d.compresseddeltalen > textlen * 2 or
+ (self._maxchainlen and d.chainlen > self._maxchainlen)):
return False
return True
def _addrevision(self, node, rawtext, transaction, link, p1, p2, flags,
- cachedelta, ifh, dfh, alwayscache=False):
+ cachedelta, ifh, dfh, alwayscache=False,
+ deltacomputer=None):
"""internal function to add revisions to the log
see addrevision for argument descriptions.
note: "addrevision" takes non-raw text, "_addrevision" takes raw text.
+ if "deltacomputer" is not provided or None, a defaultdeltacomputer will
+ be used.
+
invariants:
- rawtext is optional (can be None); if not set, cachedelta must be set.
if both are set, they must correspond to each other.
@@ -1861,76 +2051,16 @@
raise RevlogError(_("%s: attempt to add wdir revision") %
(self.indexfile))
- btext = [rawtext]
- def buildtext():
- if btext[0] is not None:
- return btext[0]
- baserev = cachedelta[0]
- delta = cachedelta[1]
- # special case deltas which replace entire base; no need to decode
- # base revision. this neatly avoids censored bases, which throw when
- # they're decoded.
- hlen = struct.calcsize(">lll")
- if delta[:hlen] == mdiff.replacediffheader(self.rawsize(baserev),
- len(delta) - hlen):
- btext[0] = delta[hlen:]
- else:
- if self._inline:
- fh = ifh
- else:
- fh = dfh
- basetext = self.revision(baserev, _df=fh, raw=True)
- btext[0] = mdiff.patch(basetext, delta)
+ if self._inline:
+ fh = ifh
+ else:
+ fh = dfh
- try:
- res = self._processflags(btext[0], flags, 'read', raw=True)
- btext[0], validatehash = res
- if validatehash:
- self.checkhash(btext[0], node, p1=p1, p2=p2)
- if flags & REVIDX_ISCENSORED:
- raise RevlogError(_('node %s is not censored') % node)
- except CensoredNodeError:
- # must pass the censored index flag to add censored revisions
- if not flags & REVIDX_ISCENSORED:
- raise
- return btext[0]
-
- def builddelta(rev):
- # can we use the cached delta?
- if cachedelta and cachedelta[0] == rev:
- delta = cachedelta[1]
- else:
- t = buildtext()
- if self.iscensored(rev):
- # deltas based on a censored revision must replace the
- # full content in one patch, so delta works everywhere
- header = mdiff.replacediffheader(self.rawsize(rev), len(t))
- delta = header + t
- else:
- if self._inline:
- fh = ifh
- else:
- fh = dfh
- ptext = self.revision(rev, _df=fh, raw=True)
- delta = mdiff.textdiff(ptext, t)
- header, data = self.compress(delta)
- deltalen = len(header) + len(data)
- chainbase = self.chainbase(rev)
- dist = deltalen + offset - self.start(chainbase)
- if self._generaldelta:
- base = rev
- else:
- base = chainbase
- chainlen, compresseddeltalen = self._chaininfo(rev)
- chainlen += 1
- compresseddeltalen += deltalen
- return (dist, deltalen, (header, data), base,
- chainbase, chainlen, compresseddeltalen)
+ btext = [rawtext]
curr = len(self)
prev = curr - 1
offset = self.end(prev)
- delta = None
p1r, p2r = self.rev(p1), self.rev(p2)
# full versions are inserted when the needed deltas
@@ -1941,46 +2071,19 @@
else:
textlen = len(rawtext)
- # should we try to build a delta?
- if prev != nullrev and self.storedeltachains:
- tested = set()
- # This condition is true most of the time when processing
- # changegroup data into a generaldelta repo. The only time it
- # isn't true is if this is the first revision in a delta chain
- # or if ``format.generaldelta=true`` disabled ``lazydeltabase``.
- if cachedelta and self._generaldelta and self._lazydeltabase:
- # Assume what we received from the server is a good choice
- # build delta will reuse the cache
- candidatedelta = builddelta(cachedelta[0])
- tested.add(cachedelta[0])
- if self._isgooddelta(candidatedelta, textlen):
- delta = candidatedelta
- if delta is None and self._generaldelta:
- # exclude already lazy tested base if any
- parents = [p for p in (p1r, p2r)
- if p != nullrev and p not in tested]
- if parents and not self._aggressivemergedeltas:
- # Pick whichever parent is closer to us (to minimize the
- # chance of having to build a fulltext).
- parents = [max(parents)]
- tested.update(parents)
- pdeltas = []
- for p in parents:
- pd = builddelta(p)
- if self._isgooddelta(pd, textlen):
- pdeltas.append(pd)
- if pdeltas:
- delta = min(pdeltas, key=lambda x: x[1])
- if delta is None and prev not in tested:
- # other approach failed try against prev to hopefully save us a
- # fulltext.
- candidatedelta = builddelta(prev)
- if self._isgooddelta(candidatedelta, textlen):
- delta = candidatedelta
- if delta is not None:
- dist, l, data, base, chainbase, chainlen, compresseddeltalen = delta
+ if deltacomputer is None:
+ deltacomputer = _deltacomputer(self)
+
+ revinfo = _revisioninfo(node, p1, p2, btext, textlen, cachedelta, flags)
+ deltainfo = deltacomputer.finddeltainfo(revinfo, fh)
+
+ if deltainfo is not None:
+ base = deltainfo.base
+ chainbase = deltainfo.chainbase
+ data = deltainfo.data
+ l = deltainfo.deltalen
else:
- rawtext = buildtext()
+ rawtext = deltacomputer.buildtext(revinfo, fh)
data = self.compress(rawtext)
l = len(data[1]) + len(data[0])
base = chainbase = curr
@@ -1994,7 +2097,7 @@
self._writeentry(transaction, ifh, dfh, entry, data, link, offset)
if alwayscache and rawtext is None:
- rawtext = buildtext()
+ rawtext = deltacomputer.buildtext(revinfo, fh)
if type(rawtext) == str: # only accept immutable objects
self._cache = (node, curr, rawtext)
@@ -2064,6 +2167,7 @@
dfh.flush()
ifh.flush()
try:
+ deltacomputer = _deltacomputer(self)
# loop through our set of deltas
for data in deltas:
node, p1, p2, linknode, deltabase, delta, flags = data
@@ -2110,7 +2214,8 @@
self._addrevision(node, None, transaction, link,
p1, p2, flags, (baserev, delta),
ifh, dfh,
- alwayscache=bool(addrevisioncb))
+ alwayscache=bool(addrevisioncb),
+ deltacomputer=deltacomputer)
if addrevisioncb:
addrevisioncb(self, node)
@@ -2264,7 +2369,9 @@
DELTAREUSESAMEREVS = 'samerevs'
DELTAREUSENEVER = 'never'
- DELTAREUSEALL = {'always', 'samerevs', 'never'}
+ DELTAREUSEFULLADD = 'fulladd'
+
+ DELTAREUSEALL = {'always', 'samerevs', 'never', 'fulladd'}
def clone(self, tr, destrevlog, addrevisioncb=None,
deltareuse=DELTAREUSESAMEREVS, aggressivemergedeltas=None):
@@ -2331,6 +2438,7 @@
populatecachedelta = deltareuse in (self.DELTAREUSEALWAYS,
self.DELTAREUSESAMEREVS)
+ deltacomputer = _deltacomputer(destrevlog)
index = self.index
for rev in self:
entry = index[rev]
@@ -2355,18 +2463,26 @@
if not cachedelta:
rawtext = self.revision(rev, raw=True)
- ifh = destrevlog.opener(destrevlog.indexfile, 'a+',
- checkambig=False)
- dfh = None
- if not destrevlog._inline:
- dfh = destrevlog.opener(destrevlog.datafile, 'a+')
- try:
- destrevlog._addrevision(node, rawtext, tr, linkrev, p1, p2,
- flags, cachedelta, ifh, dfh)
- finally:
- if dfh:
- dfh.close()
- ifh.close()
+
+ if deltareuse == self.DELTAREUSEFULLADD:
+ destrevlog.addrevision(rawtext, tr, linkrev, p1, p2,
+ cachedelta=cachedelta,
+ node=node, flags=flags,
+ deltacomputer=deltacomputer)
+ else:
+ ifh = destrevlog.opener(destrevlog.indexfile, 'a+',
+ checkambig=False)
+ dfh = None
+ if not destrevlog._inline:
+ dfh = destrevlog.opener(destrevlog.datafile, 'a+')
+ try:
+ destrevlog._addrevision(node, rawtext, tr, linkrev, p1,
+ p2, flags, cachedelta, ifh, dfh,
+ deltacomputer=deltacomputer)
+ finally:
+ if dfh:
+ dfh.close()
+ ifh.close()
if addrevisioncb:
addrevisioncb(self, rev, node)
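
A minimal runnable sketch of where the new clone mode sits among the reuse policies; the constants are copied from the hunk above, while usesfulladd() itself is only illustrative.

    DELTAREUSEALWAYS = 'always'
    DELTAREUSESAMEREVS = 'samerevs'
    DELTAREUSENEVER = 'never'
    DELTAREUSEFULLADD = 'fulladd'
    DELTAREUSEALL = {DELTAREUSEALWAYS, DELTAREUSESAMEREVS,
                     DELTAREUSENEVER, DELTAREUSEFULLADD}

    def usesfulladd(deltareuse):
        # 'fulladd' routes each revision through the public addrevision()
        # path, recomputing deltas with a shared _deltacomputer instead of
        # copying the stored delta; other modes keep the _addrevision() path.
        if deltareuse not in DELTAREUSEALL:
            raise ValueError('unknown deltareuse mode: %s' % deltareuse)
        return deltareuse == DELTAREUSEFULLADD

    assert usesfulladd('fulladd') and not usesfulladd('samerevs')
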
--- a/mercurial/revset.py Mon Jan 08 16:07:51 2018 -0800
+++ b/mercurial/revset.py Mon Jan 22 17:53:02 2018 -0500
@@ -22,6 +22,7 @@
obsutil,
pathutil,
phases,
+ pycompat,
registrar,
repoview,
revsetlang,
@@ -123,7 +124,7 @@
def rangeall(repo, subset, x, order):
assert x is None
- return _makerangeset(repo, subset, 0, len(repo) - 1, order)
+ return _makerangeset(repo, subset, 0, repo.changelog.tiprev(), order)
def rangepre(repo, subset, y, order):
# ':y' can't be rewritten to '0:y' since '0' may be hidden
@@ -136,7 +137,8 @@
m = getset(repo, fullreposet(repo), x)
if not m:
return baseset()
- return _makerangeset(repo, subset, m.first(), len(repo) - 1, order)
+ return _makerangeset(repo, subset, m.first(), repo.changelog.tiprev(),
+ order)
def _makerangeset(repo, subset, m, n, order):
if m == n:
@@ -144,7 +146,7 @@
elif n == node.wdirrev:
r = spanset(repo, m, len(repo)) + baseset([n])
elif m == node.wdirrev:
- r = baseset([m]) + spanset(repo, len(repo) - 1, n - 1)
+ r = baseset([m]) + spanset(repo, repo.changelog.tiprev(), n - 1)
elif m < n:
r = spanset(repo, m, n + 1)
else:
@@ -266,7 +268,8 @@
def _destupdate(repo, subset, x):
# experimental revset for update destination
args = getargsdict(x, 'limit', 'clean')
- return subset & baseset([destutil.destupdate(repo, **args)[0]])
+ return subset & baseset([destutil.destupdate(repo,
+ **pycompat.strkwargs(args))[0]])
@predicate('_destmerge')
def _destmerge(repo, subset, x):
@@ -909,48 +912,43 @@
return limit(repo, subset, x, order)
def _follow(repo, subset, x, name, followfirst=False):
- l = getargs(x, 0, 2, _("%s takes no arguments or a pattern "
- "and an optional revset") % name)
- c = repo['.']
- if l:
- x = getstring(l[0], _("%s expected a pattern") % name)
- rev = None
- if len(l) >= 2:
- revs = getset(repo, fullreposet(repo), l[1])
- if len(revs) != 1:
- raise error.RepoLookupError(
- _("%s expected one starting revision") % name)
- rev = revs.last()
- c = repo[rev]
- matcher = matchmod.match(repo.root, repo.getcwd(), [x],
- ctx=repo[rev], default='path')
-
- files = c.manifest().walk(matcher)
-
- s = set()
- for fname in files:
- fctx = c[fname]
- s = s.union(set(c.rev() for c in fctx.ancestors(followfirst)))
- # include the revision responsible for the most recent version
- s.add(fctx.introrev())
+ args = getargsdict(x, name, 'file startrev')
+ revs = None
+ if 'startrev' in args:
+ revs = getset(repo, fullreposet(repo), args['startrev'])
+ if 'file' in args:
+ x = getstring(args['file'], _("%s expected a pattern") % name)
+ if revs is None:
+ revs = [None]
+ fctxs = []
+ for r in revs:
+ ctx = mctx = repo[r]
+ if r is None:
+ ctx = repo['.']
+ m = matchmod.match(repo.root, repo.getcwd(), [x],
+ ctx=mctx, default='path')
+ fctxs.extend(ctx[f].introfilectx() for f in ctx.manifest().walk(m))
+ s = dagop.filerevancestors(fctxs, followfirst)
else:
- s = dagop.revancestors(repo, baseset([c.rev()]), followfirst)
+ if revs is None:
+ revs = baseset([repo['.'].rev()])
+ s = dagop.revancestors(repo, revs, followfirst)
return subset & s
-@predicate('follow([pattern[, startrev]])', safe=True)
+@predicate('follow([file[, startrev]])', safe=True)
def follow(repo, subset, x):
"""
An alias for ``::.`` (ancestors of the working directory's first parent).
- If pattern is specified, the histories of files matching given
+ If a file pattern is specified, the histories of files matching the given
pattern in the revision given by startrev are followed, including copies.
"""
return _follow(repo, subset, x, 'follow')
@predicate('_followfirst', safe=True)
def _followfirst(repo, subset, x):
- # ``followfirst([pattern[, startrev]])``
- # Like ``follow([pattern[, startrev]])`` but follows only the first parent
+ # ``followfirst([file[, startrev]])``
+ # Like ``follow([file[, startrev]])`` but follows only the first parent
# of every revisions or files revisions.
return _follow(repo, subset, x, '_followfirst', followfirst=True)
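
A hedged usage sketch of the reworked predicate; it assumes an existing localrepo object `repo`, and the file name and start revision are placeholders.

    # same as "::." -- ancestors of the working directory's first parent
    ancestors = repo.revs('follow()')
    # history of one file, copies included, starting from an explicit revision
    filehistory = repo.revs("follow(%s, startrev=%d)",
                            'mercurial/revset.py', 42)
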
@@ -1421,8 +1419,16 @@
l = getargs(x, 0, 1, _("outgoing takes one or no arguments"))
# i18n: "outgoing" is a keyword
dest = l and getstring(l[0], _("outgoing requires a repository path")) or ''
- dest = repo.ui.expandpath(dest or 'default-push', dest or 'default')
- dest, branches = hg.parseurl(dest)
+ if not dest:
+ # ui.paths.getpath() explicitly tests for None, not just a boolean
+ dest = None
+ path = repo.ui.paths.getpath(dest, default=('default-push', 'default'))
+ if not path:
+ raise error.Abort(_('default repository not configured!'),
+ hint=_("see 'hg help config.paths'"))
+ dest = path.pushloc or path.loc
+ branches = path.branch, []
+
revs, checkout = hg.addbranchrevs(repo, repo, branches, [])
if revs:
revs = [repo.lookup(rev) for rev in revs]
@@ -1509,8 +1515,7 @@
def _phase(repo, subset, *targets):
"""helper to select all rev in <targets> phases"""
- s = repo._phasecache.getrevset(repo, targets)
- return subset & s
+ return repo._phasecache.getrevset(repo, targets, subset)
@predicate('draft()', safe=True)
def draft(repo, subset, x):
@@ -1617,11 +1622,7 @@
"""Changeset in public phase."""
# i18n: "public" is a keyword
getargs(x, 0, 0, _("public takes no arguments"))
- phase = repo._phasecache.phase
- target = phases.public
- condition = lambda r: phase(repo, r) == target
- return subset.filter(condition, condrepr=('<phase %r>', target),
- cache=False)
+ return _phase(repo, subset, phases.public)
@predicate('remote([id [,path]])', safe=False)
def remote(repo, subset, x):
--- a/mercurial/revsetlang.py Mon Jan 08 16:07:51 2018 -0800
+++ b/mercurial/revsetlang.py Mon Jan 22 17:53:02 2018 -0500
@@ -27,8 +27,10 @@
"~": (18, None, None, ("ancestor", 18), None),
"^": (18, None, None, ("parent", 18), "parentpost"),
"-": (5, None, ("negate", 19), ("minus", 5), None),
- "::": (17, None, ("dagrangepre", 17), ("dagrange", 17), "dagrangepost"),
- "..": (17, None, ("dagrangepre", 17), ("dagrange", 17), "dagrangepost"),
+ "::": (17, "dagrangeall", ("dagrangepre", 17), ("dagrange", 17),
+ "dagrangepost"),
+ "..": (17, "dagrangeall", ("dagrangepre", 17), ("dagrange", 17),
+ "dagrangepost"),
":": (15, "rangeall", ("rangepre", 15), ("range", 15), "rangepost"),
"not": (10, None, ("not", 10), None, None),
"!": (10, None, ("not", 10), None, None),
@@ -288,6 +290,8 @@
post = ('parentpost', x[1])
if x[2][0] == 'dagrangepre':
return _fixops(('dagrange', post, x[2][1]))
+ elif x[2][0] == 'dagrangeall':
+ return _fixops(('dagrangepost', post))
elif x[2][0] == 'rangepre':
return _fixops(('range', post, x[2][1]))
elif x[2][0] == 'rangeall':
@@ -313,6 +317,8 @@
return _analyze(_build('only(_, _)', *x[1:]))
elif op == 'onlypost':
return _analyze(_build('only(_)', x[1]))
+ elif op == 'dagrangeall':
+ raise error.ParseError(_("can't use '::' in this context"))
elif op == 'dagrangepre':
return _analyze(_build('ancestors(_)', x[1]))
elif op == 'dagrangepost':
@@ -549,6 +555,52 @@
"""
return "'%s'" % util.escapestr(pycompat.bytestr(s))
+def _formatargtype(c, arg):
+ if c == 'd':
+ return '%d' % int(arg)
+ elif c == 's':
+ return _quote(arg)
+ elif c == 'r':
+ parse(arg) # make sure syntax errors are confined
+ return '(%s)' % arg
+ elif c == 'n':
+ return _quote(node.hex(arg))
+ elif c == 'b':
+ try:
+ return _quote(arg.branch())
+ except AttributeError:
+ raise TypeError
+ raise error.ParseError(_('unexpected revspec format character %s') % c)
+
+def _formatlistexp(s, t):
+ l = len(s)
+ if l == 0:
+ return "_list('')"
+ elif l == 1:
+ return _formatargtype(t, s[0])
+ elif t == 'd':
+ return "_intlist('%s')" % "\0".join('%d' % int(a) for a in s)
+ elif t == 's':
+ return "_list(%s)" % _quote("\0".join(s))
+ elif t == 'n':
+ return "_hexlist('%s')" % "\0".join(node.hex(a) for a in s)
+ elif t == 'b':
+ try:
+ return "_list('%s')" % "\0".join(a.branch() for a in s)
+ except AttributeError:
+ raise TypeError
+
+ m = l // 2
+ return '(%s or %s)' % (_formatlistexp(s[:m], t), _formatlistexp(s[m:], t))
+
+def _formatparamexp(args, t):
+ return ', '.join(_formatargtype(t, a) for a in args)
+
+_formatlistfuncs = {
+ 'l': _formatlistexp,
+ 'p': _formatparamexp,
+}
+
def formatspec(expr, *args):
'''
This is a convenience function for using revsets internally, and
@@ -564,7 +616,8 @@
%n = hex(arg), single-quoted
%% = a literal '%'
- Prefixing the type with 'l' specifies a parenthesized list of that type.
+ Prefixing the type with 'l' specifies a parenthesized list of that type,
+ and 'p' specifies a list of function parameters of that type.
>>> formatspec(b'%r:: and %lr', b'10 or 11', (b"this()", b"that()"))
'(10 or 11):: and ((this()) or (that()))'
@@ -579,68 +632,61 @@
>>> formatspec(b'branch(%b)', b)
"branch('default')"
>>> formatspec(b'root(%ls)', [b'a', b'b', b'c', b'd'])
- "root(_list('a\\x00b\\x00c\\x00d'))"
+ "root(_list('a\\\\x00b\\\\x00c\\\\x00d'))"
+ >>> formatspec(b'sort(%r, %ps)', b':', [b'desc', b'user'])
+ "sort((:), 'desc', 'user')"
+ >>> formatspec('%ls', ['a', "'"])
+ "_list('a\\\\x00\\\\'')"
'''
-
- def argtype(c, arg):
- if c == 'd':
- return '%d' % int(arg)
- elif c == 's':
- return _quote(arg)
- elif c == 'r':
- parse(arg) # make sure syntax errors are confined
- return '(%s)' % arg
- elif c == 'n':
- return _quote(node.hex(arg))
- elif c == 'b':
- return _quote(arg.branch())
+ expr = pycompat.bytestr(expr)
+ argiter = iter(args)
+ ret = []
+ pos = 0
+ while pos < len(expr):
+ q = expr.find('%', pos)
+ if q < 0:
+ ret.append(expr[pos:])
+ break
+ ret.append(expr[pos:q])
+ pos = q + 1
+ try:
+ d = expr[pos]
+ except IndexError:
+ raise error.ParseError(_('incomplete revspec format character'))
+ if d == '%':
+ ret.append(d)
+ pos += 1
+ continue
- def listexp(s, t):
- l = len(s)
- if l == 0:
- return "_list('')"
- elif l == 1:
- return argtype(t, s[0])
- elif t == 'd':
- return "_intlist('%s')" % "\0".join('%d' % int(a) for a in s)
- elif t == 's':
- return "_list('%s')" % "\0".join(s)
- elif t == 'n':
- return "_hexlist('%s')" % "\0".join(node.hex(a) for a in s)
- elif t == 'b':
- return "_list('%s')" % "\0".join(a.branch() for a in s)
-
- m = l // 2
- return '(%s or %s)' % (listexp(s[:m], t), listexp(s[m:], t))
-
- expr = pycompat.bytestr(expr)
- ret = ''
- pos = 0
- arg = 0
- while pos < len(expr):
- c = expr[pos]
- if c == '%':
+ try:
+ arg = next(argiter)
+ except StopIteration:
+ raise error.ParseError(_('missing argument for revspec'))
+ f = _formatlistfuncs.get(d)
+ if f:
+ # a list of some type
pos += 1
- d = expr[pos]
- if d == '%':
- ret += d
- elif d in 'dsnbr':
- ret += argtype(d, args[arg])
- arg += 1
- elif d == 'l':
- # a list of some type
- pos += 1
+ try:
d = expr[pos]
- ret += listexp(list(args[arg]), d)
- arg += 1
- else:
- raise error.Abort(_('unexpected revspec format character %s')
- % d)
+ except IndexError:
+ raise error.ParseError(_('incomplete revspec format character'))
+ try:
+ ret.append(f(list(arg), d))
+ except (TypeError, ValueError):
+ raise error.ParseError(_('invalid argument for revspec'))
else:
- ret += c
+ try:
+ ret.append(_formatargtype(d, arg))
+ except (TypeError, ValueError):
+ raise error.ParseError(_('invalid argument for revspec'))
pos += 1
- return ret
+ try:
+ next(argiter)
+ raise error.ParseError(_('too many revspec arguments specified'))
+ except StopIteration:
+ pass
+ return ''.join(ret)
def prettyformat(tree):
return parser.prettyformat(tree, ('string', 'symbol'))
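
A hedged sketch of the stricter argument handling added to formatspec(); the first call mirrors the doctest above, the second exercises the new argument-count check.

    from mercurial import error, revsetlang

    spec = revsetlang.formatspec(b'%r:: and %lr',
                                 b'10 or 11', (b"this()", b"that()"))
    # -> "(10 or 11):: and ((this()) or (that()))"   (as in the doctest above)

    try:
        revsetlang.formatspec(b'%d', 10, 11)   # one positional argument too many
    except error.ParseError:
        pass                                   # "too many revspec arguments specified"
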
@@ -661,3 +707,34 @@
if tree[0] == 'func':
funcs.add(tree[1][1])
return funcs
+
+_hashre = util.re.compile('[0-9a-fA-F]{1,40}$')
+
+def _ishashlikesymbol(symbol):
+ """returns true if the symbol looks like a hash"""
+ return _hashre.match(symbol)
+
+def gethashlikesymbols(tree):
+ """returns the list of symbols of the tree that look like hashes
+
+ >>> gethashlikesymbols(('dagrange', ('symbol', '3'), ('symbol', 'abe3ff')))
+ ['3', 'abe3ff']
+ >>> gethashlikesymbols(('func', ('symbol', 'precursors'), ('symbol', '.')))
+ []
+ >>> gethashlikesymbols(('func', ('symbol', 'precursors'), ('symbol', '34')))
+ ['34']
+ >>> gethashlikesymbols(('symbol', 'abe3ffZ'))
+ []
+ """
+ if not tree:
+ return []
+
+ if tree[0] == "symbol":
+ if _ishashlikesymbol(tree[1]):
+ return [tree[1]]
+ elif len(tree) >= 3:
+ results = []
+ for subtree in tree[1:]:
+ results += gethashlikesymbols(subtree)
+ return results
+ return []
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/mercurial/rewriteutil.py Mon Jan 22 17:53:02 2018 -0500
@@ -0,0 +1,53 @@
+# rewriteutil.py - utility functions for rewriting changesets
+#
+# Copyright 2017 Octobus <contact@octobus.net>
+#
+# This software may be used and distributed according to the terms of the
+# GNU General Public License version 2 or any later version.
+
+from __future__ import absolute_import
+
+from .i18n import _
+
+from . import (
+ error,
+ node,
+ obsolete,
+ revset,
+)
+
+def precheck(repo, revs, action='rewrite'):
+ """check if revs can be rewritten
+ action is used to control the error message.
+
+ Make sure this function is called after taking the lock.
+ """
+ if node.nullrev in revs:
+ msg = _("cannot %s null changeset") % (action)
+ hint = _("no changeset checked out")
+ raise error.Abort(msg, hint=hint)
+
+ publicrevs = repo.revs('%ld and public()', revs)
+ if len(repo[None].parents()) > 1:
+ raise error.Abort(_("cannot %s while merging") % action)
+
+ if publicrevs:
+ msg = _("cannot %s public changesets") % (action)
+ hint = _("see 'hg help phases' for details")
+ raise error.Abort(msg, hint=hint)
+
+ newunstable = disallowednewunstable(repo, revs)
+ if newunstable:
+ raise error.Abort(_("cannot %s changeset with children") % action)
+
+def disallowednewunstable(repo, revs):
+ """Checks whether editing the revs will create new unstable changesets and
+ are we allowed to create them.
+
+ To allow new unstable changesets, set the config:
+ `experimental.evolution.allowunstable=True`
+ """
+ allowunstable = obsolete.isenabled(repo, obsolete.allowunstableopt)
+ if allowunstable:
+ return revset.baseset()
+ return repo.revs("(%ld::) - %ld", revs, revs)
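
A hedged usage sketch of precheck(); `repo` and the revision selection are placeholders, and the caller is assumed to already hold the lock as the docstring requires.

    from mercurial import rewriteutil

    revs = repo.revs('draft() and 42::')          # placeholder selection
    # raises error.Abort (with a hint) if the revs are public, have children,
    # or the working directory is a merge; otherwise it returns silently
    rewriteutil.precheck(repo, revs, action='amend')
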
--- a/mercurial/scmutil.py Mon Jan 08 16:07:51 2018 -0800
+++ b/mercurial/scmutil.py Mon Jan 22 17:53:02 2018 -0500
@@ -1100,12 +1100,11 @@
finally:
if proc:
proc.communicate()
- if proc.returncode != 0:
- # not an error so 'cmd | grep' can be empty
- repo.ui.debug("extdata command '%s' %s\n"
- % (cmd, util.explainexit(proc.returncode)[0]))
if src:
src.close()
+ if proc and proc.returncode != 0:
+ raise error.Abort(_("extdata command '%s' failed: %s")
+ % (cmd, util.explainexit(proc.returncode)[0]))
return data
@@ -1223,6 +1222,9 @@
'unbundle',
]
+# A marker that tells the evolve extension to suppress its own reporting
+_reportstroubledchangesets = True
+
def registersummarycallback(repo, otr, txnname=''):
"""register a callback to issue a summary after the transaction is closed
"""
@@ -1245,7 +1247,7 @@
if filtername:
repo = repo.filtered(filtername)
func(repo, tr)
- newcat = '%2i-txnreport' % len(categories)
+ newcat = '%02i-txnreport' % len(categories)
otr.addpostclose(newcat, wrapped)
categories.append(newcat)
return wrapped
@@ -1258,11 +1260,38 @@
repo.ui.status(_('obsoleted %i changesets\n')
% len(obsoleted))
+ if (obsolete.isenabled(repo, obsolete.createmarkersopt) and
+ repo.ui.configbool('experimental', 'evolution.report-instabilities')):
+ instabilitytypes = [
+ ('orphan', 'orphan'),
+ ('phase-divergent', 'phasedivergent'),
+ ('content-divergent', 'contentdivergent'),
+ ]
+
+ def getinstabilitycounts(repo):
+ filtered = repo.changelog.filteredrevs
+ counts = {}
+ for instability, revset in instabilitytypes:
+ counts[instability] = len(set(obsolete.getrevs(repo, revset)) -
+ filtered)
+ return counts
+
+ oldinstabilitycounts = getinstabilitycounts(repo)
+ @reportsummary
+ def reportnewinstabilities(repo, tr):
+ newinstabilitycounts = getinstabilitycounts(repo)
+ for instability, revset in instabilitytypes:
+ delta = (newinstabilitycounts[instability] -
+ oldinstabilitycounts[instability])
+ if delta > 0:
+ repo.ui.warn(_('%i new %s changesets\n') %
+ (delta, instability))
+
if txmatch(_reportnewcssource):
@reportsummary
def reportnewcs(repo, tr):
"""Report the range of new revisions pulled/unbundled."""
- newrevs = list(tr.changes.get('revs', set()))
+ newrevs = tr.changes.get('revs', xrange(0, 0))
if not newrevs:
return
@@ -1279,3 +1308,108 @@
else:
revrange = '%s:%s' % (minrev, maxrev)
repo.ui.status(_('new changesets %s\n') % revrange)
+
+def nodesummaries(repo, nodes, maxnumnodes=4):
+ if len(nodes) <= maxnumnodes or repo.ui.verbose:
+ return ' '.join(short(h) for h in nodes)
+ first = ' '.join(short(h) for h in nodes[:maxnumnodes])
+ return _("%s and %d others") % (first, len(nodes) - maxnumnodes)
+
+def enforcesinglehead(repo, tr, desc):
+ """check that no named branch has multiple heads"""
+ if desc in ('strip', 'repair'):
+ # skip the logic during strip
+ return
+ visible = repo.filtered('visible')
+ # possible improvement: we could restrict the check to affected branch
+ for name, heads in visible.branchmap().iteritems():
+ if len(heads) > 1:
+ msg = _('rejecting multiple heads on branch "%s"')
+ msg %= name
+ hint = _('%d heads: %s')
+ hint %= (len(heads), nodesummaries(repo, heads))
+ raise error.Abort(msg, hint=hint)
+
+def wrapconvertsink(sink):
+ """Allow extensions to wrap the sink returned by convcmd.convertsink()
+ before it is used, whether or not the convert extension was formally loaded.
+ """
+ return sink
+
+def unhidehashlikerevs(repo, specs, hiddentype):
+ """parse the user specs and unhide changesets whose hash or revision number
+ is passed.
+
+ hiddentype can be: 1) 'warn': warn while unhiding changesets
+ 2) 'nowarn': don't warn while unhiding changesets
+
+ returns a repo object with the required changesets unhidden
+ """
+ if not repo.filtername or not repo.ui.configbool('experimental',
+ 'directaccess'):
+ return repo
+
+ if repo.filtername not in ('visible', 'visible-hidden'):
+ return repo
+
+ symbols = set()
+ for spec in specs:
+ try:
+ tree = revsetlang.parse(spec)
+ except error.ParseError: # will be reported by scmutil.revrange()
+ continue
+
+ symbols.update(revsetlang.gethashlikesymbols(tree))
+
+ if not symbols:
+ return repo
+
+ revs = _getrevsfromsymbols(repo, symbols)
+
+ if not revs:
+ return repo
+
+ if hiddentype == 'warn':
+ unfi = repo.unfiltered()
+ revstr = ", ".join([pycompat.bytestr(unfi[l]) for l in revs])
+ repo.ui.warn(_("warning: accessing hidden changesets for write "
+ "operation: %s\n") % revstr)
+
+ # we have to use a new filtername to separate branch/tags caches until we
+ # can disable these caches when revisions are dynamically pinned.
+ return repo.filtered('visible-hidden', revs)
+
+def _getrevsfromsymbols(repo, symbols):
+ """parse the list of symbols and returns a set of revision numbers of hidden
+ changesets present in symbols"""
+ revs = set()
+ unfi = repo.unfiltered()
+ unficl = unfi.changelog
+ cl = repo.changelog
+ tiprev = len(unficl)
+ pmatch = unficl._partialmatch
+ allowrevnums = repo.ui.configbool('experimental', 'directaccess.revnums')
+ for s in symbols:
+ try:
+ n = int(s)
+ if n <= tiprev:
+ if not allowrevnums:
+ continue
+ else:
+ if n not in cl:
+ revs.add(n)
+ continue
+ except ValueError:
+ pass
+
+ try:
+ s = pmatch(s)
+ except error.LookupError:
+ s = None
+
+ if s is not None:
+ rev = unficl.rev(s)
+ if rev not in cl:
+ revs.add(rev)
+
+ return revs
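
A hedged usage sketch of the direct-access helper; `repo` is assumed to exist, the hash is a placeholder, and experimental.directaccess must be enabled for it to do anything.

    from mercurial import scmutil

    specs = [b'ffa21969a649']                 # hash-like symbol from user input
    repo = scmutil.unhidehashlikerevs(repo, specs, 'warn')
    # if the hash named a hidden changeset, repo is now a 'visible-hidden'
    # view with that revision pinned; otherwise the original repo is returned
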
--- a/mercurial/selectors2.py Mon Jan 08 16:07:51 2018 -0800
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,745 +0,0 @@
-""" Back-ported, durable, and portable selectors """
-
-# MIT License
-#
-# Copyright (c) 2017 Seth Michael Larson
-#
-# Permission is hereby granted, free of charge, to any person obtaining a copy
-# of this software and associated documentation files (the "Software"), to deal
-# in the Software without restriction, including without limitation the rights
-# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-# copies of the Software, and to permit persons to whom the Software is
-# furnished to do so, subject to the following conditions:
-#
-# The above copyright notice and this permission notice shall be included in all
-# copies or substantial portions of the Software.
-#
-# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
-# SOFTWARE.
-
-# no-check-code
-
-from __future__ import absolute_import
-
-import collections
-import errno
-import math
-import select
-import socket
-import sys
-import time
-
-from . import pycompat
-
-namedtuple = collections.namedtuple
-Mapping = collections.Mapping
-
-try:
- monotonic = time.monotonic
-except AttributeError:
- monotonic = time.time
-
-__author__ = 'Seth Michael Larson'
-__email__ = 'sethmichaellarson@protonmail.com'
-__version__ = '2.0.0'
-__license__ = 'MIT'
-__url__ = 'https://www.github.com/SethMichaelLarson/selectors2'
-
-__all__ = ['EVENT_READ',
- 'EVENT_WRITE',
- 'SelectorKey',
- 'DefaultSelector',
- 'BaseSelector']
-
-EVENT_READ = (1 << 0)
-EVENT_WRITE = (1 << 1)
-_DEFAULT_SELECTOR = None
-_SYSCALL_SENTINEL = object() # Sentinel in case a system call returns None.
-_ERROR_TYPES = (OSError, IOError, socket.error)
-
-
-SelectorKey = namedtuple('SelectorKey', ['fileobj', 'fd', 'events', 'data'])
-
-
-class _SelectorMapping(Mapping):
- """ Mapping of file objects to selector keys """
-
- def __init__(self, selector):
- self._selector = selector
-
- def __len__(self):
- return len(self._selector._fd_to_key)
-
- def __getitem__(self, fileobj):
- try:
- fd = self._selector._fileobj_lookup(fileobj)
- return self._selector._fd_to_key[fd]
- except KeyError:
- raise KeyError("{0!r} is not registered.".format(fileobj))
-
- def __iter__(self):
- return iter(self._selector._fd_to_key)
-
-
-def _fileobj_to_fd(fileobj):
- """ Return a file descriptor from a file object. If
- given an integer will simply return that integer back. """
- if isinstance(fileobj, int):
- fd = fileobj
- else:
- try:
- fd = int(fileobj.fileno())
- except (AttributeError, TypeError, ValueError):
- raise ValueError("Invalid file object: {0!r}".format(fileobj))
- if fd < 0:
- raise ValueError("Invalid file descriptor: {0}".format(fd))
- return fd
-
-
-class BaseSelector(object):
- """ Abstract Selector class
-
- A selector supports registering file objects to be monitored
- for specific I/O events.
-
- A file object is a file descriptor or any object with a
- `fileno()` method. An arbitrary object can be attached to the
- file object which can be used for example to store context info,
- a callback, etc.
-
- A selector can use various implementations (select(), poll(), epoll(),
- and kqueue()) depending on the platform. The 'DefaultSelector' class uses
- the most efficient implementation for the current platform.
- """
- def __init__(self):
- # Maps file descriptors to keys.
- self._fd_to_key = {}
-
- # Read-only mapping returned by get_map()
- self._map = _SelectorMapping(self)
-
- def _fileobj_lookup(self, fileobj):
- """ Return a file descriptor from a file object.
- This wraps _fileobj_to_fd() to do an exhaustive
- search in case the object is invalid but we still
- have it in our map. Used by unregister() so we can
- unregister an object that was previously registered
- even if it is closed. It is also used by _SelectorMapping
- """
- try:
- return _fileobj_to_fd(fileobj)
- except ValueError:
-
- # Search through all our mapped keys.
- for key in self._fd_to_key.values():
- if key.fileobj is fileobj:
- return key.fd
-
- # Raise ValueError after all.
- raise
-
- def register(self, fileobj, events, data=None):
- """ Register a file object for a set of events to monitor. """
- if (not events) or (events & ~(EVENT_READ | EVENT_WRITE)):
- raise ValueError("Invalid events: {0!r}".format(events))
-
- key = SelectorKey(fileobj, self._fileobj_lookup(fileobj), events, data)
-
- if key.fd in self._fd_to_key:
- raise KeyError("{0!r} (FD {1}) is already registered"
- .format(fileobj, key.fd))
-
- self._fd_to_key[key.fd] = key
- return key
-
- def unregister(self, fileobj):
- """ Unregister a file object from being monitored. """
- try:
- key = self._fd_to_key.pop(self._fileobj_lookup(fileobj))
- except KeyError:
- raise KeyError("{0!r} is not registered".format(fileobj))
-
- # Getting the fileno of a closed socket on Windows errors with EBADF.
- except socket.error as err:
- if err.errno != errno.EBADF:
- raise
- else:
- for key in self._fd_to_key.values():
- if key.fileobj is fileobj:
- self._fd_to_key.pop(key.fd)
- break
- else:
- raise KeyError("{0!r} is not registered".format(fileobj))
- return key
-
- def modify(self, fileobj, events, data=None):
- """ Change a registered file object monitored events and data. """
- # NOTE: Some subclasses optimize this operation even further.
- try:
- key = self._fd_to_key[self._fileobj_lookup(fileobj)]
- except KeyError:
- raise KeyError("{0!r} is not registered".format(fileobj))
-
- if events != key.events:
- self.unregister(fileobj)
- key = self.register(fileobj, events, data)
-
- elif data != key.data:
- # Use a shortcut to update the data.
- key = key._replace(data=data)
- self._fd_to_key[key.fd] = key
-
- return key
-
- def select(self, timeout=None):
- """ Perform the actual selection until some monitored file objects
- are ready or the timeout expires. """
- raise NotImplementedError()
-
- def close(self):
- """ Close the selector. This must be called to ensure that all
- underlying resources are freed. """
- self._fd_to_key.clear()
- self._map = None
-
- def get_key(self, fileobj):
- """ Return the key associated with a registered file object. """
- mapping = self.get_map()
- if mapping is None:
- raise RuntimeError("Selector is closed")
- try:
- return mapping[fileobj]
- except KeyError:
- raise KeyError("{0!r} is not registered".format(fileobj))
-
- def get_map(self):
- """ Return a mapping of file objects to selector keys """
- return self._map
-
- def _key_from_fd(self, fd):
- """ Return the key associated to a given file descriptor
- Return None if it is not found. """
- try:
- return self._fd_to_key[fd]
- except KeyError:
- return None
-
- def __enter__(self):
- return self
-
- def __exit__(self, *_):
- self.close()
-
-
-# Almost all platforms have select.select()
-if hasattr(select, "select"):
- class SelectSelector(BaseSelector):
- """ Select-based selector. """
- def __init__(self):
- super(SelectSelector, self).__init__()
- self._readers = set()
- self._writers = set()
-
- def register(self, fileobj, events, data=None):
- key = super(SelectSelector, self).register(fileobj, events, data)
- if events & EVENT_READ:
- self._readers.add(key.fd)
- if events & EVENT_WRITE:
- self._writers.add(key.fd)
- return key
-
- def unregister(self, fileobj):
- key = super(SelectSelector, self).unregister(fileobj)
- self._readers.discard(key.fd)
- self._writers.discard(key.fd)
- return key
-
- def select(self, timeout=None):
- # Selecting on empty lists on Windows errors out.
- if not len(self._readers) and not len(self._writers):
- return []
-
- timeout = None if timeout is None else max(timeout, 0.0)
- ready = []
- r, w, _ = _syscall_wrapper(self._wrap_select, True, self._readers,
- self._writers, timeout)
- r = set(r)
- w = set(w)
- for fd in r | w:
- events = 0
- if fd in r:
- events |= EVENT_READ
- if fd in w:
- events |= EVENT_WRITE
-
- key = self._key_from_fd(fd)
- if key:
- ready.append((key, events & key.events))
- return ready
-
- def _wrap_select(self, r, w, timeout=None):
- """ Wrapper for select.select because timeout is a positional arg """
- return select.select(r, w, [], timeout)
-
- __all__.append('SelectSelector')
-
- # Jython has a different implementation of .fileno() for socket objects.
- if pycompat.isjython:
- class _JythonSelectorMapping(object):
- """ This is an implementation of _SelectorMapping that is built
- for use specifically with Jython, which does not provide a hashable
- value from socket.socket.fileno(). """
-
- def __init__(self, selector):
- assert isinstance(selector, JythonSelectSelector)
- self._selector = selector
-
- def __len__(self):
- return len(self._selector._sockets)
-
- def __getitem__(self, fileobj):
- for sock, key in self._selector._sockets:
- if sock is fileobj:
- return key
- else:
- raise KeyError("{0!r} is not registered.".format(fileobj))
-
- class JythonSelectSelector(SelectSelector):
- """ This is an implementation of SelectSelector that is for Jython
- which works around that Jython's socket.socket.fileno() does not
- return an integer fd value. All SelectorKey.fd will be equal to -1
- and should not be used. This instead uses object id to compare fileobj
- and will only use select.select as it's the only selector that allows
- directly passing in socket objects rather than registering fds.
- See: http://bugs.jython.org/issue1678
- https://wiki.python.org/jython/NewSocketModule#socket.fileno.28.29_does_not_return_an_integer
- """
-
- def __init__(self):
- super(JythonSelectSelector, self).__init__()
-
- self._sockets = [] # Uses a list of tuples instead of dictionary.
- self._map = _JythonSelectorMapping(self)
- self._readers = []
- self._writers = []
-
- # Jython has a select.cpython_compatible_select function in older versions.
- self._select_func = getattr(select, 'cpython_compatible_select', select.select)
-
- def register(self, fileobj, events, data=None):
- for sock, _ in self._sockets:
- if sock is fileobj:
- raise KeyError("{0!r} is already registered"
- .format(fileobj, sock))
-
- key = SelectorKey(fileobj, -1, events, data)
- self._sockets.append((fileobj, key))
-
- if events & EVENT_READ:
- self._readers.append(fileobj)
- if events & EVENT_WRITE:
- self._writers.append(fileobj)
- return key
-
- def unregister(self, fileobj):
- for i, (sock, key) in enumerate(self._sockets):
- if sock is fileobj:
- break
- else:
- raise KeyError("{0!r} is not registered.".format(fileobj))
-
- if key.events & EVENT_READ:
- self._readers.remove(fileobj)
- if key.events & EVENT_WRITE:
- self._writers.remove(fileobj)
-
- del self._sockets[i]
- return key
-
- def _wrap_select(self, r, w, timeout=None):
- """ Wrapper for select.select because timeout is a positional arg """
- return self._select_func(r, w, [], timeout)
-
- __all__.append('JythonSelectSelector')
- SelectSelector = JythonSelectSelector # Override so the wrong selector isn't used.
-
-
-if hasattr(select, "poll"):
- class PollSelector(BaseSelector):
- """ Poll-based selector """
- def __init__(self):
- super(PollSelector, self).__init__()
- self._poll = select.poll()
-
- def register(self, fileobj, events, data=None):
- key = super(PollSelector, self).register(fileobj, events, data)
- event_mask = 0
- if events & EVENT_READ:
- event_mask |= select.POLLIN
- if events & EVENT_WRITE:
- event_mask |= select.POLLOUT
- self._poll.register(key.fd, event_mask)
- return key
-
- def unregister(self, fileobj):
- key = super(PollSelector, self).unregister(fileobj)
- self._poll.unregister(key.fd)
- return key
-
- def _wrap_poll(self, timeout=None):
- """ Wrapper function for select.poll.poll() so that
- _syscall_wrapper can work with only seconds. """
- if timeout is not None:
- if timeout <= 0:
- timeout = 0
- else:
- # select.poll.poll() has a resolution of 1 millisecond,
- # round away from zero to wait *at least* timeout seconds.
- timeout = math.ceil(timeout * 1000)
-
- result = self._poll.poll(timeout)
- return result
-
- def select(self, timeout=None):
- ready = []
- fd_events = _syscall_wrapper(self._wrap_poll, True, timeout=timeout)
- for fd, event_mask in fd_events:
- events = 0
- if event_mask & ~select.POLLIN:
- events |= EVENT_WRITE
- if event_mask & ~select.POLLOUT:
- events |= EVENT_READ
-
- key = self._key_from_fd(fd)
- if key:
- ready.append((key, events & key.events))
-
- return ready
-
- __all__.append('PollSelector')
-
-if hasattr(select, "epoll"):
- class EpollSelector(BaseSelector):
- """ Epoll-based selector """
- def __init__(self):
- super(EpollSelector, self).__init__()
- self._epoll = select.epoll()
-
- def fileno(self):
- return self._epoll.fileno()
-
- def register(self, fileobj, events, data=None):
- key = super(EpollSelector, self).register(fileobj, events, data)
- events_mask = 0
- if events & EVENT_READ:
- events_mask |= select.EPOLLIN
- if events & EVENT_WRITE:
- events_mask |= select.EPOLLOUT
- _syscall_wrapper(self._epoll.register, False, key.fd, events_mask)
- return key
-
- def unregister(self, fileobj):
- key = super(EpollSelector, self).unregister(fileobj)
- try:
- _syscall_wrapper(self._epoll.unregister, False, key.fd)
- except _ERROR_TYPES:
- # This can occur when the fd was closed since registry.
- pass
- return key
-
- def select(self, timeout=None):
- if timeout is not None:
- if timeout <= 0:
- timeout = 0.0
- else:
- # select.epoll.poll() has a resolution of 1 millisecond
- # but luckily takes seconds so we don't need a wrapper
- # like PollSelector. Just for better rounding.
- timeout = math.ceil(timeout * 1000) * 0.001
- timeout = float(timeout)
- else:
- timeout = -1.0 # epoll.poll() must have a float.
-
- # We always want at least 1 to ensure that select can be called
- # with no file descriptors registered. Otherwise will fail.
- max_events = max(len(self._fd_to_key), 1)
-
- ready = []
- fd_events = _syscall_wrapper(self._epoll.poll, True,
- timeout=timeout,
- maxevents=max_events)
- for fd, event_mask in fd_events:
- events = 0
- if event_mask & ~select.EPOLLIN:
- events |= EVENT_WRITE
- if event_mask & ~select.EPOLLOUT:
- events |= EVENT_READ
-
- key = self._key_from_fd(fd)
- if key:
- ready.append((key, events & key.events))
- return ready
-
- def close(self):
- self._epoll.close()
- super(EpollSelector, self).close()
-
- __all__.append('EpollSelector')
-
-
-if hasattr(select, "devpoll"):
- class DevpollSelector(BaseSelector):
- """Solaris /dev/poll selector."""
-
- def __init__(self):
- super(DevpollSelector, self).__init__()
- self._devpoll = select.devpoll()
-
- def fileno(self):
- return self._devpoll.fileno()
-
- def register(self, fileobj, events, data=None):
- key = super(DevpollSelector, self).register(fileobj, events, data)
- poll_events = 0
- if events & EVENT_READ:
- poll_events |= select.POLLIN
- if events & EVENT_WRITE:
- poll_events |= select.POLLOUT
- self._devpoll.register(key.fd, poll_events)
- return key
-
- def unregister(self, fileobj):
- key = super(DevpollSelector, self).unregister(fileobj)
- self._devpoll.unregister(key.fd)
- return key
-
- def _wrap_poll(self, timeout=None):
- """ Wrapper function for select.poll.poll() so that
- _syscall_wrapper can work with only seconds. """
- if timeout is not None:
- if timeout <= 0:
- timeout = 0
- else:
- # select.devpoll.poll() has a resolution of 1 millisecond,
- # round away from zero to wait *at least* timeout seconds.
- timeout = math.ceil(timeout * 1000)
-
- result = self._devpoll.poll(timeout)
- return result
-
- def select(self, timeout=None):
- ready = []
- fd_events = _syscall_wrapper(self._wrap_poll, True, timeout=timeout)
- for fd, event_mask in fd_events:
- events = 0
- if event_mask & ~select.POLLIN:
- events |= EVENT_WRITE
- if event_mask & ~select.POLLOUT:
- events |= EVENT_READ
-
- key = self._key_from_fd(fd)
- if key:
- ready.append((key, events & key.events))
-
- return ready
-
- def close(self):
- self._devpoll.close()
- super(DevpollSelector, self).close()
-
- __all__.append('DevpollSelector')
-
-
-if hasattr(select, "kqueue"):
- class KqueueSelector(BaseSelector):
- """ Kqueue / Kevent-based selector """
- def __init__(self):
- super(KqueueSelector, self).__init__()
- self._kqueue = select.kqueue()
-
- def fileno(self):
- return self._kqueue.fileno()
-
- def register(self, fileobj, events, data=None):
- key = super(KqueueSelector, self).register(fileobj, events, data)
- if events & EVENT_READ:
- kevent = select.kevent(key.fd,
- select.KQ_FILTER_READ,
- select.KQ_EV_ADD)
-
- _syscall_wrapper(self._kqueue.control, False, [kevent], 0, 0)
-
- if events & EVENT_WRITE:
- kevent = select.kevent(key.fd,
- select.KQ_FILTER_WRITE,
- select.KQ_EV_ADD)
-
- _syscall_wrapper(self._kqueue.control, False, [kevent], 0, 0)
-
- return key
-
- def unregister(self, fileobj):
- key = super(KqueueSelector, self).unregister(fileobj)
- if key.events & EVENT_READ:
- kevent = select.kevent(key.fd,
- select.KQ_FILTER_READ,
- select.KQ_EV_DELETE)
- try:
- _syscall_wrapper(self._kqueue.control, False, [kevent], 0, 0)
- except _ERROR_TYPES:
- pass
- if key.events & EVENT_WRITE:
- kevent = select.kevent(key.fd,
- select.KQ_FILTER_WRITE,
- select.KQ_EV_DELETE)
- try:
- _syscall_wrapper(self._kqueue.control, False, [kevent], 0, 0)
- except _ERROR_TYPES:
- pass
-
- return key
-
- def select(self, timeout=None):
- if timeout is not None:
- timeout = max(timeout, 0)
-
- max_events = len(self._fd_to_key) * 2
- ready_fds = {}
-
- kevent_list = _syscall_wrapper(self._kqueue.control, True,
- None, max_events, timeout)
-
- for kevent in kevent_list:
- fd = kevent.ident
- event_mask = kevent.filter
- events = 0
- if event_mask == select.KQ_FILTER_READ:
- events |= EVENT_READ
- if event_mask == select.KQ_FILTER_WRITE:
- events |= EVENT_WRITE
-
- key = self._key_from_fd(fd)
- if key:
- if key.fd not in ready_fds:
- ready_fds[key.fd] = (key, events & key.events)
- else:
- old_events = ready_fds[key.fd][1]
- ready_fds[key.fd] = (key, (events | old_events) & key.events)
-
- return list(ready_fds.values())
-
- def close(self):
- self._kqueue.close()
- super(KqueueSelector, self).close()
-
- __all__.append('KqueueSelector')
-
-
-def _can_allocate(struct):
- """ Checks that select structs can be allocated by the underlying
- operating system, not just advertised by the select module. We don't
- check select() because we'll be hopeful that most platforms that
- don't have it available will not advertise it. (ie: GAE) """
- try:
- # select.poll() objects won't fail until used.
- if struct == 'poll':
- p = select.poll()
- p.poll(0)
-
- # All others will fail on allocation.
- else:
- getattr(select, struct)().close()
- return True
- except (OSError, AttributeError):
- return False
-
-
-# Python 3.5 uses a more direct route to wrap system calls to increase speed.
-if sys.version_info >= (3, 5):
- def _syscall_wrapper(func, _, *args, **kwargs):
- """ This is the short-circuit version of the below logic
- because in Python 3.5+ all selectors restart system calls. """
- return func(*args, **kwargs)
-else:
- def _syscall_wrapper(func, recalc_timeout, *args, **kwargs):
- """ Wrapper function for syscalls that could fail due to EINTR.
- All functions should be retried if there is time left in the timeout
- in accordance with PEP 475. """
- timeout = kwargs.get("timeout", None)
- if timeout is None:
- expires = None
- recalc_timeout = False
- else:
- timeout = float(timeout)
- if timeout < 0.0: # Timeout less than 0 treated as no timeout.
- expires = None
- else:
- expires = monotonic() + timeout
-
- args = list(args)
- if recalc_timeout and "timeout" not in kwargs:
- raise ValueError(
- "Timeout must be in args or kwargs to be recalculated")
-
- result = _SYSCALL_SENTINEL
- while result is _SYSCALL_SENTINEL:
- try:
- result = func(*args, **kwargs)
- # OSError is thrown by select.select
- # IOError is thrown by select.epoll.poll
- # select.error is thrown by select.poll.poll
- # Aren't we thankful for Python 3.x rework for exceptions?
- except (OSError, IOError, select.error) as e:
- # select.error wasn't a subclass of OSError in the past.
- errcode = None
- if hasattr(e, "errno"):
- errcode = e.errno
- elif hasattr(e, "args"):
- errcode = e.args[0]
-
- # Also test for the Windows equivalent of EINTR.
- is_interrupt = (errcode == errno.EINTR or (hasattr(errno, "WSAEINTR") and
- errcode == errno.WSAEINTR))
-
- if is_interrupt:
- if expires is not None:
- current_time = monotonic()
- if current_time > expires:
- raise OSError(errno=errno.ETIMEDOUT)
- if recalc_timeout:
- if "timeout" in kwargs:
- kwargs["timeout"] = expires - current_time
- continue
- raise
- return result
-
-
-# Choose the best implementation, roughly:
-# kqueue == devpoll == epoll > poll > select
-# select() also can't accept a FD > FD_SETSIZE (usually around 1024)
-def DefaultSelector():
- """ This function serves as a first call for DefaultSelector to
- detect if the select module is being monkey-patched incorrectly
- by eventlet, greenlet, and preserve proper behavior. """
- global _DEFAULT_SELECTOR
- if _DEFAULT_SELECTOR is None:
- if pycompat.isjython:
- _DEFAULT_SELECTOR = JythonSelectSelector
- elif _can_allocate('kqueue'):
- _DEFAULT_SELECTOR = KqueueSelector
- elif _can_allocate('devpoll'):
- _DEFAULT_SELECTOR = DevpollSelector
- elif _can_allocate('epoll'):
- _DEFAULT_SELECTOR = EpollSelector
- elif _can_allocate('poll'):
- _DEFAULT_SELECTOR = PollSelector
- elif hasattr(select, 'select'):
- _DEFAULT_SELECTOR = SelectSelector
- else: # Platform-specific: AppEngine
- raise RuntimeError('Platform does not have a selector.')
- return _DEFAULT_SELECTOR()
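
The wrapper deleted above only existed to emulate PEP 475 on Python < 3.5: retry a syscall that was interrupted by a signal (EINTR), shrinking the timeout so the caller's overall deadline still holds. A minimal sketch of that retry loop, with illustrative names (not Mercurial APIs); the deleted code additionally falls back to e.args[0] for old-style select.error and uses a monotonic clock, which would be preferable to time.time():

import errno
import select
import time

def retryoneintr(syscall, timeout=None):
    # Retry `syscall(timeout)` when it is interrupted by a signal (EINTR),
    # recomputing the remaining timeout so the caller's deadline is honoured.
    # Python >= 3.5 does this automatically (PEP 475), which is why the
    # deleted wrapper short-circuits there.
    deadline = None if timeout is None else time.time() + timeout
    while True:
        try:
            return syscall(timeout)
        except (OSError, IOError, select.error) as e:
            if getattr(e, 'errno', None) != errno.EINTR:
                raise
            if deadline is not None:
                timeout = deadline - time.time()
                if timeout <= 0:
                    raise

assert retryoneintr(lambda t: 'ok', timeout=1.0) == 'ok'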
--- a/mercurial/setdiscovery.py Mon Jan 08 16:07:51 2018 -0800
+++ b/mercurial/setdiscovery.py Mon Jan 22 17:53:02 2018 -0500
@@ -133,7 +133,8 @@
def findcommonheads(ui, local, remote,
initialsamplesize=100,
fullsamplesize=200,
- abortwhenunrelated=True):
+ abortwhenunrelated=True,
+ ancestorsof=None):
'''Return a tuple (common, anyincoming, remoteheads) used to identify
missing nodes from or in remote.
'''
@@ -141,7 +142,11 @@
roundtrips = 0
cl = local.changelog
- dag = dagutil.revlogdag(cl)
+ localsubset = None
+ if ancestorsof is not None:
+ rev = local.changelog.rev
+ localsubset = [rev(n) for n in ancestorsof]
+ dag = dagutil.revlogdag(cl, localsubset=localsubset)
# early exit if we know all the specified remote heads already
ui.debug("query 1; heads\n")
--- a/mercurial/simplemerge.py Mon Jan 08 16:07:51 2018 -0800
+++ b/mercurial/simplemerge.py Mon Jan 22 17:53:02 2018 -0500
@@ -418,6 +418,8 @@
The merged result is written into `localctx`.
"""
+ opts = pycompat.byteskwargs(opts)
+
def readctx(ctx):
# Merges were always run in the working copy before, which means
# they used decoded data, if the user defined any repository
--- a/mercurial/smartset.py Mon Jan 08 16:07:51 2018 -0800
+++ b/mercurial/smartset.py Mon Jan 22 17:53:02 2018 -0500
@@ -772,6 +772,16 @@
>>> xs.last() # cached
4
"""
+ def __new__(cls, gen, iterasc=None):
+ if iterasc is None:
+ typ = cls
+ elif iterasc:
+ typ = _generatorsetasc
+ else:
+ typ = _generatorsetdesc
+
+ return super(generatorset, cls).__new__(typ)
+
def __init__(self, gen, iterasc=None):
"""
gen: a generator producing the values for the generatorset.
@@ -782,13 +792,6 @@
self._genlist = []
self._finished = False
self._ascending = True
- if iterasc is not None:
- if iterasc:
- self.fastasc = self._iterator
- self.__contains__ = self._asccontains
- else:
- self.fastdesc = self._iterator
- self.__contains__ = self._desccontains
def __nonzero__(self):
# Do not use 'for r in self' because it will enforce the iteration
@@ -814,36 +817,6 @@
self._cache[x] = False
return False
- def _asccontains(self, x):
- """version of contains optimised for ascending generator"""
- if x in self._cache:
- return self._cache[x]
-
- # Use new values only, as existing values would be cached.
- for l in self._consumegen():
- if l == x:
- return True
- if l > x:
- break
-
- self._cache[x] = False
- return False
-
- def _desccontains(self, x):
- """version of contains optimised for descending generator"""
- if x in self._cache:
- return self._cache[x]
-
- # Use new values only, as existing values would be cached.
- for l in self._consumegen():
- if l == x:
- return True
- if l < x:
- break
-
- self._cache[x] = False
- return False
-
def __iter__(self):
if self._ascending:
it = self.fastasc
@@ -947,7 +920,45 @@
def __repr__(self):
d = {False: '-', True: '+'}[self._ascending]
- return '<%s%s>' % (type(self).__name__, d)
+ return '<%s%s>' % (type(self).__name__.lstrip('_'), d)
+
+class _generatorsetasc(generatorset):
+ """Special case of generatorset optimized for ascending generators."""
+
+ fastasc = generatorset._iterator
+
+ def __contains__(self, x):
+ if x in self._cache:
+ return self._cache[x]
+
+ # Use new values only, as existing values would be cached.
+ for l in self._consumegen():
+ if l == x:
+ return True
+ if l > x:
+ break
+
+ self._cache[x] = False
+ return False
+
+class _generatorsetdesc(generatorset):
+ """Special case of generatorset optimized for descending generators."""
+
+ fastdesc = generatorset._iterator
+
+ def __contains__(self, x):
+ if x in self._cache:
+ return self._cache[x]
+
+ # Use new values only, as existing values would be cached.
+ for l in self._consumegen():
+ if l == x:
+ return True
+ if l < x:
+ break
+
+ self._cache[x] = False
+ return False
def spanset(repo, start=0, end=None):
"""Create a spanset that represents a range of repository revisions
--- a/mercurial/sparse.py Mon Jan 08 16:07:51 2018 -0800
+++ b/mercurial/sparse.py Mon Jan 22 17:53:02 2018 -0500
@@ -12,7 +12,10 @@
import os
from .i18n import _
-from .node import nullid
+from .node import (
+ hex,
+ nullid,
+)
from . import (
error,
match as matchmod,
@@ -173,12 +176,12 @@
tempsignature = '0'
if signature is None or (includetemp and tempsignature is None):
- signature = hashlib.sha1(repo.vfs.tryread('sparse')).hexdigest()
+ signature = hex(hashlib.sha1(repo.vfs.tryread('sparse')).digest())
cache['signature'] = signature
if includetemp:
raw = repo.vfs.tryread('tempsparse')
- tempsignature = hashlib.sha1(raw).hexdigest()
+ tempsignature = hex(hashlib.sha1(raw).digest())
cache['tempsignature'] = tempsignature
return '%s %s' % (signature, tempsignature)
@@ -291,24 +294,9 @@
includes, excludes, profiles = patternsforrev(repo, rev)
if includes or excludes:
- # Explicitly include subdirectories of includes so
- # status will walk them down to the actual include.
- subdirs = set()
- for include in includes:
- # TODO consider using posix path functions here so Windows
- # \ directory separators don't come into play.
- dirname = os.path.dirname(include)
- # basename is used to avoid issues with absolute
- # paths (which on Windows can include the drive).
- while os.path.basename(dirname):
- subdirs.add(dirname)
- dirname = os.path.dirname(dirname)
-
matcher = matchmod.match(repo.root, '', [],
include=includes, exclude=excludes,
default='relpath')
- if subdirs:
- matcher = forceincludematcher(matcher, subdirs)
matchers.append(matcher)
except IOError:
pass
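
The hashing changes in this file (and the analogous ones in sslutil.py, store.py and subrepo.py below) swap hexdigest() for hex(...digest()) because hexdigest() returns a unicode str on Python 3, while node.hex (essentially binascii.hexlify) keeps the result as bytes. A quick self-contained illustration using binascii directly:

import binascii
import hashlib

data = b'sparse profile contents'

# On Python 3, hexdigest() yields str, which would leak unicode into code
# paths that expect bytes; hexlify(digest()) stays bytes throughout.
asstr = hashlib.sha1(data).hexdigest()
asbytes = binascii.hexlify(hashlib.sha1(data).digest())

assert asbytes == asstr.encode('ascii')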
--- a/mercurial/sshpeer.py Mon Jan 08 16:07:51 2018 -0800
+++ b/mercurial/sshpeer.py Mon Jan 22 17:53:02 2018 -0500
@@ -18,9 +18,9 @@
)
def _serverquote(s):
+ """quote a string for the remote shell ... which we assume is sh"""
if not s:
return s
- '''quote a string for the remote shell ... which we assume is sh'''
if re.match('[a-zA-Z0-9@%_+=:,./-]*$', s):
return s
return "'%s'" % s.replace("'", "'\\''")
@@ -136,6 +136,8 @@
sshcmd = self.ui.config("ui", "ssh")
remotecmd = self.ui.config("ui", "remotecmd")
+ sshaddenv = dict(self.ui.configitems("sshenv"))
+ sshenv = util.shellenviron(sshaddenv)
args = util.sshargs(sshcmd, self._host, self._user, self._port)
@@ -144,11 +146,11 @@
util.shellquote("%s init %s" %
(_serverquote(remotecmd), _serverquote(self._path))))
ui.debug('running %s\n' % cmd)
- res = ui.system(cmd, blockedtag='sshpeer')
+ res = ui.system(cmd, blockedtag='sshpeer', environ=sshenv)
if res != 0:
self._abort(error.RepoError(_("could not create remote repo")))
- self._validaterepo(sshcmd, args, remotecmd)
+ self._validaterepo(sshcmd, args, remotecmd, sshenv)
# Begin of _basepeer interface.
@@ -180,7 +182,7 @@
# End of _basewirecommands interface.
- def _validaterepo(self, sshcmd, args, remotecmd):
+ def _validaterepo(self, sshcmd, args, remotecmd, sshenv=None):
# cleanup up previous run
self._cleanup()
@@ -196,7 +198,7 @@
# no buffer allow the use of 'select'
# feel free to remove buffering and select usage when we ultimately
# move to threading.
- sub = util.popen4(cmd, bufsize=0)
+ sub = util.popen4(cmd, bufsize=0, env=sshenv)
self._pipeo, self._pipei, self._pipee, self._subprocess = sub
self._pipei = util.bufferedinputpipe(self._pipei)
@@ -204,8 +206,9 @@
self._pipeo = doublepipe(self.ui, self._pipeo, self._pipee)
def badresponse():
- self._abort(error.RepoError(_('no suitable response from '
- 'remote hg')))
+ msg = _("no suitable response from remote hg")
+ hint = self.ui.config("ui", "ssherrorhint")
+ self._abort(error.RepoError(msg, hint=hint))
try:
# skip any noise generated by remote shell
@@ -280,6 +283,17 @@
def _callstream(self, cmd, **args):
args = pycompat.byteskwargs(args)
+ if (self.ui.debugflag
+ and self.ui.configbool('devel', 'debug.peer-request')):
+ dbg = self.ui.debug
+ line = 'devel-peer-request: %s\n'
+ dbg(line % cmd)
+ for key, value in sorted(args.items()):
+ if not isinstance(value, dict):
+ dbg(line % ' %s: %d bytes' % (key, len(value)))
+ else:
+ for dk, dv in sorted(value.items()):
+ dbg(line % ' %s-%s: %d' % (key, dk, len(dv)))
self.ui.debug("sending %s command\n" % cmd)
self._pipeo.write("%s\n" % cmd)
_func, names = wireproto.commands[cmd]
--- a/mercurial/sshserver.py Mon Jan 08 16:07:51 2018 -0800
+++ b/mercurial/sshserver.py Mon Jan 22 17:53:02 2018 -0500
@@ -76,13 +76,7 @@
def sendstream(self, source):
write = self.fout.write
-
- if source.reader:
- gen = iter(lambda: source.reader.read(4096), '')
- else:
- gen = source.gen
-
- for chunk in gen:
+ for chunk in source.gen:
write(chunk)
self.fout.flush()
@@ -111,6 +105,7 @@
handlers = {
str: sendresponse,
wireproto.streamres: sendstream,
+ wireproto.streamres_legacy: sendstream,
wireproto.pushres: sendpushresponse,
wireproto.pusherr: sendpusherror,
wireproto.ooberror: sendooberror,
--- a/mercurial/sslutil.py Mon Jan 08 16:07:51 2018 -0800
+++ b/mercurial/sslutil.py Mon Jan 22 17:53:02 2018 -0500
@@ -17,6 +17,7 @@
from .i18n import _
from . import (
error,
+ node,
pycompat,
util,
)
@@ -96,13 +97,13 @@
# in this legacy code since we don't support SNI.
args = {
- 'keyfile': self._keyfile,
- 'certfile': self._certfile,
- 'server_side': server_side,
- 'cert_reqs': self.verify_mode,
- 'ssl_version': self.protocol,
- 'ca_certs': self._cacerts,
- 'ciphers': self._ciphers,
+ r'keyfile': self._keyfile,
+ r'certfile': self._certfile,
+ r'server_side': server_side,
+ r'cert_reqs': self.verify_mode,
+ r'ssl_version': self.protocol,
+ r'ca_certs': self._cacerts,
+ r'ciphers': self._ciphers,
}
return ssl.wrap_socket(socket, **args)
@@ -808,9 +809,9 @@
# If a certificate fingerprint is pinned, use it and only it to
# validate the remote cert.
peerfingerprints = {
- 'sha1': hashlib.sha1(peercert).hexdigest(),
- 'sha256': hashlib.sha256(peercert).hexdigest(),
- 'sha512': hashlib.sha512(peercert).hexdigest(),
+ 'sha1': node.hex(hashlib.sha1(peercert).digest()),
+ 'sha256': node.hex(hashlib.sha256(peercert).digest()),
+ 'sha512': node.hex(hashlib.sha512(peercert).digest()),
}
def fmtfingerprint(s):
--- a/mercurial/statichttprepo.py Mon Jan 08 16:07:51 2018 -0800
+++ b/mercurial/statichttprepo.py Mon Jan 22 17:53:02 2018 -0500
@@ -166,8 +166,6 @@
self.encodepats = None
self.decodepats = None
self._transref = None
- # Cache of types representing filtered repos.
- self._filteredrepotypes = {}
def _restrictcapabilities(self, caps):
caps = super(statichttprepository, self)._restrictcapabilities(caps)
--- a/mercurial/statprof.py Mon Jan 08 16:07:51 2018 -0800
+++ b/mercurial/statprof.py Mon Jan 22 17:53:02 2018 -0500
@@ -815,7 +815,6 @@
tos = sample.stack[0]
name = tos.function
path = simplifypath(tos.path)
- category = '%s:%d' % (path, tos.lineno)
stack = tuple((('%s:%d' % (simplifypath(frame.path), frame.lineno),
frame.function) for frame in sample.stack))
qstack = collections.deque(stack)
@@ -922,7 +921,7 @@
load_data(path=path)
- display(**displayargs)
+ display(**pycompat.strkwargs(displayargs))
return 0
--- a/mercurial/store.py Mon Jan 08 16:07:51 2018 -0800
+++ b/mercurial/store.py Mon Jan 22 17:53:02 2018 -0500
@@ -15,6 +15,7 @@
from .i18n import _
from . import (
error,
+ node,
policy,
pycompat,
util,
@@ -221,7 +222,7 @@
_maxshortdirslen = 8 * (_dirprefixlen + 1) - 4
def _hashencode(path, dotencode):
- digest = hashlib.sha1(path).hexdigest()
+ digest = node.hex(hashlib.sha1(path).digest())
le = lowerencode(path[5:]).split('/') # skips prefix 'data/' or 'meta/'
parts = _auxencode(le, dotencode)
basename = parts[-1]
--- a/mercurial/streamclone.py Mon Jan 08 16:07:51 2018 -0800
+++ b/mercurial/streamclone.py Mon Jan 22 17:53:02 2018 -0500
@@ -7,23 +7,27 @@
from __future__ import absolute_import
+import contextlib
+import os
import struct
+import tempfile
+import warnings
from .i18n import _
from . import (
branchmap,
+ cacheutil,
error,
phases,
store,
util,
)
-def canperformstreamclone(pullop, bailifbundle2supported=False):
+def canperformstreamclone(pullop, bundle2=False):
"""Whether it is possible to perform a streaming clone as part of pull.
- ``bailifbundle2supported`` will cause the function to return False if
- bundle2 stream clones are supported. It should only be called by the
- legacy stream clone code path.
+ ``bundle2`` will cause the function to only consider stream clones
+ performed through bundle2 (never through the legacy protocol).
Returns a tuple of (supported, requirements). ``supported`` is True if
streaming clone is supported and False otherwise. ``requirements`` is
@@ -35,18 +39,18 @@
bundle2supported = False
if pullop.canusebundle2:
- if 'v1' in pullop.remotebundle2caps.get('stream', []):
+ if 'v2' in pullop.remotebundle2caps.get('stream', []):
bundle2supported = True
# else
# Server doesn't support bundle2 stream clone or doesn't support
# the versions we support. Fall back and possibly allow legacy.
# Ensures legacy code path uses available bundle2.
- if bailifbundle2supported and bundle2supported:
+ if bundle2supported and not bundle2:
return False, None
# Ensures bundle2 doesn't try to do a stream clone if it isn't supported.
- #elif not bailifbundle2supported and not bundle2supported:
- # return False, None
+ elif bundle2 and not bundle2supported:
+ return False, None
# Streaming clone only works on empty repositories.
if len(repo):
@@ -235,10 +239,26 @@
def generatev1wireproto(repo):
"""Emit content for version 1 of streaming clone suitable for the wire.
- This is the data output from ``generatev1()`` with a header line
- indicating file count and byte size.
+ This is the data output from ``generatev1()`` with 2 header lines. The
+ first line indicates overall success. The second contains the file count
+ and byte size of the payload.
+
+ The success line contains "0" for success, "1" for stream generation not
+ allowed, and "2" for an error locking the repository (possibly indicating
+ a permissions error for the server process).
"""
- filecount, bytecount, it = generatev1(repo)
+ if not allowservergeneration(repo):
+ yield '1\n'
+ return
+
+ try:
+ filecount, bytecount, it = generatev1(repo)
+ except error.LockError:
+ yield '2\n'
+ return
+
+ # Indicates successful response.
+ yield '0\n'
yield '%d %d\n' % (filecount, bytecount)
for chunk in it:
yield chunk
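
Given the header format described in the docstring above, a client consuming this wire payload reads the success line first, then the "filecount bytecount" line. A minimal sketch of that client-side parse; the real handling lives elsewhere in Mercurial and is not part of this patch, so the function name and error messages are illustrative:

import io

def readstreamheader(fp):
    # Parse the two header lines documented in generatev1wireproto().
    status = fp.readline().strip()
    if status == b'1':
        raise RuntimeError('remote does not allow stream clone generation')
    if status == b'2':
        raise RuntimeError('remote failed to lock its repository')
    if status != b'0':
        raise RuntimeError('unexpected stream clone status: %r' % status)
    filecount, bytecount = map(int, fp.readline().split())
    return filecount, bytecount

assert readstreamheader(io.BytesIO(b'0\n3 12345\n')) == (3, 12345)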
@@ -412,3 +432,203 @@
def apply(self, repo):
return applybundlev1(repo, self._fh)
+
+# type of file to stream
+_fileappend = 0 # append only file
+_filefull = 1 # full snapshot file
+
+# Source of the file
+_srcstore = 's' # store (svfs)
+_srccache = 'c' # cache (cache)
+
+# This is its own function so extensions can override it.
+def _walkstreamfullstorefiles(repo):
+ """list snapshot file from the store"""
+ fnames = []
+ if not repo.publishing():
+ fnames.append('phaseroots')
+ return fnames
+
+def _filterfull(entry, copy, vfsmap):
+ """actually copy the snapshot files"""
+ src, name, ftype, data = entry
+ if ftype != _filefull:
+ return entry
+ return (src, name, ftype, copy(vfsmap[src].join(name)))
+
+@contextlib.contextmanager
+def maketempcopies():
+ """return a function to temporary copy file"""
+ files = []
+ try:
+ def copy(src):
+ fd, dst = tempfile.mkstemp()
+ os.close(fd)
+ files.append(dst)
+ util.copyfiles(src, dst, hardlink=True)
+ return dst
+ yield copy
+ finally:
+ for tmp in files:
+ util.tryunlink(tmp)
+
+def _makemap(repo):
+ """make a (src -> vfs) map for the repo"""
+ vfsmap = {
+ _srcstore: repo.svfs,
+ _srccache: repo.cachevfs,
+ }
+ # we keep repo.vfs out of the map on purpose, there are too many dangers
+ # (eg: .hg/hgrc)
+ assert repo.vfs not in vfsmap.values()
+
+ return vfsmap
+
+def _emit(repo, entries, totalfilesize):
+ """actually emit the stream bundle"""
+ vfsmap = _makemap(repo)
+ progress = repo.ui.progress
+ progress(_('bundle'), 0, total=totalfilesize, unit=_('bytes'))
+ with maketempcopies() as copy:
+ try:
+ # copy is delayed until we are in the try
+ entries = [_filterfull(e, copy, vfsmap) for e in entries]
+ yield None # this releases the lock on the repository
+ seen = 0
+
+ for src, name, ftype, data in entries:
+ vfs = vfsmap[src]
+ yield src
+ yield util.uvarintencode(len(name))
+ if ftype == _fileappend:
+ fp = vfs(name)
+ size = data
+ elif ftype == _filefull:
+ fp = open(data, 'rb')
+ size = util.fstat(fp).st_size
+ try:
+ yield util.uvarintencode(size)
+ yield name
+ if size <= 65536:
+ chunks = (fp.read(size),)
+ else:
+ chunks = util.filechunkiter(fp, limit=size)
+ for chunk in chunks:
+ seen += len(chunk)
+ progress(_('bundle'), seen, total=totalfilesize,
+ unit=_('bytes'))
+ yield chunk
+ finally:
+ fp.close()
+ finally:
+ progress(_('bundle'), None)
+
+def generatev2(repo):
+ """Emit content for version 2 of a streaming clone.
+
+ The data stream consists of the following entries:
+ 1) A char representing the file destination (e.g. store or cache)
+ 2) A varint containing the length of the filename
+ 3) A varint containing the length of file data
+ 4) N bytes containing the filename (the internal, store-agnostic form)
+ 5) N bytes containing the file data
+
+ Returns a 3-tuple of (file count, file size, data iterator).
+ """
+
+ with repo.lock():
+
+ entries = []
+ totalfilesize = 0
+
+ repo.ui.debug('scanning\n')
+ for name, ename, size in _walkstreamfiles(repo):
+ if size:
+ entries.append((_srcstore, name, _fileappend, size))
+ totalfilesize += size
+ for name in _walkstreamfullstorefiles(repo):
+ if repo.svfs.exists(name):
+ totalfilesize += repo.svfs.lstat(name).st_size
+ entries.append((_srcstore, name, _filefull, None))
+ for name in cacheutil.cachetocopy(repo):
+ if repo.cachevfs.exists(name):
+ totalfilesize += repo.cachevfs.lstat(name).st_size
+ entries.append((_srccache, name, _filefull, None))
+
+ chunks = _emit(repo, entries, totalfilesize)
+ first = next(chunks)
+ assert first is None
+
+ return len(entries), totalfilesize, chunks
+
+@contextlib.contextmanager
+def nested(*ctxs):
+ with warnings.catch_warnings():
+ # For some reason, Python decided 'nested' was deprecated without
+ # replacement. The officially advertised workaround for code that still
+ # needs the feature is to filter the deprecation warning.
+ warnings.filterwarnings("ignore", category=DeprecationWarning)
+ with contextlib.nested(*ctxs):
+ yield
+
+def consumev2(repo, fp, filecount, filesize):
+ """Apply the contents from a version 2 streaming clone.
+
+ Data is read from an object that only needs to provide a ``read(size)``
+ method.
+ """
+ with repo.lock():
+ repo.ui.status(_('%d files to transfer, %s of data\n') %
+ (filecount, util.bytecount(filesize)))
+
+ start = util.timer()
+ handledbytes = 0
+ progress = repo.ui.progress
+
+ progress(_('clone'), handledbytes, total=filesize, unit=_('bytes'))
+
+ vfsmap = _makemap(repo)
+
+ with repo.transaction('clone'):
+ ctxs = (vfs.backgroundclosing(repo.ui)
+ for vfs in vfsmap.values())
+ with nested(*ctxs):
+ for i in range(filecount):
+ src = fp.read(1)
+ vfs = vfsmap[src]
+ namelen = util.uvarintdecodestream(fp)
+ datalen = util.uvarintdecodestream(fp)
+
+ name = fp.read(namelen)
+
+ if repo.ui.debugflag:
+ repo.ui.debug('adding [%s] %s (%s)\n' %
+ (src, name, util.bytecount(datalen)))
+
+ with vfs(name, 'w') as ofp:
+ for chunk in util.filechunkiter(fp, limit=datalen):
+ handledbytes += len(chunk)
+ progress(_('clone'), handledbytes, total=filesize,
+ unit=_('bytes'))
+ ofp.write(chunk)
+
+ # force @filecache properties to be reloaded from
+ # streamclone-ed file at next access
+ repo.invalidate(clearfilecache=True)
+
+ elapsed = util.timer() - start
+ if elapsed <= 0:
+ elapsed = 0.001
+ progress(_('clone'), None)
+ repo.ui.status(_('transferred %s in %.1f seconds (%s/sec)\n') %
+ (util.bytecount(handledbytes), elapsed,
+ util.bytecount(handledbytes / elapsed)))
+
+def applybundlev2(repo, fp, filecount, filesize, requirements):
+ missingreqs = [r for r in requirements if r not in repo.supported]
+ if missingreqs:
+ raise error.Abort(_('unable to apply stream clone: '
+ 'unsupported format: %s') %
+ ', '.join(sorted(missingreqs)))
+
+ consumev2(repo, fp, filecount, filesize)
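
The v2 format above frames the filename and data lengths with util.uvarintencode / util.uvarintdecodestream, whose implementations are not part of this patch. Assuming the usual unsigned base-128 varint (7 payload bits per byte, high bit as continuation flag), a self-contained sketch of one framed entry looks like this:

import io

def uvarintencode(value):
    # Assumed stand-in for util.uvarintencode: base-128 unsigned varint,
    # continuation bit set on every byte except the last.
    out = bytearray()
    while True:
        byte = value & 0x7f
        value >>= 7
        if value:
            out.append(byte | 0x80)
        else:
            out.append(byte)
            return bytes(out)

def uvarintdecodestream(fp):
    # Counterpart of the encoder: read bytes until the continuation bit
    # is clear, accumulating 7 bits at a time.
    result = 0
    shift = 0
    while True:
        byte = ord(fp.read(1))
        result |= (byte & 0x7f) << shift
        if not byte & 0x80:
            return result
        shift += 7

# One v2 entry as described in the generatev2() docstring: destination char,
# name-length varint, data-length varint, name, data.
name, data = b'data/foo.i', b'0123456789abcdef'
entry = b's' + uvarintencode(len(name)) + uvarintencode(len(data)) + name + data

fp = io.BytesIO(entry)
src = fp.read(1)
namelen = uvarintdecodestream(fp)
datalen = uvarintdecodestream(fp)
assert (src, fp.read(namelen), fp.read(datalen)) == (b's', name, data)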
--- a/mercurial/subrepo.py Mon Jan 08 16:07:51 2018 -0800
+++ b/mercurial/subrepo.py Mon Jan 22 17:53:02 2018 -0500
@@ -55,13 +55,13 @@
def _getstorehashcachename(remotepath):
'''get a unique filename for the store hash cache of a remote repository'''
- return hashlib.sha1(_expandedabspath(remotepath)).hexdigest()[0:12]
+ return node.hex(hashlib.sha1(_expandedabspath(remotepath)).digest())[0:12]
class SubrepoAbort(error.Abort):
"""Exception class used to avoid handling a subrepo error more than once"""
def __init__(self, *args, **kw):
- self.subrepo = kw.pop('subrepo', None)
- self.cause = kw.pop('cause', None)
+ self.subrepo = kw.pop(r'subrepo', None)
+ self.cause = kw.pop(r'cause', None)
error.Abort.__init__(self, *args, **kw)
def annotatesubrepoerror(func):
@@ -389,24 +389,44 @@
if util.safehasattr(repo, '_subparent'):
source = util.url(repo._subsource)
if source.isabs():
- return str(source)
+ return bytes(source)
source.path = posixpath.normpath(source.path)
parent = _abssource(repo._subparent, push, abort=False)
if parent:
parent = util.url(util.pconvert(parent))
parent.path = posixpath.join(parent.path or '', source.path)
parent.path = posixpath.normpath(parent.path)
- return str(parent)
+ return bytes(parent)
else: # recursion reached top repo
+ path = None
if util.safehasattr(repo, '_subtoppath'):
- return repo._subtoppath
- if push and repo.ui.config('paths', 'default-push'):
- return repo.ui.config('paths', 'default-push')
- if repo.ui.config('paths', 'default'):
- return repo.ui.config('paths', 'default')
- if repo.shared():
- # chop off the .hg component to get the default path form
+ path = repo._subtoppath
+ elif push and repo.ui.config('paths', 'default-push'):
+ path = repo.ui.config('paths', 'default-push')
+ elif repo.ui.config('paths', 'default'):
+ path = repo.ui.config('paths', 'default')
+ elif repo.shared():
+ # chop off the .hg component to get the default path form. This has
+ # already run through vfsmod.vfs(..., realpath=True), so it doesn't
+ # have problems with 'C:'
return os.path.dirname(repo.sharedpath)
+ if path:
+ # issue5770: 'C:\' and 'C:' are not equivalent paths. The former is
+ # as expected: an absolute path to the root of the C: drive. The
+ # latter is a relative path, and works like so:
+ #
+ # C:\>cd C:\some\path
+ # C:\>D:
+ # D:\>python -c "import os; print os.path.abspath('C:')"
+ # C:\some\path
+ #
+ # D:\>python -c "import os; print os.path.abspath('C:relative')"
+ # C:\some\path\relative
+ if util.hasdriveletter(path):
+ if len(path) == 2 or path[2:3] not in br'\/':
+ path = os.path.abspath(path)
+ return path
+
if abort:
raise error.Abort(_("default path for subrepository not found"))
@@ -789,7 +809,7 @@
yield '# %s\n' % _expandedabspath(remotepath)
vfs = self._repo.vfs
for relname in filelist:
- filehash = hashlib.sha1(vfs.tryread(relname)).hexdigest()
+ filehash = node.hex(hashlib.sha1(vfs.tryread(relname)).digest())
yield '%s = %s\n' % (relname, filehash)
@propertycache
@@ -811,7 +831,7 @@
with self._repo.lock():
storehash = list(self._calcstorehash(remotepath))
vfs = self._cachestorehashvfs
- vfs.writelines(cachefile, storehash, mode='w', notindexed=True)
+ vfs.writelines(cachefile, storehash, mode='wb', notindexed=True)
def _getctx(self):
'''fetch the context for this subrepo revision, possibly a workingctx
@@ -841,11 +861,7 @@
if defpath != defpushpath:
addpathconfig('default-push', defpushpath)
- fp = self._repo.vfs("hgrc", "w", text=True)
- try:
- fp.write(''.join(lines))
- finally:
- fp.close()
+ self._repo.vfs.write('hgrc', util.tonativeeol(''.join(lines)))
@annotatesubrepoerror
def add(self, ui, match, prefix, explicitonly, **opts):
@@ -1154,24 +1170,24 @@
# 2. update the subrepo to the revision specified in
# the corresponding substate dictionary
self.ui.status(_('reverting subrepo %s\n') % substate[0])
- if not opts.get('no_backup'):
+ if not opts.get(r'no_backup'):
# Revert all files on the subrepo, creating backups
# Note that this will not recursively revert subrepos
# We could do it if there was a set:subrepos() predicate
opts = opts.copy()
- opts['date'] = None
- opts['rev'] = substate[1]
+ opts[r'date'] = None
+ opts[r'rev'] = substate[1]
self.filerevert(*pats, **opts)
# Update the repo to the revision specified in the given substate
- if not opts.get('dry_run'):
+ if not opts.get(r'dry_run'):
self.get(substate, overwrite=True)
def filerevert(self, *pats, **opts):
- ctx = self._repo[opts['rev']]
+ ctx = self._repo[opts[r'rev']]
parents = self._repo.dirstate.parents()
- if opts.get('all'):
+ if opts.get(r'all'):
pats = ['set:modified()']
else:
pats = []
@@ -1244,7 +1260,7 @@
if not self.ui.interactive():
# Making stdin be a pipe should prevent svn from behaving
# interactively even if we can't pass --non-interactive.
- extrakw['stdin'] = subprocess.PIPE
+ extrakw[r'stdin'] = subprocess.PIPE
# Starting in svn 1.5 --non-interactive is a global flag
# instead of being per-command, but we need to support 1.4 so
# we have to be intelligent about what commands take
@@ -1284,6 +1300,9 @@
raise error.Abort(_('cannot retrieve svn tool version'))
return (int(m.group(1)), int(m.group(2)))
+ def _svnmissing(self):
+ return not self.wvfs.exists('.svn')
+
def _wcrevs(self):
# Get the working directory revision as well as the last
# commit revision so we can compare the subrepo state with
@@ -1331,7 +1350,10 @@
return True, True, bool(missing)
return bool(changes), False, bool(missing)
+ @annotatesubrepoerror
def dirty(self, ignoreupdate=False, missing=False):
+ if self._svnmissing():
+ return self._state[1] != ''
wcchanged = self._wcchanged()
changed = wcchanged[0] or (missing and wcchanged[2])
if not changed:
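
The issue5770 hunk above only normalizes drive-relative paths ('C:' or 'C:relative'), leaving drive-rooted ones ('C:\...', 'C:/...') alone. A standalone restatement of the added check, assuming util.hasdriveletter simply tests for a leading '<letter>:' prefix:

def isdriverelative(path):
    # Mirrors the added condition: a bare drive ('C:') or a drive followed by
    # anything other than a path separator ('C:relative') is relative to that
    # drive's current directory and needs os.path.abspath().
    hasdriveletter = (len(path) >= 2 and path[1:2] == b':'
                      and path[0:1].isalpha())
    return hasdriveletter and (len(path) == 2 or path[2:3] not in (b'\\', b'/'))

assert isdriverelative(b'C:')
assert isdriverelative(b'C:relative')
assert not isdriverelative(b'C:\\some\\path')
assert not isdriverelative(b'C:/some/path')
assert not isdriverelative(b'/posix/path')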
--- a/mercurial/templatefilters.py Mon Jan 08 16:07:51 2018 -0800
+++ b/mercurial/templatefilters.py Mon Jan 22 17:53:02 2018 -0500
@@ -348,6 +348,11 @@
"""Date. Returns a date like "2006-09-18"."""
return util.shortdate(text)
+@templatefilter('slashpath')
+def slashpath(path):
+ """Any text. Replaces the native path separator with slash."""
+ return util.pconvert(path)
+
@templatefilter('splitlines')
def splitlines(text):
"""Any text. Split text into a list of lines."""
--- a/mercurial/templatekw.py Mon Jan 08 16:07:51 2018 -0800
+++ b/mercurial/templatekw.py Mon Jan 22 17:53:02 2018 -0500
@@ -17,6 +17,7 @@
encoding,
error,
hbisect,
+ i18n,
obsutil,
patch,
pycompat,
@@ -301,6 +302,30 @@
return getrenamed
+def getlogcolumns():
+ """Return a dict of log column labels"""
+ _ = pycompat.identity # temporarily disable gettext
+ # i18n: column positioning for "hg log"
+ columns = _('bookmark: %s\n'
+ 'branch: %s\n'
+ 'changeset: %s\n'
+ 'copies: %s\n'
+ 'date: %s\n'
+ 'extra: %s=%s\n'
+ 'files+: %s\n'
+ 'files-: %s\n'
+ 'files: %s\n'
+ 'instability: %s\n'
+ 'manifest: %s\n'
+ 'obsolete: %s\n'
+ 'parent: %s\n'
+ 'phase: %s\n'
+ 'summary: %s\n'
+ 'tag: %s\n'
+ 'user: %s\n')
+ return dict(zip([s.split(':', 1)[0] for s in columns.splitlines()],
+ i18n._(columns).splitlines(True)))
+
# default templates internally used for rendering of lists
defaulttempl = {
'parent': '{rev}:{node|formatnode} ',
@@ -513,6 +538,8 @@
return '@'
elif ctx.obsolete():
return 'x'
+ elif ctx.isunstable():
+ return '*'
elif ctx.closesbranch():
return '_'
else:
@@ -608,6 +635,7 @@
# the verbosity templatekw available.
succsandmarkers = showsuccsandmarkers(**args)
+ args = pycompat.byteskwargs(args)
ui = args['ui']
values = []
@@ -816,7 +844,7 @@
@templatekeyword('phaseidx')
def showphaseidx(repo, ctx, templ, **args):
- """Integer. The changeset phase index."""
+ """Integer. The changeset phase index. (ADVANCED)"""
return ctx.phase()
@templatekeyword('rev')
@@ -860,12 +888,6 @@
"""List of strings. Any tags associated with the changeset."""
return shownames('tags', **args)
-def loadkeyword(ui, extname, registrarobj):
- """Load template keyword from specified registrarobj
- """
- for name, func in registrarobj._table.iteritems():
- keywords[name] = func
-
@templatekeyword('termwidth')
def showtermwidth(repo, ctx, templ, **args):
"""Integer. The width of the current terminal."""
@@ -891,5 +913,24 @@
return showlist('instability', args['ctx'].instabilities(), args,
plural='instabilities')
+@templatekeyword('verbosity')
+def showverbosity(ui, **args):
+ """String. The current output verbosity in 'debug', 'quiet', 'verbose',
+ or ''."""
+ # see cmdutil.changeset_templater for priority of these flags
+ if ui.debugflag:
+ return 'debug'
+ elif ui.quiet:
+ return 'quiet'
+ elif ui.verbose:
+ return 'verbose'
+ return ''
+
+def loadkeyword(ui, extname, registrarobj):
+ """Load template keyword from specified registrarobj
+ """
+ for name, func in registrarobj._table.iteritems():
+ keywords[name] = func
+
# tell hggettext to extract docstrings from these functions:
i18nfunctions = keywords.values()
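
getlogcolumns() above builds its dict by zipping the untranslated labels (gettext is temporarily disabled via pycompat.identity) against the translated lines, so the keys stay stable English identifiers while the values carry the localized, aligned column text. A small self-contained illustration with a stand-in translation function:

def fakegettext(s):
    # stand-in for i18n._; pretend the catalog upper-cases everything
    return s.upper()

columns = ('bookmark:    %s\n'
           'changeset:   %s\n'
           'user:        %s\n')

# Keys come from the untranslated labels (text before the first ':'), values
# are the translated lines, trailing newlines preserved by splitlines(True).
logcolumns = dict(zip([s.split(':', 1)[0] for s in columns.splitlines()],
                      fakegettext(columns).splitlines(True)))

assert sorted(logcolumns) == ['bookmark', 'changeset', 'user']
assert logcolumns['changeset'].startswith('CHANGESET:')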
--- a/mercurial/templater.py Mon Jan 08 16:07:51 2018 -0800
+++ b/mercurial/templater.py Mon Jan 22 17:53:02 2018 -0500
@@ -184,6 +184,8 @@
return parsed, n + 1
parseres, pos = p.parse(tokenize(tmpl, n + 1, stop, '}'))
+ if not tmpl.endswith('}', n + 1, pos):
+ raise error.ParseError(_("invalid token"), pos)
parsed.append(parseres)
if quote:
@@ -257,6 +259,8 @@
def compileexp(exp, context, curmethods):
"""Compile parsed template tree to (func, data) pair"""
+ if not exp:
+ raise error.ParseError(_("missing argument"))
t = exp[0]
if t in curmethods:
return curmethods[t](exp, context)
@@ -382,9 +386,7 @@
raise error.Abort(_("recursive reference '%s' in template") % key)
def runsymbol(context, mapping, key, default=''):
- v = mapping.get(key)
- if v is None:
- v = context._defaults.get(key)
+ v = context.symbol(mapping, key)
if v is None:
# put poison to cut recursion. we can't move this to parsing phase
# because "x = {x}" is allowed if "x" is a keyword. (issue4758)
@@ -395,7 +397,11 @@
except TemplateNotFound:
v = default
if callable(v):
- return v(**pycompat.strkwargs(mapping))
+ # TODO: templatekw functions will be updated to take (context, mapping)
+ # pair instead of **props
+ props = context._resources.copy()
+ props.update(mapping)
+ return v(**pycompat.strkwargs(props))
return v
def buildtemplate(exp, context):
@@ -626,7 +632,7 @@
return [s]
return []
- ctx = mapping['ctx']
+ ctx = context.resource(mapping, 'ctx')
chunks = ctx.diff(match=ctx.match([], getpatterns(0), getpatterns(1)))
return ''.join(chunks)
@@ -639,8 +645,8 @@
raise error.ParseError(_('extdata expects one argument'))
source = evalstring(context, mapping, args['source'])
- cache = mapping['cache'].setdefault('extdata', {})
- ctx = mapping['ctx']
+ cache = context.resource(mapping, 'cache').setdefault('extdata', {})
+ ctx = context.resource(mapping, 'ctx')
if source in cache:
data = cache[source]
else:
@@ -656,10 +662,13 @@
raise error.ParseError(_("files expects one argument"))
raw = evalstring(context, mapping, args[0])
- ctx = mapping['ctx']
+ ctx = context.resource(mapping, 'ctx')
m = ctx.match([raw])
files = list(ctx.matches(m))
- return templatekw.showlist("file", files, mapping)
+ # TODO: pass (context, mapping) pair to keyword function
+ props = context._resources.copy()
+ props.update(mapping)
+ return templatekw.showlist("file", files, props)
@templatefunc('fill(text[, width[, initialident[, hangindent]]])')
def fill(context, mapping, args):
@@ -692,7 +701,7 @@
# i18n: "formatnode" is a keyword
raise error.ParseError(_("formatnode expects one argument"))
- ui = mapping['ui']
+ ui = context.resource(mapping, 'ui')
node = evalstring(context, mapping, args[0])
if ui.debugflag:
return node
@@ -858,7 +867,7 @@
# i18n: "label" is a keyword
raise error.ParseError(_("label expects two arguments"))
- ui = mapping['ui']
+ ui = context.resource(mapping, 'ui')
thing = evalstring(context, mapping, args[1])
# preserve unknown symbol as literal so effects like 'red', 'bold',
# etc. don't need to be quoted
@@ -880,7 +889,10 @@
if len(args) == 1:
pattern = evalstring(context, mapping, args[0])
- return templatekw.showlatesttags(pattern, **mapping)
+ # TODO: pass (context, mapping) pair to keyword function
+ props = context._resources.copy()
+ props.update(mapping)
+ return templatekw.showlatesttags(pattern, **pycompat.strkwargs(props))
@templatefunc('localdate(date[, tz])')
def localdate(context, mapping, args):
@@ -1005,17 +1017,18 @@
"obsmakers")
raise error.ParseError(msg)
-@templatefunc('obsfateverb(successors)')
+@templatefunc('obsfateverb(successors, markers)')
def obsfateverb(context, mapping, args):
"""Compute obsfate related information based on successors (EXPERIMENTAL)"""
- if len(args) != 1:
+ if len(args) != 2:
# i18n: "obsfateverb" is a keyword
- raise error.ParseError(_("obsfateverb expects one arguments"))
+ raise error.ParseError(_("obsfateverb expects two arguments"))
successors = evalfuncarg(context, mapping, args[0])
+ markers = evalfuncarg(context, mapping, args[1])
try:
- return obsutil.successorsetverb(successors)
+ return obsutil.obsfateverb(successors, markers)
except TypeError:
# i18n: "obsfateverb" is a keyword
errmsg = _("obsfateverb first argument should be countable")
@@ -1029,7 +1042,7 @@
# i18n: "relpath" is a keyword
raise error.ParseError(_("relpath expects one argument"))
- repo = mapping['ctx'].repo()
+ repo = context.resource(mapping, 'ctx').repo()
path = evalstring(context, mapping, args[0])
return repo.pathto(path)
@@ -1042,7 +1055,7 @@
raise error.ParseError(_("revset expects one or more arguments"))
raw = evalstring(context, mapping, args[0])
- ctx = mapping['ctx']
+ ctx = context.resource(mapping, 'ctx')
repo = ctx.repo()
def query(expr):
@@ -1054,7 +1067,8 @@
revs = query(revsetlang.formatspec(raw, *formatargs))
revs = list(revs)
else:
- revsetcache = mapping['cache'].setdefault("revsetcache", {})
+ cache = context.resource(mapping, 'cache')
+ revsetcache = cache.setdefault("revsetcache", {})
if raw in revsetcache:
revs = revsetcache[raw]
else:
@@ -1062,7 +1076,11 @@
revs = list(revs)
revsetcache[raw] = revs
- return templatekw.showrevslist("revision", revs, **mapping)
+ # TODO: pass (context, mapping) pair to keyword function
+ props = context._resources.copy()
+ props.update(mapping)
+ return templatekw.showrevslist("revision", revs,
+ **pycompat.strkwargs(props))
@templatefunc('rstdoc(text, style)')
def rstdoc(context, mapping, args):
@@ -1114,7 +1132,7 @@
# _partialmatch() of filtered changelog could take O(len(repo)) time,
# which would be unacceptably slow. so we look for hash collision in
# unfiltered space, which means some hashes may be slightly longer.
- cl = mapping['ctx']._repo.unfiltered().changelog
+ cl = context.resource(mapping, 'ctx')._repo.unfiltered().changelog
return cl.shortest(node, minlength)
@templatefunc('strip(text[, chars])')
@@ -1289,17 +1307,42 @@
filter uses function to transform value. syntax is
{key|filter1|filter2|...}.'''
- def __init__(self, loader, filters=None, defaults=None, aliases=()):
+ def __init__(self, loader, filters=None, defaults=None, resources=None,
+ aliases=()):
self._loader = loader
if filters is None:
filters = {}
self._filters = filters
if defaults is None:
defaults = {}
+ if resources is None:
+ resources = {}
self._defaults = defaults
+ self._resources = resources
self._aliasmap = _aliasrules.buildmap(aliases)
self._cache = {} # key: (func, data)
+ def symbol(self, mapping, key):
+ """Resolve symbol to value or function; None if nothing found"""
+ v = None
+ if key not in self._resources:
+ v = mapping.get(key)
+ if v is None:
+ v = self._defaults.get(key)
+ return v
+
+ def resource(self, mapping, key):
+ """Return internal data (e.g. cache) used for keyword/function
+ evaluation"""
+ v = None
+ if key in self._resources:
+ v = mapping.get(key)
+ if v is None:
+ v = self._resources.get(key)
+ if v is None:
+ raise error.Abort(_('template resource not available: %s') % key)
+ return v
+
def _load(self, t):
'''load, parse, and cache a template'''
if t not in self._cache:
@@ -1393,17 +1436,27 @@
class templater(object):
- def __init__(self, filters=None, defaults=None, cache=None, aliases=(),
- minchunk=1024, maxchunk=65536):
- '''set up template engine.
- filters is dict of functions. each transforms a value into another.
- defaults is dict of default map definitions.
- aliases is list of alias (name, replacement) pairs.
- '''
+ def __init__(self, filters=None, defaults=None, resources=None,
+ cache=None, aliases=(), minchunk=1024, maxchunk=65536):
+ """Create template engine optionally with preloaded template fragments
+
+ - ``filters``: a dict of functions to transform a value into another.
+ - ``defaults``: a dict of symbol values/functions; may be overridden
+ by a ``mapping`` dict.
+ - ``resources``: a dict of internal data (e.g. cache), inaccessible
+ from user template; may be overridden by a ``mapping`` dict.
+ - ``cache``: a dict of preloaded template fragments.
+ - ``aliases``: a list of alias (name, replacement) pairs.
+
+ self.cache may be updated later to register additional template
+ fragments.
+ """
if filters is None:
filters = {}
if defaults is None:
defaults = {}
+ if resources is None:
+ resources = {}
if cache is None:
cache = {}
self.cache = cache.copy()
@@ -1411,15 +1464,17 @@
self.filters = templatefilters.filters.copy()
self.filters.update(filters)
self.defaults = defaults
+ self._resources = {'templ': self}
+ self._resources.update(resources)
self._aliases = aliases
self.minchunk, self.maxchunk = minchunk, maxchunk
self.ecache = {}
@classmethod
- def frommapfile(cls, mapfile, filters=None, defaults=None, cache=None,
- minchunk=1024, maxchunk=65536):
+ def frommapfile(cls, mapfile, filters=None, defaults=None, resources=None,
+ cache=None, minchunk=1024, maxchunk=65536):
"""Create templater from the specified map file"""
- t = cls(filters, defaults, cache, [], minchunk, maxchunk)
+ t = cls(filters, defaults, resources, cache, [], minchunk, maxchunk)
cache, tmap, aliases = _readmapfile(mapfile)
t.cache.update(cache)
t.map = tmap
@@ -1456,7 +1511,7 @@
except KeyError:
raise error.Abort(_('invalid template engine: %s') % ttype)
self.ecache[ttype] = ecls(self.load, self.filters, self.defaults,
- self._aliases)
+ self._resources, self._aliases)
proc = self.ecache[ttype]
stream = proc.process(t, mapping)
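
The new symbol()/resource() pair splits the old flat mapping into two namespaces: user-visible symbols resolve from the mapping (unless the key is reserved as a resource) and then the defaults, while resources resolve from the mapping and then the engine's own resource table, failing loudly when absent. A dictionary-only sketch of that lookup order (illustrative, not the engine class itself):

class lookuptable(object):
    def __init__(self, defaults, resources):
        self._defaults = defaults      # symbol fallbacks (e.g. keywords)
        self._resources = resources    # internal objects (e.g. ui, cache)

    def symbol(self, mapping, key):
        # user symbols never see resource keys; mapping wins over defaults
        v = None
        if key not in self._resources:
            v = mapping.get(key)
        if v is None:
            v = self._defaults.get(key)
        return v

    def resource(self, mapping, key):
        # resources may be overridden per-mapping, otherwise use the table
        v = None
        if key in self._resources:
            v = mapping.get(key)
        if v is None:
            v = self._resources.get(key)
        if v is None:
            raise KeyError('template resource not available: %s' % key)
        return v

t = lookuptable(defaults={'rev': 42}, resources={'cache': {}})
assert t.symbol({'rev': 7}, 'rev') == 7          # mapping beats defaults
assert t.symbol({}, 'rev') == 42                 # falls back to defaults
assert t.symbol({'cache': {}}, 'cache') is None  # resources are hidden symbols
assert t.resource({}, 'cache') == {}             # served from the table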
--- a/mercurial/templates/gitweb/changelogentry.tmpl Mon Jan 08 16:07:51 2018 -0800
+++ b/mercurial/templates/gitweb/changelogentry.tmpl Mon Jan 22 17:53:02 2018 -0500
@@ -1,5 +1,9 @@
<div>
-<a class="title" href="{url|urlescape}rev/{node|short}{sessionvars%urlparameter}"><span class="age">{date|rfc822date}</span>{desc|strip|firstline|escape|nonempty}<span class="logtags"> {inbranch%inbranchtag}{branches%branchtag}{tags%tagtag}{bookmarks%bookmarktag}</span></a>
+ <a class="title" href="{url|urlescape}rev/{node|short}{sessionvars%urlparameter}">
+ <span class="age">{date|rfc822date}</span>
+ {desc|strip|firstline|escape|nonempty}
+ {alltags}
+ </a>
</div>
<div class="title_text">
<div class="log_link">
--- a/mercurial/templates/gitweb/changeset.tmpl Mon Jan 08 16:07:51 2018 -0800
+++ b/mercurial/templates/gitweb/changeset.tmpl Mon Jan 22 17:53:02 2018 -0500
@@ -30,7 +30,10 @@
</div>
<div>
-<a class="title" href="{url|urlescape}raw-rev/{node|short}">{desc|strip|escape|firstline|nonempty} <span class="logtags">{inbranch%inbranchtag}{branches%branchtag}{tags%tagtag}{bookmarks%bookmarktag}</span></a>
+ <a class="title" href="{url|urlescape}raw-rev/{node|short}">
+ {desc|strip|escape|firstline|nonempty}
+ {alltags}
+ </a>
</div>
<div class="title_text">
<table cellspacing="0">
@@ -41,6 +44,7 @@
<td>changeset {rev}</td>
<td style="font-family:monospace"><a class="list" href="{url|urlescape}rev/{node|short}{sessionvars%urlparameter}">{node|short}</a></td>
</tr>
+{if(obsolete, '<tr><td>obsolete</td><td>{succsandmarkers%obsfateentry}</td></tr>')}
{ifeq(count(parent), '2', parent%changesetparentdiff, parent%changesetparent)}
{child%changesetchild}
</table></div>
--- a/mercurial/templates/gitweb/filelog.tmpl Mon Jan 08 16:07:51 2018 -0800
+++ b/mercurial/templates/gitweb/filelog.tmpl Mon Jan 22 17:53:02 2018 -0500
@@ -36,7 +36,7 @@
<div class="title" >
{file|urlescape}{if(linerange,
-' (following lines {linerange}{if(descend, ', descending')} <a href="{url|urlescape}log/{symrev}/{file|urlescape}{sessionvars%urlparameter}">back to filelog</a>)')}
+' (following lines {linerange}{if(descend, ', descending')} <a href="{url|urlescape}log/{symrev}/{file|urlescape}{sessionvars%urlparameter}">all revisions for this file</a>)')}
</div>
<table>
--- a/mercurial/templates/gitweb/graph.tmpl Mon Jan 08 16:07:51 2018 -0800
+++ b/mercurial/templates/gitweb/graph.tmpl Mon Jan 22 17:53:02 2018 -0500
@@ -4,7 +4,6 @@
href="{url|urlescape}atom-log" title="Atom feed for {repo|escape}"/>
<link rel="alternate" type="application/rss+xml"
href="{url|urlescape}rss-log" title="RSS feed for {repo|escape}"/>
-<!--[if IE]><script type="text/javascript" src="{staticurl|urlescape}excanvas.js"></script><![endif]-->
</head>
<body>
@@ -37,66 +36,15 @@
<noscript>The revision graph only works with JavaScript-enabled browsers.</noscript>
<div id="wrapper">
-<ul id="nodebgs"></ul>
-<canvas id="graph" width="{canvaswidth}" height="{canvasheight}"></canvas>
-<ul id="graphnodes"></ul>
+<canvas id="graph"></canvas>
+<ul id="graphnodes">{nodes%graphentry}</ul>
</div>
<script{if(nonce, ' nonce="{nonce}"')}>
-<!-- hide script content
-
var data = {jsdata|json};
var graph = new Graph();
graph.scale({bg_height});
-
-graph.vertex = function(x, y, color, parity, cur) \{
-
- this.ctx.beginPath();
- color = this.setColor(color, 0.25, 0.75);
- this.ctx.arc(x, y, radius, 0, Math.PI * 2, true);
- this.ctx.fill();
-
- var bg = '<li class="bg parity' + parity + '"></li>';
- var left = (this.bg_height - this.box_size) + (this.columns + 1) * this.box_size;
- var nstyle = 'padding-left: ' + left + 'px;';
-
- var tagspan = '';
- if (cur[7].length || cur[8].length || (cur[6][0] != 'default' || cur[6][1])) \{
- tagspan = '<span class="logtags">';
- if (cur[6][1]) \{
- tagspan += '<span class="branchtag" title="' + cur[6][0] + '">';
- tagspan += cur[6][0] + '</span> ';
- } else if (!cur[6][1] && cur[6][0] != 'default') \{
- tagspan += '<span class="inbranchtag" title="' + cur[6][0] + '">';
- tagspan += cur[6][0] + '</span> ';
- }
- if (cur[7].length) \{
- for (var t in cur[7]) \{
- var tag = cur[7][t];
- tagspan += '<span class="tagtag">' + tag + '</span> ';
- }
- }
- if (cur[8].length) \{
- for (var t in cur[8]) \{
- var bookmark = cur[8][t];
- tagspan += '<span class="bookmarktag">' + bookmark + '</span> ';
- }
- }
- tagspan += '</span>';
- }
-
- var item = '<li style="' + nstyle + '"><span class="desc">';
- item += '<a class="list" href="{url|urlescape}rev/' + cur[0] + '{sessionvars%urlparameter}" title="' + cur[0] + '"><b>' + cur[3] + '</b></a>';
- item += '</span> ' + tagspan + '';
- item += '<span class="info">' + cur[5] + ', by ' + cur[4] + '</span></li>';
-
- return [bg, item];
-
-}
-
graph.render(data);
-
-// stop hiding script -->
</script>
<div class="extra_nav">
@@ -107,9 +55,12 @@
<script type="text/javascript"{if(nonce, ' nonce="{nonce}"')}>
ajaxScrollInit(
- '{url|urlescape}graph/{rev}?revcount=%next%&style={style}',
- {revcount}+60,
- function (htmlText, previousVal) \{ return previousVal + 60; },
+ '{url|urlescape}graph/%next%{graphvars%urlparameter}',
+ '{nextentry%"{node}"}', <!-- NEXTHASH
+ function (htmlText) \{
+ var m = htmlText.match(/'(\w+)', <!-- NEXTHASH/);
+ return m ? m[1] : null;
+ },
'#wrapper',
'<div class="%class%" style="text-align: center;">%text%</div>',
'graph'
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/mercurial/templates/gitweb/graphentry.tmpl Mon Jan 22 17:53:02 2018 -0500
@@ -0,0 +1,9 @@
+<li class="parity{parity}" data-node="{node|short}">
+ <div class="fg">
+ <span class="desc">
+ <a class="list" href="{url|urlescape}rev/{node|short}{sessionvars%urlparameter}"><b>{desc|strip|firstline|escape|nonempty}</b></a>
+ </span>
+ {alltags}
+ <div class="info">{date|age}, by {author|person}</div>
+ </div>
+</li>
--- a/mercurial/templates/gitweb/manifest.tmpl Mon Jan 08 16:07:51 2018 -0800
+++ b/mercurial/templates/gitweb/manifest.tmpl Mon Jan 22 17:53:02 2018 -0500
@@ -28,7 +28,7 @@
{searchform}
</div>
-<div class="title">{path|escape} <span class="logtags">{inbranch%inbranchtag}{branches%branchtag}{tags%tagtag}{bookmarks%bookmarktag}</span></div>
+<div class="title">{path|escape} {alltags}</div>
<table cellspacing="0">
<tr class="parity{upparity}">
<td style="font-family:monospace">drwxr-xr-x</td>
--- a/mercurial/templates/gitweb/map Mon Jan 08 16:07:51 2018 -0800
+++ b/mercurial/templates/gitweb/map Mon Jan 22 17:53:02 2018 -0500
@@ -262,10 +262,20 @@
</tr>'
shortlog = shortlog.tmpl
graph = graph.tmpl
+graphentry = graphentry.tmpl
+phasetag = '{ifeq(phase, 'public', '', '<span class="phasetag" title="{phase|escape}">{phase|escape}</span> ')}'
+obsoletetag = '{if(obsolete, '<span class="obsoletetag" title="obsolete">obsolete</span> ')}'
+instabilitytag = '<span class="instabilitytag" title="{instability|escape}">{instability|escape}</span> '
tagtag = '<span class="tagtag" title="{name|escape}">{name|escape}</span> '
branchtag = '<span class="branchtag" title="{name|escape}">{name|escape}</span> '
inbranchtag = '<span class="inbranchtag" title="{name|escape}">{name|escape}</span> '
bookmarktag = '<span class="bookmarktag" title="{name|escape}">{name|escape}</span> '
+alltags = '<span class="logtags">{phasetag}{obsoletetag}{instabilities%instabilitytag}{inbranch%inbranchtag}{branches%branchtag}{tags%tagtag}{bookmarks%bookmarktag}</span>'
+successorlink = '<a class="list" href="{url|urlescape}rev/{node|short}{sessionvars%urlparameter}">{node|short}</a> '
+obsfatesuccessors = '{if(successors, ' as ')}{successors%successorlink}'
+obsfateverb = '{obsfateverb(successors, markers)}'
+obsfateoperations = '{if(obsfateoperations(markers), ' using {join(obsfateoperations(markers), ', ')}')}'
+obsfateentry = '{obsfateverb}{obsfateoperations}{obsfatesuccessors}'
shortlogentry = '
<tr class="parity{parity}">
<td class="age"><i class="age">{date|rfc822date}</i></td>
@@ -273,7 +283,7 @@
<td>
<a class="list" href="{url|urlescape}rev/{node|short}{sessionvars%urlparameter}">
<b>{desc|strip|firstline|escape|nonempty}</b>
- <span class="logtags">{inbranch%inbranchtag}{branches%branchtag}{tags%tagtag}{bookmarks%bookmarktag}</span>
+ {alltags}
</a>
</td>
<td class="link" nowrap>
@@ -288,7 +298,7 @@
<td>
<a class="list" href="{url|urlescape}rev/{node|short}{sessionvars%urlparameter}">
<b>{desc|strip|firstline|escape|nonempty}</b>
- <span class="logtags">{inbranch%inbranchtag}{branches%branchtag}{tags%tagtag}{bookmarks%bookmarktag}</span>
+ {alltags}
</a>
</td>
<td class="link">
--- a/mercurial/templates/gitweb/shortlog.tmpl Mon Jan 08 16:07:51 2018 -0800
+++ b/mercurial/templates/gitweb/shortlog.tmpl Mon Jan 22 17:53:02 2018 -0500
@@ -41,7 +41,7 @@
ajaxScrollInit(
'{url|urlescape}shortlog/%next%{sessionvars%urlparameter}',
'{nextentry%"{node}"}', <!-- NEXTHASH
- function (htmlText, previousVal) \{
+ function (htmlText) \{
var m = htmlText.match(/'(\w+)', <!-- NEXTHASH/);
return m ? m[1] : null;
},
--- a/mercurial/templates/gitweb/summary.tmpl Mon Jan 08 16:07:51 2018 -0800
+++ b/mercurial/templates/gitweb/summary.tmpl Mon Jan 22 17:53:02 2018 -0500
@@ -31,7 +31,7 @@
<table cellspacing="0">
<tr><td>description</td><td>{desc}</td></tr>
<tr><td>owner</td><td>{owner|obfuscate}</td></tr>
-<tr><td>last change</td><td>{lastchange|rfc822date}</td></tr>
+<tr><td>last change</td><td class="date age">{lastchange|rfc822date}</td></tr>
</table>
<div><a class="title" href="{url|urlescape}shortlog{sessionvars%urlparameter}">changes</a></div>
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/mercurial/templates/json/graph.tmpl Mon Jan 22 17:53:02 2018 -0500
@@ -0,0 +1,5 @@
+\{
+ "node": {node|json},
+ "changeset_count": {changesets|json},
+ "changesets": [{join(nodes%graphentry, ", ")}]
+}
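
With json/graph.tmpl and the graphentry keyword added here (and the old '"not yet implemented"' stub removed further down in json/map), the JSON theme exposes the graph with its layout data: each entry carries col, row, color and the raw edges list alongside the usual changeset metadata, so a client can draw the graph itself. A sketch of how a client might consume it; the URL shape and the handling code are illustrative only, and the field names are the ones defined by the template above:

    // Fetch the JSON graph document for a hypothetical repo served at /repo.
    fetch('/repo/graph?style=json')
        .then(function (resp) { return resp.json(); })
        .then(function (doc) {
            doc.changesets.forEach(function (cs) {
                // col/row place the node on the grid; edges is the same edge
                // list the HTML themes receive via jsdata.
                console.log(cs.node, 'at column', cs.col, 'row', cs.row);
            });
        });
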
--- a/mercurial/templates/json/map Mon Jan 08 16:07:51 2018 -0800
+++ b/mercurial/templates/json/map Mon Jan 22 17:53:02 2018 -0500
@@ -25,6 +25,7 @@
# number of entries.
changelog = changelist.tmpl
shortlog = changelist.tmpl
+graph = graph.tmpl
changelistentry = '\{
"node": {node|json},
"date": {date|json},
@@ -37,6 +38,22 @@
"parents": [{if(allparents, join(allparents%changesetparent, ", "),
join(parent%changesetparent, ", "))}]
}'
+graphentry = '\{
+ "node": {node|json},
+ "date": {date|json},
+ "desc": {desc|utf8|json},
+ "branch": {if(branch, branch%changesetbranch, "default"|json)},
+ "bookmarks": [{join(bookmarks%changelistentryname, ", ")}],
+ "tags": [{join(tags%changelistentryname, ", ")}],
+ "user": {author|utf8|json},
+ "phase": {phase|json},
+ "col": {col|json},
+ "row": {row|json},
+ "color": {color|json},
+ "edges": {edges|json},
+ "parents": [{if(allparents, join(allparents%changesetparent, ", "),
+ join(parent%changesetparent, ", "))}]
+ }'
changelistentryname = '{name|utf8|json}'
changeset = '\{
"node": {node|json},
@@ -198,7 +215,6 @@
filelog = '\{
"entries": [{join(entries%changelistentry, ", ")}]
}'
-graph = '"not yet implemented"'
helptopics = '\{
"topics": [{join(topics%helptopicentry, ", ")}],
"earlycommands": [{join(earlycommands%helptopicentry, ", ")}],
--- a/mercurial/templates/monoblue/changelogentry.tmpl Mon Jan 08 16:07:51 2018 -0800
+++ b/mercurial/templates/monoblue/changelogentry.tmpl Mon Jan 22 17:53:02 2018 -0500
@@ -1,4 +1,9 @@
-<h3 class="changelog"><a class="title" href="{url|urlescape}rev/{node|short}{sessionvars%urlparameter}">{desc|strip|firstline|escape|nonempty}<span class="logtags"> {inbranch%inbranchtag}{branches%branchtag}{tags%tagtag}{bookmarks%bookmarktag}</span></a></h3>
+<h3 class="changelog">
+ <a class="title" href="{url|urlescape}rev/{node|short}{sessionvars%urlparameter}">
+ {desc|strip|firstline|escape|nonempty}
+ {alltags}
+ </a>
+</h3>
<ul class="changelog-entry">
<li class="age">{date|rfc822date}</li>
<li>by <span class="name">{author|obfuscate}</span> <span class="revdate">[{date|rfc822date}] rev {rev}</span></li>
--- a/mercurial/templates/monoblue/changeset.tmpl Mon Jan 08 16:07:51 2018 -0800
+++ b/mercurial/templates/monoblue/changeset.tmpl Mon Jan 22 17:53:02 2018 -0500
@@ -32,17 +32,23 @@
<h2 class="no-link no-border">changeset</h2>
- <h3 class="changeset"><a href="{url|urlescape}raw-rev/{node|short}">{desc|strip|escape|firstline|nonempty} <span class="logtags">{inbranch%inbranchtag}{branches%branchtag}{tags%tagtag}{bookmarks%bookmarktag}</span></a></h3>
+ <h3 class="changeset">
+ <a href="{url|urlescape}raw-rev/{node|short}">
+ {desc|strip|escape|firstline|nonempty}
+ {alltags}
+ </a>
+ </h3>
<p class="changeset-age"><span class="age">{date|rfc822date}</span></p>
<dl class="overview">
<dt>author</dt>
<dd>{author|obfuscate}</dd>
<dt>date</dt>
- <dd>{date|rfc822date}</dd>
+ <dd class="date age">{date|rfc822date}</dd>
{branch%changesetbranch}
<dt>changeset {rev}</dt>
<dd><a href="{url|urlescape}rev/{node|short}{sessionvars%urlparameter}">{node|short}</a></dd>
+ {if(obsolete, '<dt>obsolete</dt><dd>{succsandmarkers%obsfateentry}</dd>')}
{ifeq(count(parent), '2', parent%changesetparentdiff, parent%changesetparent)}
{child%changesetchild}
</dl>
--- a/mercurial/templates/monoblue/fileannotate.tmpl Mon Jan 08 16:07:51 2018 -0800
+++ b/mercurial/templates/monoblue/fileannotate.tmpl Mon Jan 22 17:53:02 2018 -0500
@@ -42,7 +42,7 @@
<dt>author</dt>
<dd>{author|obfuscate}</dd>
<dt>date</dt>
- <dd>{date|rfc822date}</dd>
+ <dd class="date age">{date|rfc822date}</dd>
{branch%filerevbranch}
<dt>changeset {rev}</dt>
<dd><a href="{url|urlescape}rev/{node|short}{sessionvars%urlparameter}">{node|short}</a></dd>
--- a/mercurial/templates/monoblue/filerevision.tmpl Mon Jan 08 16:07:51 2018 -0800
+++ b/mercurial/templates/monoblue/filerevision.tmpl Mon Jan 22 17:53:02 2018 -0500
@@ -42,7 +42,7 @@
<dt>author</dt>
<dd>{author|obfuscate}</dd>
<dt>date</dt>
- <dd>{date|rfc822date}</dd>
+ <dd class="date age">{date|rfc822date}</dd>
{branch%filerevbranch}
<dt>changeset {rev}</dt>
<dd><a class="list" href="{url|urlescape}rev/{node|short}{sessionvars%urlparameter}">{node|short}</a></dd>
--- a/mercurial/templates/monoblue/graph.tmpl Mon Jan 08 16:07:51 2018 -0800
+++ b/mercurial/templates/monoblue/graph.tmpl Mon Jan 22 17:53:02 2018 -0500
@@ -2,7 +2,6 @@
<title>{repo|escape}: graph</title>
<link rel="alternate" type="application/atom+xml" href="{url|urlescape}atom-log" title="Atom feed for {repo|escape}"/>
<link rel="alternate" type="application/rss+xml" href="{url|urlescape}rss-log" title="RSS feed for {repo|escape}"/>
- <!--[if IE]><script type="text/javascript" src="{staticurl|urlescape}excanvas.js"></script><![endif]-->
</head>
<body>
@@ -29,67 +28,17 @@
<div id="noscript">The revision graph only works with JavaScript-enabled browsers.</div>
<div id="wrapper">
- <ul id="nodebgs"></ul>
- <canvas id="graph" width="{canvaswidth}" height="{canvasheight}"></canvas>
- <ul id="graphnodes"></ul>
+ <canvas id="graph"></canvas>
+ <ul id="graphnodes">{nodes%graphentry}</ul>
</div>
<script{if(nonce, ' nonce="{nonce}"')}>
- <!-- hide script content
-
document.getElementById('noscript').style.display = 'none';
var data = {jsdata|json};
var graph = new Graph();
graph.scale({bg_height});
-
- graph.vertex = function(x, y, color, parity, cur) \{
-
- this.ctx.beginPath();
- color = this.setColor(color, 0.25, 0.75);
- this.ctx.arc(x, y, radius, 0, Math.PI * 2, true);
- this.ctx.fill();
-
- var bg = '<li class="bg parity' + parity + '"></li>';
- var left = (this.bg_height - this.box_size) + (this.columns + 1) * this.box_size;
- var nstyle = 'padding-left: ' + left + 'px;';
-
- var tagspan = '';
- if (cur[7].length || cur[8].length || (cur[6][0] != 'default' || cur[6][1])) \{
- tagspan = '<span class="logtags">';
- if (cur[6][1]) \{
- tagspan += '<span class="branchtag" title="' + cur[6][0] + '">';
- tagspan += cur[6][0] + '</span> ';
- } else if (!cur[6][1] && cur[6][0] != 'default') \{
- tagspan += '<span class="inbranchtag" title="' + cur[6][0] + '">';
- tagspan += cur[6][0] + '</span> ';
- }
- if (cur[7].length) \{
- for (var t in cur[7]) \{
- var tag = cur[7][t];
- tagspan += '<span class="tagtag">' + tag + '</span> ';
- }
- }
- if (cur[8].length) \{
- for (var t in cur[8]) \{
- var bookmark = cur[8][t];
- tagspan += '<span class="bookmarktag">' + bookmark + '</span> ';
- }
- }
- tagspan += '</span>';
- }
-
- var item = '<li style="' + nstyle + '"><span class="desc">';
- item += '<a href="{url|urlescape}rev/' + cur[0] + '{sessionvars%urlparameter}" title="' + cur[0] + '">' + cur[3] + '</a>';
- item += '</span>' + tagspan + '<span class="info">' + cur[5] + ', by ' + cur[4] + '</span></li>';
-
- return [bg, item];
-
- }
-
graph.render(data);
-
- // stop hiding script -->
</script>
<div class="page-path">
@@ -100,9 +49,12 @@
<script type="text/javascript"{if(nonce, ' nonce="{nonce}"')}>
ajaxScrollInit(
- '{url|urlescape}graph/{rev}?revcount=%next%&style={style}',
- {revcount}+60,
- function (htmlText, previousVal) \{ return previousVal + 60; },
+ '{url|urlescape}graph/%next%{graphvars%urlparameter}',
+ '{nextentry%"{node}"}', <!-- NEXTHASH
+ function (htmlText) \{
+ var m = htmlText.match(/'(\w+)', <!-- NEXTHASH/);
+ return m ? m[1] : null;
+ },
'#wrapper',
'<div class="%class%" style="text-align: center;">%text%</div>',
'graph'
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/mercurial/templates/monoblue/graphentry.tmpl Mon Jan 22 17:53:02 2018 -0500
@@ -0,0 +1,9 @@
+<li class="parity{parity}" data-node="{node|short}">
+ <div class="fg">
+ <span class="desc">
+ <a href="{url|urlescape}rev/{node|short}{sessionvars%urlparameter}">{desc|strip|firstline|escape|nonempty}</a>
+ </span>
+ {alltags}
+ <div class="info"><span class="age">{date|rfc822date}</span>, by {author|person}</div>
+ </div>
+</li>
--- a/mercurial/templates/monoblue/manifest.tmpl Mon Jan 08 16:07:51 2018 -0800
+++ b/mercurial/templates/monoblue/manifest.tmpl Mon Jan 22 17:53:02 2018 -0500
@@ -30,7 +30,7 @@
</ul>
<h2 class="no-link no-border">files</h2>
- <p class="files">{path|escape} <span class="logtags">{inbranch%inbranchtag}{branches%branchtag}{tags%tagtag}{bookmarks%bookmarktag}</span></p>
+ <p class="files">{path|escape} {alltags}</p>
<table>
<tr class="parity{upparity}">
--- a/mercurial/templates/monoblue/map Mon Jan 08 16:07:51 2018 -0800
+++ b/mercurial/templates/monoblue/map Mon Jan 22 17:53:02 2018 -0500
@@ -221,10 +221,19 @@
<dt>child {rev}</dt>
<dd><a href="{url|urlescape}comparison/{node|short}/{file|urlescape}{sessionvars%urlparameter}">{node|short}</a></dd>'
shortlog = shortlog.tmpl
+phasetag = '{ifeq(phase, 'public', '', '<span class="phasetag" title="{phase|escape}">{phase|escape}</span> ')}'
+obsoletetag = '{if(obsolete, '<span class="obsoletetag" title="obsolete">obsolete</span> ')}'
+instabilitytag = '<span class="instabilitytag" title="{instability|escape}">{instability|escape}</span> '
tagtag = '<span class="tagtag" title="{name|escape}">{name|escape}</span> '
branchtag = '<span class="branchtag" title="{name|escape}">{name|escape}</span> '
inbranchtag = '<span class="inbranchtag" title="{name|escape}">{name|escape}</span> '
bookmarktag = '<span class="bookmarktag" title="{name|escape}">{name|escape}</span> '
+alltags = '<span class="logtags">{phasetag}{obsoletetag}{instabilities%instabilitytag}{inbranch%inbranchtag}{branches%branchtag}{tags%tagtag}{bookmarks%bookmarktag}</span>'
+successorlink = '<a href="{url|urlescape}rev/{node|short}{sessionvars%urlparameter}">{node|short}</a> '
+obsfatesuccessors = '{if(successors, ' as ')}{successors%successorlink}'
+obsfateverb = '{obsfateverb(successors, markers)}'
+obsfateoperations = '{if(obsfateoperations(markers), ' using {join(obsfateoperations(markers), ', ')}')}'
+obsfateentry = '{obsfateverb}{obsfateoperations}{obsfatesuccessors}'
shortlogentry = '
<tr class="parity{parity}">
<td class="nowrap age">{date|rfc822date}</td>
@@ -232,7 +241,7 @@
<td>
<a href="{url|urlescape}rev/{node|short}{sessionvars%urlparameter}">
{desc|strip|firstline|escape|nonempty}
- <span class="logtags">{inbranch%inbranchtag}{branches%branchtag}{tags%tagtag}{bookmarks%bookmarktag}</span>
+ {alltags}
</a>
</td>
<td class="nowrap">
@@ -247,7 +256,7 @@
<td>
<a href="{url|urlescape}rev/{node|short}{sessionvars%urlparameter}">
{desc|strip|firstline|escape|nonempty}
- <span class="logtags">{inbranch%inbranchtag}{branches%branchtag}{tags%tagtag}{bookmarks%bookmarktag}</span>
+ {alltags}
</a>
</td>
<td class="nowrap">
@@ -278,6 +287,7 @@
urlparameter = '{separator}{name}={value|urlescape}'
hiddenformentry = '<input type="hidden" name="{name}" value="{value|escape}" />'
graph = graph.tmpl
+graphentry = graphentry.tmpl
breadcrumb = '> <a href="{url|urlescape}">{name|escape}</a> '
searchform = '
--- a/mercurial/templates/monoblue/shortlog.tmpl Mon Jan 08 16:07:51 2018 -0800
+++ b/mercurial/templates/monoblue/shortlog.tmpl Mon Jan 22 17:53:02 2018 -0500
@@ -39,7 +39,7 @@
ajaxScrollInit(
'{url|urlescape}shortlog/%next%{sessionvars%urlparameter}',
'{nextentry%"{node}"}', <!-- NEXTHASH
- function (htmlText, previousVal) \{
+ function (htmlText) \{
var m = htmlText.match(/'(\w+)', <!-- NEXTHASH/);
return m ? m[1] : null;
},
--- a/mercurial/templates/monoblue/summary.tmpl Mon Jan 08 16:07:51 2018 -0800
+++ b/mercurial/templates/monoblue/summary.tmpl Mon Jan 22 17:53:02 2018 -0500
@@ -34,7 +34,7 @@
<dt>owner</dt>
<dd>{owner|obfuscate}</dd>
<dt>last change</dt>
- <dd>{lastchange|rfc822date}</dd>
+ <dd class="date age">{lastchange|rfc822date}</dd>
</dl>
<h2><a href="{url|urlescape}shortlog{sessionvars%urlparameter}">Changes</a></h2>
--- a/mercurial/templates/paper/changeset.tmpl Mon Jan 08 16:07:51 2018 -0800
+++ b/mercurial/templates/paper/changeset.tmpl Mon Jan 22 17:53:02 2018 -0500
@@ -33,7 +33,7 @@
<h2 class="breadcrumb"><a href="/">Mercurial</a> {pathdef%breadcrumb}</h2>
<h3>
changeset {rev}:<a href="{url|urlescape}rev/{node|short}{sessionvars%urlparameter}">{node|short}</a>
- {changesetbranch%changelogbranchname}{changesettag}{changesetbookmark}
+ {alltags}
</h3>
{searchform}
@@ -49,6 +49,10 @@
<th class="date">date</th>
<td class="date age">{date|rfc822date}</td>
</tr>
+{if(obsolete, '<tr>
+ <th>obsolete</th>
+ <td>{succsandmarkers%obsfateentry}</td>
+</tr>')}
<tr>
<th class="author">parents</th>
<td class="author">{ifeq(count(parent), '2', parent%changesetparentdiff, parent%changesetparent)}</td>
--- a/mercurial/templates/paper/fileannotate.tmpl Mon Jan 08 16:07:51 2018 -0800
+++ b/mercurial/templates/paper/fileannotate.tmpl Mon Jan 22 17:53:02 2018 -0500
@@ -39,7 +39,7 @@
<h2 class="breadcrumb"><a href="/">Mercurial</a> {pathdef%breadcrumb}</h2>
<h3>
annotate {file|escape} @ {rev}:<a href="{url|urlescape}rev/{node|short}{sessionvars%urlparameter}">{node|short}</a>
- {branch%changelogbranchname}{tags%changelogtag}{bookmarks%changelogtag}
+ {alltags}
</h3>
{searchform}
--- a/mercurial/templates/paper/filecomparison.tmpl Mon Jan 08 16:07:51 2018 -0800
+++ b/mercurial/templates/paper/filecomparison.tmpl Mon Jan 22 17:53:02 2018 -0500
@@ -38,7 +38,7 @@
<h2 class="breadcrumb"><a href="/">Mercurial</a> {pathdef%breadcrumb}</h2>
<h3>
comparison {file|escape} @ {rev}:<a href="{url|urlescape}rev/{node|short}{sessionvars%urlparameter}">{node|short}</a>
- {branch%changelogbranchname}{tags%changelogtag}{bookmarks%changelogtag}
+ {alltags}
</h3>
{searchform}
--- a/mercurial/templates/paper/filediff.tmpl Mon Jan 08 16:07:51 2018 -0800
+++ b/mercurial/templates/paper/filediff.tmpl Mon Jan 22 17:53:02 2018 -0500
@@ -38,7 +38,7 @@
<h2 class="breadcrumb"><a href="/">Mercurial</a> {pathdef%breadcrumb}</h2>
<h3>
diff {file|escape} @ {rev}:<a href="{url|urlescape}rev/{node|short}{sessionvars%urlparameter}">{node|short}</a>
- {branch%changelogbranchname}{tags%changelogtag}{bookmarks%changelogtag}
+ {alltags}
</h3>
{searchform}
--- a/mercurial/templates/paper/filelog.tmpl Mon Jan 08 16:07:51 2018 -0800
+++ b/mercurial/templates/paper/filelog.tmpl Mon Jan 22 17:53:02 2018 -0500
@@ -46,9 +46,9 @@
<h2 class="breadcrumb"><a href="/">Mercurial</a> {pathdef%breadcrumb}</h2>
<h3>
log {file|escape} @ {rev}:<a href="{url|urlescape}rev/{node|short}{sessionvars%urlparameter}">{node|short}</a>
- {branch%changelogbranchname}{tags%changelogtag}{bookmarks%changelogtag}
+ {alltags}
{if(linerange,
-' (following lines {linerange}{if(descend, ', descending')} <a href="{url|urlescape}log/{symrev}/{file|urlescape}{sessionvars%urlparameter}">back to filelog</a>)')}
+' (following lines {linerange}{if(descend, ', descending')} <a href="{url|urlescape}log/{symrev}/{file|urlescape}{sessionvars%urlparameter}">all revisions for this file</a>)')}
</h3>
{searchform}
--- a/mercurial/templates/paper/filelogentry.tmpl Mon Jan 08 16:07:51 2018 -0800
+++ b/mercurial/templates/paper/filelogentry.tmpl Mon Jan 22 17:53:02 2018 -0500
@@ -3,7 +3,7 @@
<td class="author">{author|person}</td>
<td class="description">
<a href="{url|urlescape}rev/{node|short}{sessionvars%urlparameter}">{desc|strip|firstline|escape|nonempty}</a>
- {inbranch%changelogbranchname}{branches%changelogbranchhead}{tags%changelogtag}{bookmarks%changelogtag}{rename%filelogrename}
+ {alltags}{rename%filelogrename}
</td>
</tr>
{if(patch, '<tr><td colspan="3">{diff}</td></tr>')}
--- a/mercurial/templates/paper/filerevision.tmpl Mon Jan 08 16:07:51 2018 -0800
+++ b/mercurial/templates/paper/filerevision.tmpl Mon Jan 22 17:53:02 2018 -0500
@@ -38,7 +38,7 @@
<h2 class="breadcrumb"><a href="/">Mercurial</a> {pathdef%breadcrumb}</h2>
<h3>
view {file|escape} @ {rev}:<a href="{url|urlescape}rev/{node|short}{sessionvars%urlparameter}">{node|short}</a>
- {branch%changelogbranchname}{tags%changelogtag}{bookmarks%changelogtag}
+ {alltags}
</h3>
{searchform}
--- a/mercurial/templates/paper/graph.tmpl Mon Jan 08 16:07:51 2018 -0800
+++ b/mercurial/templates/paper/graph.tmpl Mon Jan 22 17:53:02 2018 -0500
@@ -4,7 +4,6 @@
href="{url|urlescape}atom-log" title="Atom feed for {repo|escape}: log" />
<link rel="alternate" type="application/rss+xml"
href="{url|urlescape}rss-log" title="RSS feed for {repo|escape}: log" />
-<!--[if IE]><script type="text/javascript" src="{staticurl|urlescape}excanvas.js"></script><![endif]-->
</head>
<body>
@@ -50,65 +49,15 @@
<noscript><p>The revision graph only works with JavaScript-enabled browsers.</p></noscript>
<div id="wrapper">
-<ul id="nodebgs" class="stripes2"></ul>
-<canvas id="graph" width="{canvaswidth}" height="{canvasheight}"></canvas>
-<ul id="graphnodes"></ul>
+<canvas id="graph"></canvas>
+<ul id="graphnodes" class="stripes2">{nodes%graphentry}</ul>
</div>
<script type="text/javascript"{if(nonce, ' nonce="{nonce}"')}>
-<!-- hide script content
-
var data = {jsdata|json};
var graph = new Graph();
graph.scale({bg_height});
-
-graph.vertex = function(x, y, color, parity, cur) \{
-
- this.ctx.beginPath();
- color = this.setColor(color, 0.25, 0.75);
- this.ctx.arc(x, y, radius, 0, Math.PI * 2, true);
- this.ctx.fill();
-
- var bg = '<li class="bg"></li>';
- var left = (this.bg_height - this.box_size) + (this.columns + 1) * this.box_size;
- var nstyle = 'padding-left: ' + left + 'px;';
-
- var tagspan = '';
- if (cur[7].length || cur[8].length || (cur[6][0] != 'default' || cur[6][1])) \{
- tagspan = '<span class="logtags">';
- if (cur[6][1]) \{
- tagspan += '<span class="branchhead" title="' + cur[6][0] + '">';
- tagspan += cur[6][0] + '</span> ';
- } else if (!cur[6][1] && cur[6][0] != 'default') \{
- tagspan += '<span class="branchname" title="' + cur[6][0] + '">';
- tagspan += cur[6][0] + '</span> ';
- }
- if (cur[7].length) \{
- for (var t in cur[7]) \{
- var tag = cur[7][t];
- tagspan += '<span class="tag">' + tag + '</span> ';
- }
- }
- if (cur[8].length) \{
- for (var b in cur[8]) \{
- var bookmark = cur[8][b];
- tagspan += '<span class="tag">' + bookmark + '</span> ';
- }
- }
- tagspan += '</span>';
- }
-
- var item = '<li style="' + nstyle + '"><span class="desc">';
- item += '<a href="{url|urlescape}rev/' + cur[0] + '{sessionvars%urlparameter}" title="' + cur[0] + '">' + cur[3] + '</a>';
- item += '</span>' + tagspan + '<span class="info">' + cur[5] + ', by ' + cur[4] + '</span></li>';
-
- return [bg, item];
-
-}
-
graph.render(data);
-
-// stop hiding script -->
</script>
<div class="navigate">
@@ -119,9 +68,12 @@
<script type="text/javascript"{if(nonce, ' nonce="{nonce}"')}>
ajaxScrollInit(
- '{url|urlescape}graph/{rev}?revcount=%next%&style={style}',
- {revcount}+60,
- function (htmlText, previousVal) \{ return previousVal + 60; },
+ '{url|urlescape}graph/%next%{graphvars%urlparameter}',
+ '{nextentry%"{node}"}', <!-- NEXTHASH
+ function (htmlText) \{
+ var m = htmlText.match(/'(\w+)', <!-- NEXTHASH/);
+ return m ? m[1] : null;
+ },
'#wrapper',
'<div class="%class%" style="text-align: center;">%text%</div>',
'graph'
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/mercurial/templates/paper/graphentry.tmpl Mon Jan 22 17:53:02 2018 -0500
@@ -0,0 +1,9 @@
+<li data-node="{node|short}">
+ <div class="fg">
+ <span class="desc">
+ <a href="{url|urlescape}rev/{node|short}{sessionvars%urlparameter}">{desc|strip|firstline|escape|nonempty}</a>
+ </span>
+ {alltags}
+ <div class="info"><span class="age">{date|rfc822date}</span>, by {author|person}</div>
+ </div>
+</li>
--- a/mercurial/templates/paper/manifest.tmpl Mon Jan 08 16:07:51 2018 -0800
+++ b/mercurial/templates/paper/manifest.tmpl Mon Jan 22 17:53:02 2018 -0500
@@ -32,7 +32,7 @@
<h2 class="breadcrumb"><a href="/">Mercurial</a> {pathdef%breadcrumb}</h2>
<h3>
directory {path|escape} @ {rev}:<a href="{url|urlescape}rev/{node|short}{sessionvars%urlparameter}">{node|short}</a>
- {branch%changelogbranchname}{tags%changelogtag}{bookmarks%changelogtag}
+ {alltags}
</h3>
{searchform}
--- a/mercurial/templates/paper/map Mon Jan 08 16:07:51 2018 -0800
+++ b/mercurial/templates/paper/map Mon Jan 22 17:53:02 2018 -0500
@@ -9,6 +9,7 @@
shortlog = shortlog.tmpl
shortlogentry = shortlogentry.tmpl
graph = graph.tmpl
+graphentry = graphentry.tmpl
help = help.tmpl
helptopics = helptopics.tmpl
@@ -198,11 +199,21 @@
</a>
</td>
</tr>'
+phasetag = '{ifeq(phase, 'public', '', '<span class="phase">{phase|escape}</span> ')}'
+obsoletetag = '{if(obsolete, '<span class="obsolete">obsolete</span> ')}'
+instabilitytag = '<span class="instability">{instability|escape}</span> '
changelogtag = '<span class="tag">{name|escape}</span> '
changesettag = '<span class="tag">{tag|escape}</span> '
changesetbookmark = '<span class="tag">{bookmark|escape}</span> '
changelogbranchhead = '<span class="branchhead">{name|escape}</span> '
changelogbranchname = '<span class="branchname">{name|escape}</span> '
+alltags = '{phasetag}{obsoletetag}{instabilities%instabilitytag}{inbranch%changelogbranchname}{branches%changelogbranchhead}{tags%changelogtag}{bookmarks%changelogtag}'
+
+successorlink = '<a href="{url|urlescape}rev/{node|short}{sessionvars%urlparameter}">{node|short}</a> '
+obsfatesuccessors = '{if(successors, ' as ')}{successors%successorlink}'
+obsfateverb = '{obsfateverb(successors, markers)}'
+obsfateoperations = '{if(obsfateoperations(markers), ' using {join(obsfateoperations(markers), ', ')}')}'
+obsfateentry = '{obsfateverb}{obsfateoperations}{obsfatesuccessors}'
filediffparent = '
<tr>
--- a/mercurial/templates/paper/shortlog.tmpl Mon Jan 08 16:07:51 2018 -0800
+++ b/mercurial/templates/paper/shortlog.tmpl Mon Jan 22 17:53:02 2018 -0500
@@ -72,7 +72,7 @@
ajaxScrollInit(
'{url|urlescape}shortlog/%next%{sessionvars%urlparameter}',
'{nextentry%"{node}"}', <!-- NEXTHASH
- function (htmlText, previousVal) \{
+ function (htmlText) \{
var m = htmlText.match(/'(\w+)', <!-- NEXTHASH/);
return m ? m[1] : null;
},
--- a/mercurial/templates/paper/shortlogentry.tmpl Mon Jan 08 16:07:51 2018 -0800
+++ b/mercurial/templates/paper/shortlogentry.tmpl Mon Jan 22 17:53:02 2018 -0500
@@ -3,6 +3,6 @@
<td class="author">{author|person}</td>
<td class="description">
<a href="{url|urlescape}rev/{node|short}{sessionvars%urlparameter}">{desc|strip|firstline|escape|nonempty}</a>
- {inbranch%changelogbranchname}{branches%changelogbranchhead}{tags%changelogtag}{bookmarks%changelogtag}
+ {alltags}
</td>
</tr>
--- a/mercurial/templates/raw/graphnode.tmpl Mon Jan 08 16:07:51 2018 -0800
+++ b/mercurial/templates/raw/graphnode.tmpl Mon Jan 22 17:53:02 2018 -0500
@@ -1,7 +1,7 @@
-changeset: {node}
-user: {user}
-date: {age}
-summary: {desc}
+changeset: {node|short}
+user: {author|person}
+date: {date|age}
+summary: {desc|firstline|nonempty}
{branches%branchname}{tags%tagname}{bookmarks%bookmarkname}
node: ({col}, {row}) (color {color})
{edges%graphedge}
--- a/mercurial/templates/spartan/changelogentry.tmpl Mon Jan 08 16:07:51 2018 -0800
+++ b/mercurial/templates/spartan/changelogentry.tmpl Mon Jan 22 17:53:02 2018 -0500
@@ -18,6 +18,18 @@
<th class="date">date:</th>
<td class="date">{date|rfc822date}</td>
</tr>
+ {ifeq(phase, 'public', '', '<tr>
+ <th class="phase">phase:</th>
+ <td class="phase">{phase|escape}</td>
+ </tr>')}
+ {if(obsolete, '<tr>
+ <th class="obsolete">obsolete:</th>
+ <td class="obsolete">{succsandmarkers%obsfateentry}</td>
+ </tr>')}
+ {ifeq(count(instabilities), '0', '', '<tr>
+ <th class="instabilities">instabilities:</th>
+ <td class="instabilities">{instabilities%"{instability} "|escape}</td>
+ </tr>')}
<tr>
<th class="files"><a href="{url|urlescape}file/{node|short}{sessionvars%urlparameter}">files</a>:</th>
<td class="files">{files}</td>
--- a/mercurial/templates/spartan/changeset.tmpl Mon Jan 08 16:07:51 2018 -0800
+++ b/mercurial/templates/spartan/changeset.tmpl Mon Jan 22 17:53:02 2018 -0500
@@ -33,6 +33,18 @@
<th class="date">date:</th>
<td class="date age">{date|rfc822date}</td>
</tr>
+{ifeq(phase, 'public', '', '<tr>
+ <th class="phase">phase:</th>
+ <td class="phase">{phase|escape}</td>
+</tr>')}
+{if(obsolete, '<tr>
+ <th class="obsolete">obsolete:</th>
+ <td class="obsolete">{succsandmarkers%obsfateentry}</td>
+</tr>')}
+{ifeq(count(instabilities), '0', '', '<tr>
+ <th class="instabilities">instabilities:</th>
+ <td class="instabilities">{instabilities%"{instability} "|escape}</td>
+</tr>')}
<tr>
<th class="files">files:</th>
<td class="files">{files}</td>
--- a/mercurial/templates/spartan/graph.tmpl Mon Jan 08 16:07:51 2018 -0800
+++ b/mercurial/templates/spartan/graph.tmpl Mon Jan 22 17:53:02 2018 -0500
@@ -4,7 +4,6 @@
href="{url|urlescape}atom-tags" title="Atom feed for {repo|escape}: tags">
<link rel="alternate" type="application/rss+xml"
href="{url|urlescape}rss-tags" title="RSS feed for {repo|escape}: tags">
-<!--[if IE]><script type="text/javascript" src="{staticurl|urlescape}excanvas.js"></script><![endif]-->
</head>
<body>
@@ -31,39 +30,15 @@
<noscript>The revision graph only works with JavaScript-enabled browsers.</noscript>
<div id="wrapper">
-<ul id="nodebgs"></ul>
-<canvas id="graph" width="{canvaswidth}" height="{canvasheight}"></canvas>
-<ul id="graphnodes"></ul>
+<canvas id="graph"></canvas>
+<ul id="graphnodes">{nodes%graphentry}</ul>
</div>
<script type="text/javascript"{if(nonce, ' nonce="{nonce}"')}>
-<!-- hide script content
-
var data = {jsdata|json};
var graph = new Graph();
graph.scale({bg_height});
-
-graph.vertex = function(x, y, color, parity, cur) \{
-
- this.ctx.beginPath();
- color = this.setColor(color, 0.25, 0.75);
- this.ctx.arc(x, y, radius, 0, Math.PI * 2, true);
- this.ctx.fill();
-
- var bg = '<li class="bg parity' + parity + '"></li>';
- var left = (this.bg_height - this.box_size) + (this.columns + 1) * this.box_size;
- var nstyle = 'padding-left: ' + left + 'px;';
- var item = '<li style="' + nstyle + '"><span class="desc">';
- item += '<a href="{url|urlescape}rev/' + cur[0] + '{sessionvars%urlparameter}" title="' + cur[0] + '">' + cur[3] + '</a>';
- item += '</span><span class="info">' + cur[5] + ', by ' + cur[4] + '</span></li>';
-
- return [bg, item];
-
-}
-
graph.render(data);
-
-// stop hiding script -->
</script>
<form action="{url|urlescape}log">
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/mercurial/templates/spartan/graphentry.tmpl Mon Jan 22 17:53:02 2018 -0500
@@ -0,0 +1,8 @@
+<li class="parity{parity}" data-node="{node|short}">
+ <div class="fg">
+ <span class="desc">
+ <a href="{url|urlescape}rev/{node|short}{sessionvars%urlparameter}">{desc|strip|firstline|escape|nonempty}</a>
+ </span>
+ <div class="info"><span class="age">{date|rfc822date}</span>, by {author|person}</div>
+ </div>
+</li>
--- a/mercurial/templates/spartan/map Mon Jan 08 16:07:51 2018 -0800
+++ b/mercurial/templates/spartan/map Mon Jan 22 17:53:02 2018 -0500
@@ -7,6 +7,7 @@
shortlog = shortlog.tmpl
shortlogentry = shortlogentry.tmpl
graph = graph.tmpl
+graphentry = graphentry.tmpl
naventry = '<a href="{url|urlescape}log/{node|short}{sessionvars%urlparameter}">{label|escape}</a> '
navshortentry = '<a href="{url|urlescape}shortlog/{node|short}{sessionvars%urlparameter}">{label|escape}</a> '
navgraphentry = '<a href="{url|urlescape}graph/{node|short}{sessionvars%urlparameter}">{label|escape}</a> '
@@ -165,6 +166,11 @@
diffblock = '<pre class="parity{parity}">{lines}</pre>'
changelogtag = '<tr><th class="tag">tag:</th><td class="tag">{tag|escape}</td></tr>'
changesettag = '<tr><th class="tag">tag:</th><td class="tag">{tag|escape}</td></tr>'
+successorlink = '<a href="{url|urlescape}rev/{node|short}{sessionvars%urlparameter}">{node|short}</a> '
+obsfatesuccessors = '{if(successors, ' as ')}{successors%successorlink}'
+obsfateverb = '{obsfateverb(successors, markers)}'
+obsfateoperations = '{if(obsfateoperations(markers), ' using {join(obsfateoperations(markers), ', ')}')}'
+obsfateentry = '{obsfateverb}{obsfateoperations}{obsfatesuccessors}'
filediffparent = '
<tr>
<th class="parent">parent {rev}:</th>
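
The obsfate* keywords added to the gitweb, monoblue, paper and spartan maps above let changeset pages describe what happened to an obsolete changeset: obsfateentry concatenates a verb, an optional " using <operations>" clause and an optional " as <successors>" list, and the changeset templates render it behind {if(obsolete, ...)}. A minimal sketch of that string layout (the real composition happens server-side in the templater; verbs, operations and hashes here are hypothetical examples):

    // Mirror of the '{obsfateverb}{obsfateoperations}{obsfatesuccessors}'
    // layout defined in the maps above.
    function obsfateentry(verb, operations, successors) {
        var text = verb;                                     // e.g. "rewritten"
        if (operations.length) { text += ' using ' + operations.join(', '); }
        if (successors.length) { text += ' as ' + successors.join(' '); }
        return text;        // e.g. "rewritten using amend as 1a2b3c4d5e6f"
    }
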
--- a/mercurial/templates/static/excanvas.js Mon Jan 08 16:07:51 2018 -0800
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,924 +0,0 @@
-// Copyright 2006 Google Inc.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-
-// Known Issues:
-//
-// * Patterns are not implemented.
-// * Radial gradient are not implemented. The VML version of these look very
-// different from the canvas one.
-// * Clipping paths are not implemented.
-// * Coordsize. The width and height attribute have higher priority than the
-// width and height style values which isn't correct.
-// * Painting mode isn't implemented.
-// * Canvas width/height should is using content-box by default. IE in
-// Quirks mode will draw the canvas using border-box. Either change your
-// doctype to HTML5
-// (http://www.whatwg.org/specs/web-apps/current-work/#the-doctype)
-// or use Box Sizing Behavior from WebFX
-// (http://webfx.eae.net/dhtml/boxsizing/boxsizing.html)
-// * Non uniform scaling does not correctly scale strokes.
-// * Optimize. There is always room for speed improvements.
-
-// Only add this code if we do not already have a canvas implementation
-if (!document.createElement('canvas').getContext) {
-
-(function() {
-
- // alias some functions to make (compiled) code shorter
- var m = Math;
- var mr = m.round;
- var ms = m.sin;
- var mc = m.cos;
- var abs = m.abs;
- var sqrt = m.sqrt;
-
- // this is used for sub pixel precision
- var Z = 10;
- var Z2 = Z / 2;
-
- /**
- * This funtion is assigned to the <canvas> elements as element.getContext().
- * @this {HTMLElement}
- * @return {CanvasRenderingContext2D_}
- */
- function getContext() {
- return this.context_ ||
- (this.context_ = new CanvasRenderingContext2D_(this));
- }
-
- var slice = Array.prototype.slice;
-
- /**
- * Binds a function to an object. The returned function will always use the
- * passed in {@code obj} as {@code this}.
- *
- * Example:
- *
- * g = bind(f, obj, a, b)
- * g(c, d) // will do f.call(obj, a, b, c, d)
- *
- * @param {Function} f The function to bind the object to
- * @param {Object} obj The object that should act as this when the function
- * is called
- * @param {*} var_args Rest arguments that will be used as the initial
- * arguments when the function is called
- * @return {Function} A new function that has bound this
- */
- function bind(f, obj, var_args) {
- var a = slice.call(arguments, 2);
- return function() {
- return f.apply(obj, a.concat(slice.call(arguments)));
- };
- }
-
- var G_vmlCanvasManager_ = {
- init: function(opt_doc) {
- if (/MSIE/.test(navigator.userAgent) && !window.opera) {
- var doc = opt_doc || document;
- // Create a dummy element so that IE will allow canvas elements to be
- // recognized.
- doc.createElement('canvas');
- doc.attachEvent('onreadystatechange', bind(this.init_, this, doc));
- }
- },
-
- init_: function(doc) {
- // create xmlns
- if (!doc.namespaces['g_vml_']) {
- doc.namespaces.add('g_vml_', 'urn:schemas-microsoft-com:vml',
- '#default#VML');
-
- }
- if (!doc.namespaces['g_o_']) {
- doc.namespaces.add('g_o_', 'urn:schemas-microsoft-com:office:office',
- '#default#VML');
- }
-
- // Setup default CSS. Only add one style sheet per document
- if (!doc.styleSheets['ex_canvas_']) {
- var ss = doc.createStyleSheet();
- ss.owningElement.id = 'ex_canvas_';
- ss.cssText = 'canvas{display:inline-block;overflow:hidden;' +
- // default size is 300x150 in Gecko and Opera
- 'text-align:left;width:300px;height:150px}' +
- 'g_vml_\\:*{behavior:url(#default#VML)}' +
- 'g_o_\\:*{behavior:url(#default#VML)}';
-
- }
-
- // find all canvas elements
- var els = doc.getElementsByTagName('canvas');
- for (var i = 0; i < els.length; i++) {
- this.initElement(els[i]);
- }
- },
-
- /**
- * Public initializes a canvas element so that it can be used as canvas
- * element from now on. This is called automatically before the page is
- * loaded but if you are creating elements using createElement you need to
- * make sure this is called on the element.
- * @param {HTMLElement} el The canvas element to initialize.
- * @return {HTMLElement} the element that was created.
- */
- initElement: function(el) {
- if (!el.getContext) {
-
- el.getContext = getContext;
-
- // Remove fallback content. There is no way to hide text nodes so we
- // just remove all childNodes. We could hide all elements and remove
- // text nodes but who really cares about the fallback content.
- el.innerHTML = '';
-
- // do not use inline function because that will leak memory
- el.attachEvent('onpropertychange', onPropertyChange);
- el.attachEvent('onresize', onResize);
-
- var attrs = el.attributes;
- if (attrs.width && attrs.width.specified) {
- // TODO: use runtimeStyle and coordsize
- // el.getContext().setWidth_(attrs.width.nodeValue);
- el.style.width = attrs.width.nodeValue + 'px';
- } else {
- el.width = el.clientWidth;
- }
- if (attrs.height && attrs.height.specified) {
- // TODO: use runtimeStyle and coordsize
- // el.getContext().setHeight_(attrs.height.nodeValue);
- el.style.height = attrs.height.nodeValue + 'px';
- } else {
- el.height = el.clientHeight;
- }
- //el.getContext().setCoordsize_()
- }
- return el;
- }
- };
-
- function onPropertyChange(e) {
- var el = e.srcElement;
-
- switch (e.propertyName) {
- case 'width':
- el.style.width = el.attributes.width.nodeValue + 'px';
- el.getContext().clearRect();
- break;
- case 'height':
- el.style.height = el.attributes.height.nodeValue + 'px';
- el.getContext().clearRect();
- break;
- }
- }
-
- function onResize(e) {
- var el = e.srcElement;
- if (el.firstChild) {
- el.firstChild.style.width = el.clientWidth + 'px';
- el.firstChild.style.height = el.clientHeight + 'px';
- }
- }
-
- G_vmlCanvasManager_.init();
-
- // precompute "00" to "FF"
- var dec2hex = [];
- for (var i = 0; i < 16; i++) {
- for (var j = 0; j < 16; j++) {
- dec2hex[i * 16 + j] = i.toString(16) + j.toString(16);
- }
- }
-
- function createMatrixIdentity() {
- return [
- [1, 0, 0],
- [0, 1, 0],
- [0, 0, 1]
- ];
- }
-
- function matrixMultiply(m1, m2) {
- var result = createMatrixIdentity();
-
- for (var x = 0; x < 3; x++) {
- for (var y = 0; y < 3; y++) {
- var sum = 0;
-
- for (var z = 0; z < 3; z++) {
- sum += m1[x][z] * m2[z][y];
- }
-
- result[x][y] = sum;
- }
- }
- return result;
- }
-
- function copyState(o1, o2) {
- o2.fillStyle = o1.fillStyle;
- o2.lineCap = o1.lineCap;
- o2.lineJoin = o1.lineJoin;
- o2.lineWidth = o1.lineWidth;
- o2.miterLimit = o1.miterLimit;
- o2.shadowBlur = o1.shadowBlur;
- o2.shadowColor = o1.shadowColor;
- o2.shadowOffsetX = o1.shadowOffsetX;
- o2.shadowOffsetY = o1.shadowOffsetY;
- o2.strokeStyle = o1.strokeStyle;
- o2.globalAlpha = o1.globalAlpha;
- o2.arcScaleX_ = o1.arcScaleX_;
- o2.arcScaleY_ = o1.arcScaleY_;
- o2.lineScale_ = o1.lineScale_;
- }
-
- function processStyle(styleString) {
- var str, alpha = 1;
-
- styleString = String(styleString);
- if (styleString.substring(0, 3) == 'rgb') {
- var start = styleString.indexOf('(', 3);
- var end = styleString.indexOf(')', start + 1);
- var guts = styleString.substring(start + 1, end).split(',');
-
- str = '#';
- for (var i = 0; i < 3; i++) {
- str += dec2hex[Number(guts[i])];
- }
-
- if (guts.length == 4 && styleString.substr(3, 1) == 'a') {
- alpha = guts[3];
- }
- } else {
- str = styleString;
- }
-
- return {color: str, alpha: alpha};
- }
-
- function processLineCap(lineCap) {
- switch (lineCap) {
- case 'butt':
- return 'flat';
- case 'round':
- return 'round';
- case 'square':
- default:
- return 'square';
- }
- }
-
- /**
- * This class implements CanvasRenderingContext2D interface as described by
- * the WHATWG.
- * @param {HTMLElement} surfaceElement The element that the 2D context should
- * be associated with
- */
- function CanvasRenderingContext2D_(surfaceElement) {
- this.m_ = createMatrixIdentity();
-
- this.mStack_ = [];
- this.aStack_ = [];
- this.currentPath_ = [];
-
- // Canvas context properties
- this.strokeStyle = '#000';
- this.fillStyle = '#000';
-
- this.lineWidth = 1;
- this.lineJoin = 'miter';
- this.lineCap = 'butt';
- this.miterLimit = Z * 1;
- this.globalAlpha = 1;
- this.canvas = surfaceElement;
-
- var el = surfaceElement.ownerDocument.createElement('div');
- el.style.width = surfaceElement.clientWidth + 'px';
- el.style.height = surfaceElement.clientHeight + 'px';
- el.style.overflow = 'hidden';
- el.style.position = 'absolute';
- surfaceElement.appendChild(el);
-
- this.element_ = el;
- this.arcScaleX_ = 1;
- this.arcScaleY_ = 1;
- this.lineScale_ = 1;
- }
-
- var contextPrototype = CanvasRenderingContext2D_.prototype;
- contextPrototype.clearRect = function() {
- this.element_.innerHTML = '';
- };
-
- contextPrototype.beginPath = function() {
- // TODO: Branch current matrix so that save/restore has no effect
- // as per safari docs.
- this.currentPath_ = [];
- };
-
- contextPrototype.moveTo = function(aX, aY) {
- var p = this.getCoords_(aX, aY);
- this.currentPath_.push({type: 'moveTo', x: p.x, y: p.y});
- this.currentX_ = p.x;
- this.currentY_ = p.y;
- };
-
- contextPrototype.lineTo = function(aX, aY) {
- var p = this.getCoords_(aX, aY);
- this.currentPath_.push({type: 'lineTo', x: p.x, y: p.y});
-
- this.currentX_ = p.x;
- this.currentY_ = p.y;
- };
-
- contextPrototype.bezierCurveTo = function(aCP1x, aCP1y,
- aCP2x, aCP2y,
- aX, aY) {
- var p = this.getCoords_(aX, aY);
- var cp1 = this.getCoords_(aCP1x, aCP1y);
- var cp2 = this.getCoords_(aCP2x, aCP2y);
- bezierCurveTo(this, cp1, cp2, p);
- };
-
- // Helper function that takes the already fixed cordinates.
- function bezierCurveTo(self, cp1, cp2, p) {
- self.currentPath_.push({
- type: 'bezierCurveTo',
- cp1x: cp1.x,
- cp1y: cp1.y,
- cp2x: cp2.x,
- cp2y: cp2.y,
- x: p.x,
- y: p.y
- });
- self.currentX_ = p.x;
- self.currentY_ = p.y;
- }
-
- contextPrototype.quadraticCurveTo = function(aCPx, aCPy, aX, aY) {
- // the following is lifted almost directly from
- // http://developer.mozilla.org/en/docs/Canvas_tutorial:Drawing_shapes
-
- var cp = this.getCoords_(aCPx, aCPy);
- var p = this.getCoords_(aX, aY);
-
- var cp1 = {
- x: this.currentX_ + 2.0 / 3.0 * (cp.x - this.currentX_),
- y: this.currentY_ + 2.0 / 3.0 * (cp.y - this.currentY_)
- };
- var cp2 = {
- x: cp1.x + (p.x - this.currentX_) / 3.0,
- y: cp1.y + (p.y - this.currentY_) / 3.0
- };
-
- bezierCurveTo(this, cp1, cp2, p);
- };
-
- contextPrototype.arc = function(aX, aY, aRadius,
- aStartAngle, aEndAngle, aClockwise) {
- aRadius *= Z;
- var arcType = aClockwise ? 'at' : 'wa';
-
- var xStart = aX + mc(aStartAngle) * aRadius - Z2;
- var yStart = aY + ms(aStartAngle) * aRadius - Z2;
-
- var xEnd = aX + mc(aEndAngle) * aRadius - Z2;
- var yEnd = aY + ms(aEndAngle) * aRadius - Z2;
-
- // IE won't render arches drawn counter clockwise if xStart == xEnd.
- if (xStart == xEnd && !aClockwise) {
- xStart += 0.125; // Offset xStart by 1/80 of a pixel. Use something
- // that can be represented in binary
- }
-
- var p = this.getCoords_(aX, aY);
- var pStart = this.getCoords_(xStart, yStart);
- var pEnd = this.getCoords_(xEnd, yEnd);
-
- this.currentPath_.push({type: arcType,
- x: p.x,
- y: p.y,
- radius: aRadius,
- xStart: pStart.x,
- yStart: pStart.y,
- xEnd: pEnd.x,
- yEnd: pEnd.y});
-
- };
-
- contextPrototype.rect = function(aX, aY, aWidth, aHeight) {
- this.moveTo(aX, aY);
- this.lineTo(aX + aWidth, aY);
- this.lineTo(aX + aWidth, aY + aHeight);
- this.lineTo(aX, aY + aHeight);
- this.closePath();
- };
-
- contextPrototype.strokeRect = function(aX, aY, aWidth, aHeight) {
- var oldPath = this.currentPath_;
- this.beginPath();
-
- this.moveTo(aX, aY);
- this.lineTo(aX + aWidth, aY);
- this.lineTo(aX + aWidth, aY + aHeight);
- this.lineTo(aX, aY + aHeight);
- this.closePath();
- this.stroke();
-
- this.currentPath_ = oldPath;
- };
-
- contextPrototype.fillRect = function(aX, aY, aWidth, aHeight) {
- var oldPath = this.currentPath_;
- this.beginPath();
-
- this.moveTo(aX, aY);
- this.lineTo(aX + aWidth, aY);
- this.lineTo(aX + aWidth, aY + aHeight);
- this.lineTo(aX, aY + aHeight);
- this.closePath();
- this.fill();
-
- this.currentPath_ = oldPath;
- };
-
- contextPrototype.createLinearGradient = function(aX0, aY0, aX1, aY1) {
- var gradient = new CanvasGradient_('gradient');
- gradient.x0_ = aX0;
- gradient.y0_ = aY0;
- gradient.x1_ = aX1;
- gradient.y1_ = aY1;
- return gradient;
- };
-
- contextPrototype.createRadialGradient = function(aX0, aY0, aR0,
- aX1, aY1, aR1) {
- var gradient = new CanvasGradient_('gradientradial');
- gradient.x0_ = aX0;
- gradient.y0_ = aY0;
- gradient.r0_ = aR0;
- gradient.x1_ = aX1;
- gradient.y1_ = aY1;
- gradient.r1_ = aR1;
- return gradient;
- };
-
- contextPrototype.drawImage = function(image, var_args) {
- var dx, dy, dw, dh, sx, sy, sw, sh;
-
- // to find the original width we overide the width and height
- var oldRuntimeWidth = image.runtimeStyle.width;
- var oldRuntimeHeight = image.runtimeStyle.height;
- image.runtimeStyle.width = 'auto';
- image.runtimeStyle.height = 'auto';
-
- // get the original size
- var w = image.width;
- var h = image.height;
-
- // and remove overides
- image.runtimeStyle.width = oldRuntimeWidth;
- image.runtimeStyle.height = oldRuntimeHeight;
-
- if (arguments.length == 3) {
- dx = arguments[1];
- dy = arguments[2];
- sx = sy = 0;
- sw = dw = w;
- sh = dh = h;
- } else if (arguments.length == 5) {
- dx = arguments[1];
- dy = arguments[2];
- dw = arguments[3];
- dh = arguments[4];
- sx = sy = 0;
- sw = w;
- sh = h;
- } else if (arguments.length == 9) {
- sx = arguments[1];
- sy = arguments[2];
- sw = arguments[3];
- sh = arguments[4];
- dx = arguments[5];
- dy = arguments[6];
- dw = arguments[7];
- dh = arguments[8];
- } else {
- throw Error('Invalid number of arguments');
- }
-
- var d = this.getCoords_(dx, dy);
-
- var w2 = sw / 2;
- var h2 = sh / 2;
-
- var vmlStr = [];
-
- var W = 10;
- var H = 10;
-
- // For some reason that I've now forgotten, using divs didn't work
- vmlStr.push(' <g_vml_:group',
- ' coordsize="', Z * W, ',', Z * H, '"',
- ' coordorigin="0,0"' ,
- ' style="width:', W, 'px;height:', H, 'px;position:absolute;');
-
- // If filters are necessary (rotation exists), create them
- // filters are bog-slow, so only create them if abbsolutely necessary
- // The following check doesn't account for skews (which don't exist
- // in the canvas spec (yet) anyway.
-
- if (this.m_[0][0] != 1 || this.m_[0][1]) {
- var filter = [];
-
- // Note the 12/21 reversal
- filter.push('M11=', this.m_[0][0], ',',
- 'M12=', this.m_[1][0], ',',
- 'M21=', this.m_[0][1], ',',
- 'M22=', this.m_[1][1], ',',
- 'Dx=', mr(d.x / Z), ',',
- 'Dy=', mr(d.y / Z), '');
-
- // Bounding box calculation (need to minimize displayed area so that
- // filters don't waste time on unused pixels.
- var max = d;
- var c2 = this.getCoords_(dx + dw, dy);
- var c3 = this.getCoords_(dx, dy + dh);
- var c4 = this.getCoords_(dx + dw, dy + dh);
-
- max.x = m.max(max.x, c2.x, c3.x, c4.x);
- max.y = m.max(max.y, c2.y, c3.y, c4.y);
-
- vmlStr.push('padding:0 ', mr(max.x / Z), 'px ', mr(max.y / Z),
- 'px 0;filter:progid:DXImageTransform.Microsoft.Matrix(',
- filter.join(''), ", sizingmethod='clip');")
- } else {
- vmlStr.push('top:', mr(d.y / Z), 'px;left:', mr(d.x / Z), 'px;');
- }
-
- vmlStr.push(' ">' ,
- '<g_vml_:image src="', image.src, '"',
- ' style="width:', Z * dw, 'px;',
- ' height:', Z * dh, 'px;"',
- ' cropleft="', sx / w, '"',
- ' croptop="', sy / h, '"',
- ' cropright="', (w - sx - sw) / w, '"',
- ' cropbottom="', (h - sy - sh) / h, '"',
- ' />',
- '</g_vml_:group>');
-
- this.element_.insertAdjacentHTML('BeforeEnd',
- vmlStr.join(''));
- };
-
- contextPrototype.stroke = function(aFill) {
- var lineStr = [];
- var lineOpen = false;
- var a = processStyle(aFill ? this.fillStyle : this.strokeStyle);
- var color = a.color;
- var opacity = a.alpha * this.globalAlpha;
-
- var W = 10;
- var H = 10;
-
- lineStr.push('<g_vml_:shape',
- ' filled="', !!aFill, '"',
- ' style="position:absolute;width:', W, 'px;height:', H, 'px;"',
- ' coordorigin="0 0" coordsize="', Z * W, ' ', Z * H, '"',
- ' stroked="', !aFill, '"',
- ' path="');
-
- var newSeq = false;
- var min = {x: null, y: null};
- var max = {x: null, y: null};
-
- for (var i = 0; i < this.currentPath_.length; i++) {
- var p = this.currentPath_[i];
- var c;
-
- switch (p.type) {
- case 'moveTo':
- c = p;
- lineStr.push(' m ', mr(p.x), ',', mr(p.y));
- break;
- case 'lineTo':
- lineStr.push(' l ', mr(p.x), ',', mr(p.y));
- break;
- case 'close':
- lineStr.push(' x ');
- p = null;
- break;
- case 'bezierCurveTo':
- lineStr.push(' c ',
- mr(p.cp1x), ',', mr(p.cp1y), ',',
- mr(p.cp2x), ',', mr(p.cp2y), ',',
- mr(p.x), ',', mr(p.y));
- break;
- case 'at':
- case 'wa':
- lineStr.push(' ', p.type, ' ',
- mr(p.x - this.arcScaleX_ * p.radius), ',',
- mr(p.y - this.arcScaleY_ * p.radius), ' ',
- mr(p.x + this.arcScaleX_ * p.radius), ',',
- mr(p.y + this.arcScaleY_ * p.radius), ' ',
- mr(p.xStart), ',', mr(p.yStart), ' ',
- mr(p.xEnd), ',', mr(p.yEnd));
- break;
- }
-
-
- // TODO: Following is broken for curves due to
- // move to proper paths.
-
- // Figure out dimensions so we can do gradient fills
- // properly
- if (p) {
- if (min.x == null || p.x < min.x) {
- min.x = p.x;
- }
- if (max.x == null || p.x > max.x) {
- max.x = p.x;
- }
- if (min.y == null || p.y < min.y) {
- min.y = p.y;
- }
- if (max.y == null || p.y > max.y) {
- max.y = p.y;
- }
- }
- }
- lineStr.push(' ">');
-
- if (!aFill) {
- var lineWidth = this.lineScale_ * this.lineWidth;
-
- // VML cannot correctly render a line if the width is less than 1px.
- // In that case, we dilute the color to make the line look thinner.
- if (lineWidth < 1) {
- opacity *= lineWidth;
- }
-
- lineStr.push(
- '<g_vml_:stroke',
- ' opacity="', opacity, '"',
- ' joinstyle="', this.lineJoin, '"',
- ' miterlimit="', this.miterLimit, '"',
- ' endcap="', processLineCap(this.lineCap), '"',
- ' weight="', lineWidth, 'px"',
- ' color="', color, '" />'
- );
- } else if (typeof this.fillStyle == 'object') {
- var fillStyle = this.fillStyle;
- var angle = 0;
- var focus = {x: 0, y: 0};
-
- // additional offset
- var shift = 0;
- // scale factor for offset
- var expansion = 1;
-
- if (fillStyle.type_ == 'gradient') {
- var x0 = fillStyle.x0_ / this.arcScaleX_;
- var y0 = fillStyle.y0_ / this.arcScaleY_;
- var x1 = fillStyle.x1_ / this.arcScaleX_;
- var y1 = fillStyle.y1_ / this.arcScaleY_;
- var p0 = this.getCoords_(x0, y0);
- var p1 = this.getCoords_(x1, y1);
- var dx = p1.x - p0.x;
- var dy = p1.y - p0.y;
- angle = Math.atan2(dx, dy) * 180 / Math.PI;
-
- // The angle should be a non-negative number.
- if (angle < 0) {
- angle += 360;
- }
-
- // Very small angles produce an unexpected result because they are
- // converted to a scientific notation string.
- if (angle < 1e-6) {
- angle = 0;
- }
- } else {
- var p0 = this.getCoords_(fillStyle.x0_, fillStyle.y0_);
- var width = max.x - min.x;
- var height = max.y - min.y;
- focus = {
- x: (p0.x - min.x) / width,
- y: (p0.y - min.y) / height
- };
-
- width /= this.arcScaleX_ * Z;
- height /= this.arcScaleY_ * Z;
- var dimension = m.max(width, height);
- shift = 2 * fillStyle.r0_ / dimension;
- expansion = 2 * fillStyle.r1_ / dimension - shift;
- }
-
- // We need to sort the color stops in ascending order by offset,
- // otherwise IE won't interpret it correctly.
- var stops = fillStyle.colors_;
- stops.sort(function(cs1, cs2) {
- return cs1.offset - cs2.offset;
- });
-
- var length = stops.length;
- var color1 = stops[0].color;
- var color2 = stops[length - 1].color;
- var opacity1 = stops[0].alpha * this.globalAlpha;
- var opacity2 = stops[length - 1].alpha * this.globalAlpha;
-
- var colors = [];
- for (var i = 0; i < length; i++) {
- var stop = stops[i];
- colors.push(stop.offset * expansion + shift + ' ' + stop.color);
- }
-
- // When colors attribute is used, the meanings of opacity and o:opacity2
- // are reversed.
- lineStr.push('<g_vml_:fill type="', fillStyle.type_, '"',
- ' method="none" focus="100%"',
- ' color="', color1, '"',
- ' color2="', color2, '"',
- ' colors="', colors.join(','), '"',
- ' opacity="', opacity2, '"',
- ' g_o_:opacity2="', opacity1, '"',
- ' angle="', angle, '"',
- ' focusposition="', focus.x, ',', focus.y, '" />');
- } else {
- lineStr.push('<g_vml_:fill color="', color, '" opacity="', opacity,
- '" />');
- }
-
- lineStr.push('</g_vml_:shape>');
-
- this.element_.insertAdjacentHTML('beforeEnd', lineStr.join(''));
- };
-
- contextPrototype.fill = function() {
- this.stroke(true);
- }
-
- contextPrototype.closePath = function() {
- this.currentPath_.push({type: 'close'});
- };
-
- /**
- * @private
- */
- contextPrototype.getCoords_ = function(aX, aY) {
- var m = this.m_;
- return {
- x: Z * (aX * m[0][0] + aY * m[1][0] + m[2][0]) - Z2,
- y: Z * (aX * m[0][1] + aY * m[1][1] + m[2][1]) - Z2
- }
- };
-
- contextPrototype.save = function() {
- var o = {};
- copyState(this, o);
- this.aStack_.push(o);
- this.mStack_.push(this.m_);
- this.m_ = matrixMultiply(createMatrixIdentity(), this.m_);
- };
-
- contextPrototype.restore = function() {
- copyState(this.aStack_.pop(), this);
- this.m_ = this.mStack_.pop();
- };
-
- function matrixIsFinite(m) {
- for (var j = 0; j < 3; j++) {
- for (var k = 0; k < 2; k++) {
- if (!isFinite(m[j][k]) || isNaN(m[j][k])) {
- return false;
- }
- }
- }
- return true;
- }
-
- function setM(ctx, m, updateLineScale) {
- if (!matrixIsFinite(m)) {
- return;
- }
- ctx.m_ = m;
-
- if (updateLineScale) {
- // Get the line scale.
- // Determinant of this.m_ means how much the area is enlarged by the
- // transformation. So its square root can be used as a scale factor
- // for width.
- var det = m[0][0] * m[1][1] - m[0][1] * m[1][0];
- ctx.lineScale_ = sqrt(abs(det));
- }
- }
-
- contextPrototype.translate = function(aX, aY) {
- var m1 = [
- [1, 0, 0],
- [0, 1, 0],
- [aX, aY, 1]
- ];
-
- setM(this, matrixMultiply(m1, this.m_), false);
- };
-
- contextPrototype.rotate = function(aRot) {
- var c = mc(aRot);
- var s = ms(aRot);
-
- var m1 = [
- [c, s, 0],
- [-s, c, 0],
- [0, 0, 1]
- ];
-
- setM(this, matrixMultiply(m1, this.m_), false);
- };
-
- contextPrototype.scale = function(aX, aY) {
- this.arcScaleX_ *= aX;
- this.arcScaleY_ *= aY;
- var m1 = [
- [aX, 0, 0],
- [0, aY, 0],
- [0, 0, 1]
- ];
-
- setM(this, matrixMultiply(m1, this.m_), true);
- };
-
- contextPrototype.transform = function(m11, m12, m21, m22, dx, dy) {
- var m1 = [
- [m11, m12, 0],
- [m21, m22, 0],
- [dx, dy, 1]
- ];
-
- setM(this, matrixMultiply(m1, this.m_), true);
- };
-
- contextPrototype.setTransform = function(m11, m12, m21, m22, dx, dy) {
- var m = [
- [m11, m12, 0],
- [m21, m22, 0],
- [dx, dy, 1]
- ];
-
- setM(this, m, true);
- };
-
- /******** STUBS ********/
- contextPrototype.clip = function() {
- // TODO: Implement
- };
-
- contextPrototype.arcTo = function() {
- // TODO: Implement
- };
-
- contextPrototype.createPattern = function() {
- return new CanvasPattern_;
- };
-
- // Gradient / Pattern Stubs
- function CanvasGradient_(aType) {
- this.type_ = aType;
- this.x0_ = 0;
- this.y0_ = 0;
- this.r0_ = 0;
- this.x1_ = 0;
- this.y1_ = 0;
- this.r1_ = 0;
- this.colors_ = [];
- }
-
- CanvasGradient_.prototype.addColorStop = function(aOffset, aColor) {
- aColor = processStyle(aColor);
- this.colors_.push({offset: aOffset,
- color: aColor.color,
- alpha: aColor.alpha});
- };
-
- function CanvasPattern_() {}
-
- // set up externs
- G_vmlCanvasManager = G_vmlCanvasManager_;
- CanvasRenderingContext2D = CanvasRenderingContext2D_;
- CanvasGradient = CanvasGradient_;
- CanvasPattern = CanvasPattern_;
-
-})();
-
-} // if
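
excanvas.js was an Apache-licensed shim that emulated the 2D canvas API with VML on old Internet Explorer; its removal here goes hand in hand with dropping the conditional-comment script tags from the graph templates and the window.G_vmlCanvasManager check from mercurial.js below. The shim only ever installed itself behind the feature check visible at the top of the deleted file, which is always false in the browsers hgweb now targets:

    // Sketch of the guard excanvas.js used; with native canvas everywhere,
    // the body of this branch (the VML emulation) is dead weight.
    if (!document.createElement('canvas').getContext) {
        // ... VML-based CanvasRenderingContext2D emulation was wired up here ...
    }
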
--- a/mercurial/templates/static/followlines.js Mon Jan 08 16:07:51 2018 -0800
+++ b/mercurial/templates/static/followlines.js Mon Jan 22 17:53:02 2018 -0500
@@ -13,7 +13,7 @@
}
// URL to complement with "linerange" query parameter
var targetUri = sourcelines.dataset.logurl;
- if (typeof targetUri === 'undefined')Â {
+ if (typeof targetUri === 'undefined') {
return;
}
@@ -38,7 +38,7 @@
// element
var selectableElements = Array.prototype.filter.call(
sourcelines.children,
- function(x) { return x.tagName === selectableTag });
+ function(x) { return x.tagName === selectableTag; });
var btnTitleStart = 'start following lines history from here';
var btnTitleEnd = 'terminate line block selection here';
@@ -62,7 +62,7 @@
}
// extend DOM with CSS class for selection highlight and action buttons
- var followlinesButtons = []
+ var followlinesButtons = [];
for (var i = 0; i < selectableElements.length; i++) {
selectableElements[i].classList.add('followlines-select');
var btn = createButton();
@@ -114,7 +114,7 @@
if (parent === null) {
return null;
}
- if (element.tagName == selectableTag && parent.isSameNode(sourcelines)) {
+ if (element.tagName === selectableTag && parent.isSameNode(sourcelines)) {
return element;
}
return selectableParent(parent);
@@ -182,7 +182,7 @@
// compute line range (startId, endId)
var endId = parseInt(endElement.id.slice(1));
- if (endId == startId) {
+ if (endId === startId) {
// clicked twice the same line, cancel and reset initial state
// (CSS, event listener for selection start)
removeSelectedCSSClass();
--- a/mercurial/templates/static/mercurial.js Mon Jan 08 16:07:51 2018 -0800
+++ b/mercurial/templates/static/mercurial.js Mon Jan 22 17:53:02 2018 -0500
@@ -25,32 +25,29 @@
function Graph() {
this.canvas = document.getElementById('graph');
- if (window.G_vmlCanvasManager) this.canvas = window.G_vmlCanvasManager.initElement(this.canvas);
this.ctx = this.canvas.getContext('2d');
this.ctx.strokeStyle = 'rgb(0, 0, 0)';
this.ctx.fillStyle = 'rgb(0, 0, 0)';
- this.cur = [0, 0];
- this.line_width = 3;
this.bg = [0, 4];
this.cell = [2, 0];
this.columns = 0;
- this.revlink = '';
+
+}
- this.reset = function() {
+Graph.prototype = {
+ reset: function() {
this.bg = [0, 4];
this.cell = [2, 0];
this.columns = 0;
- document.getElementById('nodebgs').innerHTML = '';
- document.getElementById('graphnodes').innerHTML = '';
- }
+ },
- this.scale = function(height) {
+ scale: function(height) {
this.bg_height = height;
this.box_size = Math.floor(this.bg_height / 1.2);
this.cell_height = this.box_size;
- }
+ },
- this.setColor = function(color, bg, fg) {
+ setColor: function(color, bg, fg) {
// Set the colour.
//
@@ -62,9 +59,9 @@
// provides the multiplier that should be applied to
// the foreground colours.
var s;
- if(typeof color == "string") {
+ if(typeof color === "string") {
s = "#" + color;
- } else { //typeof color == "number"
+ } else { //typeof color === "number"
color %= colors.length;
var red = (colors[color][0] * fg) || bg;
var green = (colors[color][1] * fg) || bg;
@@ -78,9 +75,9 @@
this.ctx.fillStyle = s;
return s;
- }
+ },
- this.edge = function(x0, y0, x1, y1, color, width) {
+ edge: function(x0, y0, x1, y1, color, width) {
this.setColor(color, 0.0, 0.65);
if(width >= 0)
@@ -90,28 +87,106 @@
this.ctx.lineTo(x1, y1);
this.ctx.stroke();
- }
+ },
+
+ graphNodeCurrent: function(x, y, radius) {
+ this.ctx.lineWidth = 2;
+ this.ctx.beginPath();
+ this.ctx.arc(x, y, radius * 1.75, 0, Math.PI * 2, true);
+ this.ctx.stroke();
+ },
+
+ graphNodeClosing: function(x, y, radius) {
+ this.ctx.fillRect(x - radius, y - 1.5, radius * 2, 3);
+ },
- this.render = function(data) {
+ graphNodeUnstable: function(x, y, radius) {
+ var x30 = radius * Math.cos(Math.PI / 6);
+ var y30 = radius * Math.sin(Math.PI / 6);
+ this.ctx.lineWidth = 2;
+ this.ctx.beginPath();
+ this.ctx.moveTo(x, y - radius);
+ this.ctx.lineTo(x, y + radius);
+ this.ctx.moveTo(x - x30, y - y30);
+ this.ctx.lineTo(x + x30, y + y30);
+ this.ctx.moveTo(x - x30, y + y30);
+ this.ctx.lineTo(x + x30, y - y30);
+ this.ctx.stroke();
+ },
+
+ graphNodeObsolete: function(x, y, radius) {
+ var p45 = radius * Math.cos(Math.PI / 4);
+ this.ctx.lineWidth = 3;
+ this.ctx.beginPath();
+ this.ctx.moveTo(x - p45, y - p45);
+ this.ctx.lineTo(x + p45, y + p45);
+ this.ctx.moveTo(x - p45, y + p45);
+ this.ctx.lineTo(x + p45, y - p45);
+ this.ctx.stroke();
+ },
+
+ graphNodeNormal: function(x, y, radius) {
+ this.ctx.beginPath();
+ this.ctx.arc(x, y, radius, 0, Math.PI * 2, true);
+ this.ctx.fill();
+ },
- var backgrounds = '';
- var nodedata = '';
+ vertex: function(x, y, radius, color, parity, cur) {
+ this.ctx.save();
+ this.setColor(color, 0.25, 0.75);
+ if (cur.graphnode[0] === '@') {
+ this.graphNodeCurrent(x, y, radius);
+ }
+ switch (cur.graphnode.substr(-1)) {
+ case '_':
+ this.graphNodeClosing(x, y, radius);
+ break;
+ case '*':
+ this.graphNodeUnstable(x, y, radius);
+ break;
+ case 'x':
+ this.graphNodeObsolete(x, y, radius);
+ break;
+ default:
+ this.graphNodeNormal(x, y, radius);
+ }
+ this.ctx.restore();
- for (var i in data) {
+ var left = (this.bg_height - this.box_size) + (this.columns + 1) * this.box_size;
+ var item = document.querySelector('[data-node="' + cur.node + '"]');
+ if (item) {
+ item.style.paddingLeft = left + 'px';
+ }
+ },
+
+ render: function(data) {
+
+ var i, j, cur, line, start, end, color, x, y, x0, y0, x1, y1, column, radius;
+
+ var cols = 0;
+ for (i = 0; i < data.length; i++) {
+ cur = data[i];
+ for (j = 0; j < cur.edges.length; j++) {
+ line = cur.edges[j];
+ cols = Math.max(cols, line[0], line[1]);
+ }
+ }
+ this.canvas.width = (cols + 1) * this.bg_height;
+ this.canvas.height = (data.length + 1) * this.bg_height - 27;
+
+ for (i = 0; i < data.length; i++) {
var parity = i % 2;
this.cell[1] += this.bg_height;
this.bg[1] += this.bg_height;
- var cur = data[i];
- var node = cur[1];
- var edges = cur[2];
+ cur = data[i];
var fold = false;
var prevWidth = this.ctx.lineWidth;
- for (var j in edges) {
+ for (j = 0; j < cur.edges.length; j++) {
- line = edges[j];
+ line = cur.edges[j];
start = line[0];
end = line[1];
color = line[2];
@@ -126,8 +201,8 @@
this.columns += 1;
}
- if (start == this.columns && start > end) {
- var fold = true;
+ if (start === this.columns && start > end) {
+ fold = true;
}
x0 = this.cell[0] + this.box_size * start + this.box_size / 2;
@@ -142,26 +217,21 @@
// Draw the revision node in the right column
- column = node[0]
- color = node[1]
+ column = cur.vertex[0];
+ color = cur.vertex[1];
radius = this.box_size / 8;
x = this.cell[0] + this.box_size * column + this.box_size / 2;
y = this.bg[1] - this.bg_height / 2;
- var add = this.vertex(x, y, color, parity, cur);
- backgrounds += add[0];
- nodedata += add[1];
+ this.vertex(x, y, radius, color, parity, cur);
if (fold) this.columns -= 1;
}
- document.getElementById('nodebgs').innerHTML += backgrounds;
- document.getElementById('graphnodes').innerHTML += nodedata;
-
}
-}
+};
function process_dates(parentSelector){
@@ -228,10 +298,11 @@
return shortdate(once);
}
- for (unit in scales){
+ for (var unit in scales){
+ if (!scales.hasOwnProperty(unit)) { continue; }
var s = scales[unit];
var n = Math.floor(delta / s);
- if ((n >= 2) || (s == 1)){
+ if ((n >= 2) || (s === 1)){
if (future){
return format(n, unit) + ' from now';
} else {
@@ -259,7 +330,7 @@
function toggleDiffstat() {
var curdetails = document.getElementById('diffstatdetails').style.display;
- var curexpand = curdetails == 'none' ? 'inline' : 'none';
+ var curexpand = curdetails === 'none' ? 'inline' : 'none';
document.getElementById('diffstatdetails').style.display = curexpand;
document.getElementById('diffstatexpand').style.display = curdetails;
}
@@ -273,7 +344,8 @@
function setLinewrap(enable) {
var nodes = document.getElementsByClassName('sourcelines');
- for (var i = 0; i < nodes.length; i++) {
+ var i;
+ for (i = 0; i < nodes.length; i++) {
if (enable) {
nodes[i].classList.add('wrap');
} else {
@@ -282,7 +354,7 @@
}
var links = document.getElementsByClassName('linewraplink');
- for (var i = 0; i < links.length; i++) {
+ for (i = 0; i < links.length; i++) {
links[i].innerHTML = enable ? 'on' : 'off';
}
}
@@ -297,12 +369,12 @@
}
function makeRequest(url, method, onstart, onsuccess, onerror, oncomplete) {
- xfr = new XMLHttpRequest();
- xfr.onreadystatechange = function() {
- if (xfr.readyState === 4) {
+ var xhr = new XMLHttpRequest();
+ xhr.onreadystatechange = function() {
+ if (xhr.readyState === 4) {
try {
- if (xfr.status === 200) {
- onsuccess(xfr.responseText);
+ if (xhr.status === 200) {
+ onsuccess(xhr.responseText);
} else {
throw 'server error';
}
@@ -314,11 +386,11 @@
}
};
- xfr.open(method, url);
- xfr.overrideMimeType("text/xhtml; charset=" + document.characterSet.toLowerCase());
- xfr.send();
+ xhr.open(method, url);
+ xhr.overrideMimeType("text/xhtml; charset=" + document.characterSet.toLowerCase());
+ xhr.send();
onstart();
- return xfr;
+ return xhr;
}
function removeByClassName(className) {
@@ -338,14 +410,26 @@
element.insertAdjacentHTML('beforeend', format(formatStr, replacements));
}
+function adoptChildren(from, to) {
+ var nodes = from.children;
+ var curClass = 'c' + Date.now();
+ while (nodes.length) {
+ var node = nodes[0];
+ node = document.adoptNode(node);
+ node.classList.add(curClass);
+ to.appendChild(node);
+ }
+ process_dates('.' + curClass);
+}
+
function ajaxScrollInit(urlFormat,
nextPageVar,
nextPageVarGet,
containerSelector,
messageFormat,
mode) {
- updateInitiated = false;
- container = document.querySelector(containerSelector);
+ var updateInitiated = false;
+ var container = document.querySelector(containerSelector);
function scrollHandler() {
if (updateInitiated) {
@@ -354,8 +438,7 @@
var scrollHeight = document.documentElement.scrollHeight;
var clientHeight = document.documentElement.clientHeight;
- var scrollTop = document.body.scrollTop
- || document.documentElement.scrollTop;
+ var scrollTop = document.body.scrollTop || document.documentElement.scrollTop;
if (scrollHeight - (scrollTop + clientHeight) < 50) {
updateInitiated = true;
@@ -382,36 +465,20 @@
appendFormatHTML(container, messageFormat, message);
},
function onsuccess(htmlText) {
- if (mode == 'graph') {
- var sizes = htmlText.match(/^\s*<canvas id="graph" width="(\d+)" height="(\d+)"><\/canvas>$/m);
- var addWidth = sizes[1];
- var addHeight = sizes[2];
- addWidth = parseInt(addWidth);
- addHeight = parseInt(addHeight);
- graph.canvas.width = addWidth;
- graph.canvas.height = addHeight;
+ var doc = docFromHTML(htmlText);
+ if (mode === 'graph') {
+ var graph = window.graph;
var dataStr = htmlText.match(/^\s*var data = (.*);$/m)[1];
var data = JSON.parse(dataStr);
- if (data.length < nextPageVar) {
- nextPageVar = undefined;
- }
graph.reset();
+ adoptChildren(doc.querySelector('#graphnodes'), container.querySelector('#graphnodes'));
graph.render(data);
} else {
- var doc = docFromHTML(htmlText);
- var nodes = doc.querySelector(containerSelector).children;
- var curClass = 'c' + Date.now();
- while (nodes.length) {
- var node = nodes[0];
- node = document.adoptNode(node);
- node.classList.add(curClass);
- container.appendChild(node);
- }
- process_dates('.' + curClass);
+ adoptChildren(doc.querySelector(containerSelector), container);
}
- nextPageVar = nextPageVarGet(htmlText, nextPageVar);
+ nextPageVar = nextPageVarGet(htmlText);
},
function onerror(errorText) {
var message = {
@@ -450,7 +517,7 @@
"ignoreblanklines",
];
- var urlParams = new URLSearchParams(window.location.search);
+ var urlParams = new window.URLSearchParams(window.location.search);
function updateAndRefresh(e) {
var checkbox = e.target;
@@ -459,7 +526,7 @@
window.location.search = urlParams.toString();
}
- var allChecked = form.getAttribute("data-ignorews") == "1";
+ var allChecked = form.getAttribute("data-ignorews") === "1";
for (var i = 0; i < KEYS.length; i++) {
var key = KEYS[i];
@@ -469,11 +536,11 @@
continue;
}
- currentValue = form.getAttribute("data-" + key);
- checkbox.checked = currentValue != "0";
+ var currentValue = form.getAttribute("data-" + key);
+ checkbox.checked = currentValue !== "0";
// ignorews implies ignorewsamount and ignorewseol.
- if (allChecked && (key == "ignorewsamount" || key == "ignorewseol")) {
+ if (allChecked && (key === "ignorewsamount" || key === "ignorewseol")) {
checkbox.checked = true;
checkbox.disabled = true;
}
--- a/mercurial/templates/static/style-gitweb.css Mon Jan 08 16:07:51 2018 -0800
+++ b/mercurial/templates/static/style-gitweb.css Mon Jan 22 17:53:02 2018 -0500
@@ -61,8 +61,6 @@
}
td.indexlinks a:hover { background-color: #6666aa; }
div.pre { font-family:monospace; font-size:12px; white-space:pre; }
-div.diff_info { font-family:monospace; color:#000099; background-color:#edece6; font-style:italic; }
-div.index_include { border:solid #d9d8d1; border-width:0px 0px 1px; padding:12px 8px; }
.search {
margin-right: 8px;
@@ -122,6 +120,18 @@
background-color: #ffaaff;
border-color: #ffccff #ff00ee #ff00ee #ffccff;
}
+span.logtags span.phasetag {
+ background-color: #dfafff;
+ border-color: #e2b8ff #ce48ff #ce48ff #e2b8ff;
+}
+span.logtags span.obsoletetag {
+ background-color: #dddddd;
+ border-color: #e4e4e4 #a3a3a3 #a3a3a3 #e4e4e4;
+}
+span.logtags span.instabilitytag {
+ background-color: #ffb1c0;
+ border-color: #ffbbc8 #ff4476 #ff4476 #ffbbc8;
+}
span.logtags span.tagtag {
background-color: #ffffaa;
border-color: #ffffcc #ffee00 #ffee00 #ffffcc;
@@ -191,10 +201,9 @@
}
div#followlines {
- background-color: #B7B7B7;
- border: 1px solid #CCC;
- border-radius: 5px;
- padding: 4px;
+ background-color: #FFF;
+ border: 1px solid #d9d8d1;
+ padding: 5px;
position: fixed;
}
@@ -293,30 +302,26 @@
margin: 0;
}
-ul#nodebgs {
+ul#graphnodes {
list-style: none inside none;
padding: 0;
margin: 0;
- top: -0.7em;
-}
-
-ul#graphnodes li, ul#nodebgs li {
- height: 39px;
}
-ul#graphnodes {
+ul#graphnodes li {
+ position: relative;
+ height: 37px;
+ overflow: visible;
+ padding-top: 2px;
+}
+
+ul#graphnodes li .fg {
position: absolute;
z-index: 10;
- top: -0.8em;
- list-style: none inside none;
- padding: 0;
}
ul#graphnodes li .info {
- display: block;
font-size: 100%;
- position: relative;
- top: -3px;
font-style: italic;
}
--- a/mercurial/templates/static/style-monoblue.css Mon Jan 08 16:07:51 2018 -0800
+++ b/mercurial/templates/static/style-monoblue.css Mon Jan 22 17:53:02 2018 -0500
@@ -233,6 +233,18 @@
background-color: #ffaaff;
border-color: #ffccff #ff00ee #ff00ee #ffccff;
}
+span.logtags span.phasetag {
+ background-color: #dfafff;
+ border-color: #e2b8ff #ce48ff #ce48ff #e2b8ff;
+}
+span.logtags span.obsoletetag {
+ background-color: #dddddd;
+ border-color: #e4e4e4 #a3a3a3 #a3a3a3 #e4e4e4;
+}
+span.logtags span.instabilitytag {
+ background-color: #ffb1c0;
+ border-color: #ffbbc8 #ff4476 #ff4476 #ffbbc8;
+}
span.logtags span.tagtag {
background-color: #ffffaa;
border-color: #ffffcc #ffee00 #ffee00 #ffffcc;
@@ -309,6 +321,7 @@
pre.sourcelines.stripes > :nth-child(4n+1):hover + :nth-child(4n+2),
pre.sourcelines.stripes > :nth-child(4n+3):hover + :nth-child(4n+4) { background-color: #D5E1E6; }
+tr:target td,
pre.sourcelines > span:target,
pre.sourcelines.stripes > span:target {
background-color: #bfdfff;
@@ -456,7 +469,7 @@
/** canvas **/
div#wrapper {
position: relative;
- font-size: 1.2em;
+ font-size: 1.1em;
}
canvas {
@@ -465,32 +478,33 @@
top: -0.7em;
}
-ul#nodebgs li.parity0 {
+ul#graphnodes li.parity0 {
background: #F1F6F7;
}
-ul#nodebgs li.parity1 {
+ul#graphnodes li.parity1 {
background: #FFFFFF;
}
ul#graphnodes {
- position: absolute;
- z-index: 10;
- top: 7px;
list-style: none inside none;
+ margin: 0;
+ padding: 0;
}
-ul#nodebgs {
- list-style: none inside none;
+ul#graphnodes li {
+ height: 37px;
+ overflow: visible;
+ padding-top: 2px;
}
-ul#graphnodes li, ul#nodebgs li {
- height: 39px;
+ul#graphnodes li .fg {
+ position: absolute;
+ z-index: 10;
}
ul#graphnodes li .info {
- display: block;
- position: relative;
+ margin-top: 2px;
}
/** end of canvas **/
--- a/mercurial/templates/static/style-paper.css Mon Jan 08 16:07:51 2018 -0800
+++ b/mercurial/templates/static/style-paper.css Mon Jan 22 17:53:02 2018 -0500
@@ -137,6 +137,33 @@
margin: 1em 0;
}
+.phase {
+ color: #999;
+ font-size: 70%;
+ border-bottom: 1px dotted #999;
+ font-weight: normal;
+ margin-left: .5em;
+ vertical-align: baseline;
+}
+
+.obsolete {
+ color: #999;
+ font-size: 70%;
+ border-bottom: 1px dashed #999;
+ font-weight: normal;
+ margin-left: .5em;
+ vertical-align: baseline;
+}
+
+.instability {
+ color: #000;
+ font-size: 70%;
+ border-bottom: 1px solid #000;
+ font-weight: normal;
+ margin-left: .5em;
+ vertical-align: baseline;
+}
+
.tag {
color: #999;
font-size: 70%;
@@ -165,10 +192,6 @@
vertical-align: baseline;
}
-h3 .branchname {
- font-size: 80%;
-}
-
/* Common */
pre { margin: 0; }
@@ -190,6 +213,7 @@
}
.bigtable td {
+ padding: 1px 4px;
vertical-align: top;
}
@@ -295,10 +319,9 @@
}
div#followlines {
- background-color: #B7B7B7;
- border: 1px solid #CCC;
- border-radius: 5px;
- padding: 4px;
+ background-color: #FFF;
+ border: 1px solid #999;
+ padding: 5px;
position: fixed;
}
@@ -409,7 +432,6 @@
text-align: right;
font-weight: normal;
color: #999;
- margin-right: .5em;
vertical-align: top;
}
@@ -438,29 +460,23 @@
}
ul#graphnodes {
- position: absolute;
- z-index: 10;
- top: -1.0em;
- list-style: none inside none;
- padding: 0;
-}
-
-ul#nodebgs {
list-style: none inside none;
padding: 0;
margin: 0;
- top: -0.7em;
}
-ul#graphnodes li, ul#nodebgs li {
+ul#graphnodes li {
height: 39px;
+ overflow: visible;
+}
+
+ul#graphnodes li .fg {
+ position: absolute;
+ z-index: 10;
}
ul#graphnodes li .info {
- display: block;
font-size: 70%;
- position: relative;
- top: -3px;
}
/* Comparison */
--- a/mercurial/templates/static/style.css Mon Jan 08 16:07:51 2018 -0800
+++ b/mercurial/templates/static/style.css Mon Jan 22 17:53:02 2018 -0500
@@ -95,28 +95,23 @@
margin: 0;
}
-ul#nodebgs {
+ul#graphnodes {
list-style: none inside none;
padding: 0;
margin: 0;
- top: -0.7em;
-}
-
-ul#graphnodes li, ul#nodebgs li {
- height: 39px;
}
-ul#graphnodes {
+ul#graphnodes li {
+ height: 37px;
+ overflow: visible;
+ padding-top: 2px;
+}
+
+ul#graphnodes li .fg {
position: absolute;
z-index: 10;
- top: -0.85em;
- list-style: none inside none;
- padding: 0;
}
ul#graphnodes li .info {
- display: block;
font-size: 70%;
- position: relative;
- top: -1px;
}
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/mercurial/thirdparty/selectors2.py Mon Jan 22 17:53:02 2018 -0500
@@ -0,0 +1,743 @@
+""" Back-ported, durable, and portable selectors """
+
+# MIT License
+#
+# Copyright (c) 2017 Seth Michael Larson
+#
+# Permission is hereby granted, free of charge, to any person obtaining a copy
+# of this software and associated documentation files (the "Software"), to deal
+# in the Software without restriction, including without limitation the rights
+# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+# copies of the Software, and to permit persons to whom the Software is
+# furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice shall be included in all
+# copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+# SOFTWARE.
+
+from __future__ import absolute_import
+
+import collections
+import errno
+import math
+import select
+import socket
+import sys
+import time
+
+from .. import pycompat
+
+namedtuple = collections.namedtuple
+Mapping = collections.Mapping
+
+try:
+ monotonic = time.monotonic
+except AttributeError:
+ monotonic = time.time
+
+__author__ = 'Seth Michael Larson'
+__email__ = 'sethmichaellarson@protonmail.com'
+__version__ = '2.0.0'
+__license__ = 'MIT'
+__url__ = 'https://www.github.com/SethMichaelLarson/selectors2'
+
+__all__ = ['EVENT_READ',
+ 'EVENT_WRITE',
+ 'SelectorKey',
+ 'DefaultSelector',
+ 'BaseSelector']
+
+EVENT_READ = (1 << 0)
+EVENT_WRITE = (1 << 1)
+_DEFAULT_SELECTOR = None
+_SYSCALL_SENTINEL = object() # Sentinel in case a system call returns None.
+_ERROR_TYPES = (OSError, IOError, socket.error)
+
+
+SelectorKey = namedtuple('SelectorKey', ['fileobj', 'fd', 'events', 'data'])
+
+
+class _SelectorMapping(Mapping):
+ """ Mapping of file objects to selector keys """
+
+ def __init__(self, selector):
+ self._selector = selector
+
+ def __len__(self):
+ return len(self._selector._fd_to_key)
+
+ def __getitem__(self, fileobj):
+ try:
+ fd = self._selector._fileobj_lookup(fileobj)
+ return self._selector._fd_to_key[fd]
+ except KeyError:
+ raise KeyError("{0!r} is not registered.".format(fileobj))
+
+ def __iter__(self):
+ return iter(self._selector._fd_to_key)
+
+
+def _fileobj_to_fd(fileobj):
+ """ Return a file descriptor from a file object. If
+ given an integer will simply return that integer back. """
+ if isinstance(fileobj, int):
+ fd = fileobj
+ else:
+ try:
+ fd = int(fileobj.fileno())
+ except (AttributeError, TypeError, ValueError):
+ raise ValueError("Invalid file object: {0!r}".format(fileobj))
+ if fd < 0:
+ raise ValueError("Invalid file descriptor: {0}".format(fd))
+ return fd
+
+
+class BaseSelector(object):
+ """ Abstract Selector class
+
+ A selector supports registering file objects to be monitored
+ for specific I/O events.
+
+ A file object is a file descriptor or any object with a
+ `fileno()` method. An arbitrary object can be attached to the
+ file object which can be used for example to store context info,
+ a callback, etc.
+
+ A selector can use various implementations (select(), poll(), epoll(),
+ and kqueue()) depending on the platform. The 'DefaultSelector' class uses
+ the most efficient implementation for the current platform.
+ """
+ def __init__(self):
+ # Maps file descriptors to keys.
+ self._fd_to_key = {}
+
+ # Read-only mapping returned by get_map()
+ self._map = _SelectorMapping(self)
+
+ def _fileobj_lookup(self, fileobj):
+ """ Return a file descriptor from a file object.
+ This wraps _fileobj_to_fd() to do an exhaustive
+ search in case the object is invalid but we still
+ have it in our map. Used by unregister() so we can
+ unregister an object that was previously registered
+ even if it is closed. It is also used by _SelectorMapping
+ """
+ try:
+ return _fileobj_to_fd(fileobj)
+ except ValueError:
+
+ # Search through all our mapped keys.
+ for key in self._fd_to_key.values():
+ if key.fileobj is fileobj:
+ return key.fd
+
+ # Raise ValueError after all.
+ raise
+
+ def register(self, fileobj, events, data=None):
+ """ Register a file object for a set of events to monitor. """
+ if (not events) or (events & ~(EVENT_READ | EVENT_WRITE)):
+ raise ValueError("Invalid events: {0!r}".format(events))
+
+ key = SelectorKey(fileobj, self._fileobj_lookup(fileobj), events, data)
+
+ if key.fd in self._fd_to_key:
+ raise KeyError("{0!r} (FD {1}) is already registered"
+ .format(fileobj, key.fd))
+
+ self._fd_to_key[key.fd] = key
+ return key
+
+ def unregister(self, fileobj):
+ """ Unregister a file object from being monitored. """
+ try:
+ key = self._fd_to_key.pop(self._fileobj_lookup(fileobj))
+ except KeyError:
+ raise KeyError("{0!r} is not registered".format(fileobj))
+
+ # Getting the fileno of a closed socket on Windows errors with EBADF.
+ except socket.error as err:
+ if err.errno != errno.EBADF:
+ raise
+ else:
+ for key in self._fd_to_key.values():
+ if key.fileobj is fileobj:
+ self._fd_to_key.pop(key.fd)
+ break
+ else:
+ raise KeyError("{0!r} is not registered".format(fileobj))
+ return key
+
+ def modify(self, fileobj, events, data=None):
+ """ Change a registered file object monitored events and data. """
+ # NOTE: Some subclasses optimize this operation even further.
+ try:
+ key = self._fd_to_key[self._fileobj_lookup(fileobj)]
+ except KeyError:
+ raise KeyError("{0!r} is not registered".format(fileobj))
+
+ if events != key.events:
+ self.unregister(fileobj)
+ key = self.register(fileobj, events, data)
+
+ elif data != key.data:
+ # Use a shortcut to update the data.
+ key = key._replace(data=data)
+ self._fd_to_key[key.fd] = key
+
+ return key
+
+ def select(self, timeout=None):
+ """ Perform the actual selection until some monitored file objects
+ are ready or the timeout expires. """
+ raise NotImplementedError()
+
+ def close(self):
+ """ Close the selector. This must be called to ensure that all
+ underlying resources are freed. """
+ self._fd_to_key.clear()
+ self._map = None
+
+ def get_key(self, fileobj):
+ """ Return the key associated with a registered file object. """
+ mapping = self.get_map()
+ if mapping is None:
+ raise RuntimeError("Selector is closed")
+ try:
+ return mapping[fileobj]
+ except KeyError:
+ raise KeyError("{0!r} is not registered".format(fileobj))
+
+ def get_map(self):
+ """ Return a mapping of file objects to selector keys """
+ return self._map
+
+ def _key_from_fd(self, fd):
+ """ Return the key associated to a given file descriptor
+ Return None if it is not found. """
+ try:
+ return self._fd_to_key[fd]
+ except KeyError:
+ return None
+
+ def __enter__(self):
+ return self
+
+ def __exit__(self, *_):
+ self.close()
+
+
+# Almost all platforms have select.select()
+if hasattr(select, "select"):
+ class SelectSelector(BaseSelector):
+ """ Select-based selector. """
+ def __init__(self):
+ super(SelectSelector, self).__init__()
+ self._readers = set()
+ self._writers = set()
+
+ def register(self, fileobj, events, data=None):
+ key = super(SelectSelector, self).register(fileobj, events, data)
+ if events & EVENT_READ:
+ self._readers.add(key.fd)
+ if events & EVENT_WRITE:
+ self._writers.add(key.fd)
+ return key
+
+ def unregister(self, fileobj):
+ key = super(SelectSelector, self).unregister(fileobj)
+ self._readers.discard(key.fd)
+ self._writers.discard(key.fd)
+ return key
+
+ def select(self, timeout=None):
+ # Selecting on empty lists on Windows errors out.
+ if not len(self._readers) and not len(self._writers):
+ return []
+
+ timeout = None if timeout is None else max(timeout, 0.0)
+ ready = []
+ r, w, _ = _syscall_wrapper(self._wrap_select, True, self._readers,
+ self._writers, timeout)
+ r = set(r)
+ w = set(w)
+ for fd in r | w:
+ events = 0
+ if fd in r:
+ events |= EVENT_READ
+ if fd in w:
+ events |= EVENT_WRITE
+
+ key = self._key_from_fd(fd)
+ if key:
+ ready.append((key, events & key.events))
+ return ready
+
+ def _wrap_select(self, r, w, timeout=None):
+ """ Wrapper for select.select because timeout is a positional arg """
+ return select.select(r, w, [], timeout)
+
+ __all__.append('SelectSelector')
+
+ # Jython has a different implementation of .fileno() for socket objects.
+ if pycompat.isjython:
+ class _JythonSelectorMapping(object):
+ """ This is an implementation of _SelectorMapping that is built
+ for use specifically with Jython, which does not provide a hashable
+ value from socket.socket.fileno(). """
+
+ def __init__(self, selector):
+ assert isinstance(selector, JythonSelectSelector)
+ self._selector = selector
+
+ def __len__(self):
+ return len(self._selector._sockets)
+
+ def __getitem__(self, fileobj):
+ for sock, key in self._selector._sockets:
+ if sock is fileobj:
+ return key
+ else:
+ raise KeyError("{0!r} is not registered.".format(fileobj))
+
+ class JythonSelectSelector(SelectSelector):
+ """ This is an implementation of SelectSelector that is for Jython
+ which works around that Jython's socket.socket.fileno() does not
+ return an integer fd value. All SelectorKey.fd will be equal to -1
+ and should not be used. This instead uses object id to compare fileobj
+ and will only use select.select as it's the only selector that allows
+ directly passing in socket objects rather than registering fds.
+ See: http://bugs.jython.org/issue1678
+ https://wiki.python.org/jython/NewSocketModule#socket.fileno.28.29_does_not_return_an_integer
+ """
+
+ def __init__(self):
+ super(JythonSelectSelector, self).__init__()
+
+ self._sockets = [] # Uses a list of tuples instead of dictionary.
+ self._map = _JythonSelectorMapping(self)
+ self._readers = []
+ self._writers = []
+
+ # Jython has a select.cpython_compatible_select function in older versions.
+ self._select_func = getattr(select, 'cpython_compatible_select', select.select)
+
+ def register(self, fileobj, events, data=None):
+ for sock, _ in self._sockets:
+ if sock is fileobj:
+ raise KeyError("{0!r} is already registered"
+ .format(fileobj, sock))
+
+ key = SelectorKey(fileobj, -1, events, data)
+ self._sockets.append((fileobj, key))
+
+ if events & EVENT_READ:
+ self._readers.append(fileobj)
+ if events & EVENT_WRITE:
+ self._writers.append(fileobj)
+ return key
+
+ def unregister(self, fileobj):
+ for i, (sock, key) in enumerate(self._sockets):
+ if sock is fileobj:
+ break
+ else:
+ raise KeyError("{0!r} is not registered.".format(fileobj))
+
+ if key.events & EVENT_READ:
+ self._readers.remove(fileobj)
+ if key.events & EVENT_WRITE:
+ self._writers.remove(fileobj)
+
+ del self._sockets[i]
+ return key
+
+ def _wrap_select(self, r, w, timeout=None):
+ """ Wrapper for select.select because timeout is a positional arg """
+ return self._select_func(r, w, [], timeout)
+
+ __all__.append('JythonSelectSelector')
+ SelectSelector = JythonSelectSelector # Override so the wrong selector isn't used.
+
+
+if hasattr(select, "poll"):
+ class PollSelector(BaseSelector):
+ """ Poll-based selector """
+ def __init__(self):
+ super(PollSelector, self).__init__()
+ self._poll = select.poll()
+
+ def register(self, fileobj, events, data=None):
+ key = super(PollSelector, self).register(fileobj, events, data)
+ event_mask = 0
+ if events & EVENT_READ:
+ event_mask |= select.POLLIN
+ if events & EVENT_WRITE:
+ event_mask |= select.POLLOUT
+ self._poll.register(key.fd, event_mask)
+ return key
+
+ def unregister(self, fileobj):
+ key = super(PollSelector, self).unregister(fileobj)
+ self._poll.unregister(key.fd)
+ return key
+
+ def _wrap_poll(self, timeout=None):
+ """ Wrapper function for select.poll.poll() so that
+ _syscall_wrapper can work with only seconds. """
+ if timeout is not None:
+ if timeout <= 0:
+ timeout = 0
+ else:
+ # select.poll.poll() has a resolution of 1 millisecond,
+ # round away from zero to wait *at least* timeout seconds.
+ timeout = math.ceil(timeout * 1000)
+
+ result = self._poll.poll(timeout)
+ return result
+
+ def select(self, timeout=None):
+ ready = []
+ fd_events = _syscall_wrapper(self._wrap_poll, True, timeout=timeout)
+ for fd, event_mask in fd_events:
+ events = 0
+ if event_mask & ~select.POLLIN:
+ events |= EVENT_WRITE
+ if event_mask & ~select.POLLOUT:
+ events |= EVENT_READ
+
+ key = self._key_from_fd(fd)
+ if key:
+ ready.append((key, events & key.events))
+
+ return ready
+
+ __all__.append('PollSelector')
+
+if hasattr(select, "epoll"):
+ class EpollSelector(BaseSelector):
+ """ Epoll-based selector """
+ def __init__(self):
+ super(EpollSelector, self).__init__()
+ self._epoll = select.epoll()
+
+ def fileno(self):
+ return self._epoll.fileno()
+
+ def register(self, fileobj, events, data=None):
+ key = super(EpollSelector, self).register(fileobj, events, data)
+ events_mask = 0
+ if events & EVENT_READ:
+ events_mask |= select.EPOLLIN
+ if events & EVENT_WRITE:
+ events_mask |= select.EPOLLOUT
+ _syscall_wrapper(self._epoll.register, False, key.fd, events_mask)
+ return key
+
+ def unregister(self, fileobj):
+ key = super(EpollSelector, self).unregister(fileobj)
+ try:
+ _syscall_wrapper(self._epoll.unregister, False, key.fd)
+ except _ERROR_TYPES:
+ # This can occur when the fd was closed since registry.
+ pass
+ return key
+
+ def select(self, timeout=None):
+ if timeout is not None:
+ if timeout <= 0:
+ timeout = 0.0
+ else:
+ # select.epoll.poll() has a resolution of 1 millisecond
+ # but luckily takes seconds so we don't need a wrapper
+ # like PollSelector. Just for better rounding.
+ timeout = math.ceil(timeout * 1000) * 0.001
+ timeout = float(timeout)
+ else:
+ timeout = -1.0 # epoll.poll() must have a float.
+
+ # We always want at least 1 to ensure that select can be called
+ # with no file descriptors registered. Otherwise will fail.
+ max_events = max(len(self._fd_to_key), 1)
+
+ ready = []
+ fd_events = _syscall_wrapper(self._epoll.poll, True,
+ timeout=timeout,
+ maxevents=max_events)
+ for fd, event_mask in fd_events:
+ events = 0
+ if event_mask & ~select.EPOLLIN:
+ events |= EVENT_WRITE
+ if event_mask & ~select.EPOLLOUT:
+ events |= EVENT_READ
+
+ key = self._key_from_fd(fd)
+ if key:
+ ready.append((key, events & key.events))
+ return ready
+
+ def close(self):
+ self._epoll.close()
+ super(EpollSelector, self).close()
+
+ __all__.append('EpollSelector')
+
+
+if hasattr(select, "devpoll"):
+ class DevpollSelector(BaseSelector):
+ """Solaris /dev/poll selector."""
+
+ def __init__(self):
+ super(DevpollSelector, self).__init__()
+ self._devpoll = select.devpoll()
+
+ def fileno(self):
+ return self._devpoll.fileno()
+
+ def register(self, fileobj, events, data=None):
+ key = super(DevpollSelector, self).register(fileobj, events, data)
+ poll_events = 0
+ if events & EVENT_READ:
+ poll_events |= select.POLLIN
+ if events & EVENT_WRITE:
+ poll_events |= select.POLLOUT
+ self._devpoll.register(key.fd, poll_events)
+ return key
+
+ def unregister(self, fileobj):
+ key = super(DevpollSelector, self).unregister(fileobj)
+ self._devpoll.unregister(key.fd)
+ return key
+
+ def _wrap_poll(self, timeout=None):
+ """ Wrapper function for select.poll.poll() so that
+ _syscall_wrapper can work with only seconds. """
+ if timeout is not None:
+ if timeout <= 0:
+ timeout = 0
+ else:
+ # select.devpoll.poll() has a resolution of 1 millisecond,
+ # round away from zero to wait *at least* timeout seconds.
+ timeout = math.ceil(timeout * 1000)
+
+ result = self._devpoll.poll(timeout)
+ return result
+
+ def select(self, timeout=None):
+ ready = []
+ fd_events = _syscall_wrapper(self._wrap_poll, True, timeout=timeout)
+ for fd, event_mask in fd_events:
+ events = 0
+ if event_mask & ~select.POLLIN:
+ events |= EVENT_WRITE
+ if event_mask & ~select.POLLOUT:
+ events |= EVENT_READ
+
+ key = self._key_from_fd(fd)
+ if key:
+ ready.append((key, events & key.events))
+
+ return ready
+
+ def close(self):
+ self._devpoll.close()
+ super(DevpollSelector, self).close()
+
+ __all__.append('DevpollSelector')
+
+
+if hasattr(select, "kqueue"):
+ class KqueueSelector(BaseSelector):
+ """ Kqueue / Kevent-based selector """
+ def __init__(self):
+ super(KqueueSelector, self).__init__()
+ self._kqueue = select.kqueue()
+
+ def fileno(self):
+ return self._kqueue.fileno()
+
+ def register(self, fileobj, events, data=None):
+ key = super(KqueueSelector, self).register(fileobj, events, data)
+ if events & EVENT_READ:
+ kevent = select.kevent(key.fd,
+ select.KQ_FILTER_READ,
+ select.KQ_EV_ADD)
+
+ _syscall_wrapper(self._kqueue.control, False, [kevent], 0, 0)
+
+ if events & EVENT_WRITE:
+ kevent = select.kevent(key.fd,
+ select.KQ_FILTER_WRITE,
+ select.KQ_EV_ADD)
+
+ _syscall_wrapper(self._kqueue.control, False, [kevent], 0, 0)
+
+ return key
+
+ def unregister(self, fileobj):
+ key = super(KqueueSelector, self).unregister(fileobj)
+ if key.events & EVENT_READ:
+ kevent = select.kevent(key.fd,
+ select.KQ_FILTER_READ,
+ select.KQ_EV_DELETE)
+ try:
+ _syscall_wrapper(self._kqueue.control, False, [kevent], 0, 0)
+ except _ERROR_TYPES:
+ pass
+ if key.events & EVENT_WRITE:
+ kevent = select.kevent(key.fd,
+ select.KQ_FILTER_WRITE,
+ select.KQ_EV_DELETE)
+ try:
+ _syscall_wrapper(self._kqueue.control, False, [kevent], 0, 0)
+ except _ERROR_TYPES:
+ pass
+
+ return key
+
+ def select(self, timeout=None):
+ if timeout is not None:
+ timeout = max(timeout, 0)
+
+ max_events = len(self._fd_to_key) * 2
+ ready_fds = {}
+
+ kevent_list = _syscall_wrapper(self._kqueue.control, True,
+ None, max_events, timeout)
+
+ for kevent in kevent_list:
+ fd = kevent.ident
+ event_mask = kevent.filter
+ events = 0
+ if event_mask == select.KQ_FILTER_READ:
+ events |= EVENT_READ
+ if event_mask == select.KQ_FILTER_WRITE:
+ events |= EVENT_WRITE
+
+ key = self._key_from_fd(fd)
+ if key:
+ if key.fd not in ready_fds:
+ ready_fds[key.fd] = (key, events & key.events)
+ else:
+ old_events = ready_fds[key.fd][1]
+ ready_fds[key.fd] = (key, (events | old_events) & key.events)
+
+ return list(ready_fds.values())
+
+ def close(self):
+ self._kqueue.close()
+ super(KqueueSelector, self).close()
+
+ __all__.append('KqueueSelector')
+
+
+def _can_allocate(struct):
+ """ Checks that select structs can be allocated by the underlying
+ operating system, not just advertised by the select module. We don't
+ check select() because we'll be hopeful that most platforms that
+ don't have it available will not advertise it. (ie: GAE) """
+ try:
+ # select.poll() objects won't fail until used.
+ if struct == 'poll':
+ p = select.poll()
+ p.poll(0)
+
+ # All others will fail on allocation.
+ else:
+ getattr(select, struct)().close()
+ return True
+ except (OSError, AttributeError):
+ return False
+
+
+# Python 3.5 uses a more direct route to wrap system calls to increase speed.
+if sys.version_info >= (3, 5):
+ def _syscall_wrapper(func, _, *args, **kwargs):
+ """ This is the short-circuit version of the below logic
+ because in Python 3.5+ all selectors restart system calls. """
+ return func(*args, **kwargs)
+else:
+ def _syscall_wrapper(func, recalc_timeout, *args, **kwargs):
+ """ Wrapper function for syscalls that could fail due to EINTR.
+ All functions should be retried if there is time left in the timeout
+ in accordance with PEP 475. """
+ timeout = kwargs.get("timeout", None)
+ if timeout is None:
+ expires = None
+ recalc_timeout = False
+ else:
+ timeout = float(timeout)
+ if timeout < 0.0: # Timeout less than 0 treated as no timeout.
+ expires = None
+ else:
+ expires = monotonic() + timeout
+
+ args = list(args)
+ if recalc_timeout and "timeout" not in kwargs:
+ raise ValueError(
+ "Timeout must be in args or kwargs to be recalculated")
+
+ result = _SYSCALL_SENTINEL
+ while result is _SYSCALL_SENTINEL:
+ try:
+ result = func(*args, **kwargs)
+ # OSError is thrown by select.select
+ # IOError is thrown by select.epoll.poll
+ # select.error is thrown by select.poll.poll
+ # Aren't we thankful for Python 3.x rework for exceptions?
+ except (OSError, IOError, select.error) as e:
+ # select.error wasn't a subclass of OSError in the past.
+ errcode = None
+ if hasattr(e, "errno"):
+ errcode = e.errno
+ elif hasattr(e, "args"):
+ errcode = e.args[0]
+
+ # Also test for the Windows equivalent of EINTR.
+ is_interrupt = (errcode == errno.EINTR or (hasattr(errno, "WSAEINTR") and
+ errcode == errno.WSAEINTR))
+
+ if is_interrupt:
+ if expires is not None:
+ current_time = monotonic()
+ if current_time > expires:
+ raise OSError(errno=errno.ETIMEDOUT)
+ if recalc_timeout:
+ if "timeout" in kwargs:
+ kwargs["timeout"] = expires - current_time
+ continue
+ raise
+ return result
+
+
+# Choose the best implementation, roughly:
+# kqueue == devpoll == epoll > poll > select
+# select() also can't accept a FD > FD_SETSIZE (usually around 1024)
+def DefaultSelector():
+ """ This function serves as a first call for DefaultSelector to
+ detect if the select module is being monkey-patched incorrectly
+ by eventlet, greenlet, and preserve proper behavior. """
+ global _DEFAULT_SELECTOR
+ if _DEFAULT_SELECTOR is None:
+ if pycompat.isjython:
+ _DEFAULT_SELECTOR = JythonSelectSelector
+ elif _can_allocate('kqueue'):
+ _DEFAULT_SELECTOR = KqueueSelector
+ elif _can_allocate('devpoll'):
+ _DEFAULT_SELECTOR = DevpollSelector
+ elif _can_allocate('epoll'):
+ _DEFAULT_SELECTOR = EpollSelector
+ elif _can_allocate('poll'):
+ _DEFAULT_SELECTOR = PollSelector
+ elif hasattr(select, 'select'):
+ _DEFAULT_SELECTOR = SelectSelector
+ else: # Platform-specific: AppEngine
+ raise RuntimeError('Platform does not have a selector.')
+ return _DEFAULT_SELECTOR()
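
The vendored selectors2 module above mirrors the standard-library selectors API (register/unregister/select, EVENT_READ/EVENT_WRITE). A minimal usage sketch, assuming it is run from a tree where mercurial.thirdparty.selectors2 is importable; the socketpair is purely illustrative (POSIX / modern Python):

# Usage sketch for the vendored selectors2 module (illustrative only).
import socket

from mercurial.thirdparty import selectors2

rsock, wsock = socket.socketpair()       # toy file objects to monitor
sel = selectors2.DefaultSelector()       # picks kqueue/devpoll/epoll/poll/select
sel.register(rsock, selectors2.EVENT_READ, data='reader')

wsock.send(b'ping')                      # make the read side ready
for key, events in sel.select(timeout=1.0):
    if events & selectors2.EVENT_READ:
        print(key.data, key.fileobj.recv(4))

sel.unregister(rsock)
sel.close()
rsock.close()
wsock.close()
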
--- a/mercurial/ui.py Mon Jan 08 16:07:51 2018 -0800
+++ b/mercurial/ui.py Mon Jan 22 17:53:02 2018 -0500
@@ -49,6 +49,10 @@
[ui]
# The rollback command is dangerous. As a rule, don't use it.
rollback = False
+# Make `hg status` report copy information
+statuscopies = yes
+# Prefer curses UIs when available. Revert to plain-text with `text`.
+interface = curses
[commands]
# Make `hg status` emit cwd-relative paths by default.
@@ -58,6 +62,7 @@
[diff]
git = 1
+showfunc = 1
"""
samplehgrcs = {
@@ -695,6 +700,9 @@
>>> u.setconfig(s, b'list1', b'this,is "a small" ,test')
>>> u.configlist(s, b'list1')
['this', 'is', 'a small', 'test']
+ >>> u.setconfig(s, b'list2', b'this, is "a small" , test ')
+ >>> u.configlist(s, b'list2')
+ ['this', 'is', 'a small', 'test']
"""
# default is not always a list
v = self.configwith(config.parselist, section, name, default,
@@ -886,9 +894,9 @@
"cmdname.type" is recommended. For example, status issues
a label of "status.modified" for modified files.
'''
- if self._buffers and not opts.get('prompt', False):
+ if self._buffers and not opts.get(r'prompt', False):
if self._bufferapplylabels:
- label = opts.get('label', '')
+ label = opts.get(r'label', '')
self._buffers[-1].extend(self.label(a, label) for a in args)
else:
self._buffers[-1].extend(args)
@@ -899,7 +907,7 @@
else:
msgs = args
if self._colormode is not None:
- label = opts.get('label', '')
+ label = opts.get(r'label', '')
msgs = [self.label(a, label) for a in args]
self._write(*msgs, **opts)
@@ -927,7 +935,7 @@
else:
msgs = args
if self._colormode is not None:
- label = opts.get('label', '')
+ label = opts.get(r'label', '')
msgs = [self.label(a, label) for a in args]
self._write_err(*msgs, **opts)
@@ -1602,7 +1610,7 @@
stack.
"""
if not self.configbool('devel', 'all-warnings'):
- if config is not None and not self.configbool('devel', config):
+ if config is None or not self.configbool('devel', config):
return
msg = 'devel-warn: ' + msg
stacklevel += 1 # get in develwarn
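
The r'...' prefixes on the opts.get() keys above keep those literals as native str: this tree's Python 3 source transformer byte-prefixes unprefixed literals, while **kwargs keys are always native str under Python 3, so a bytes key would never match. A tiny standalone illustration of that str-key behaviour (the function and key names are made up):

# Python 3 behaviour motivating the r'' keys: **kwargs keys arrive as str.
def write(*args, **opts):
    return opts.get(r'label', r'')

print(write(b'hello', label='status.modified'))  # -> 'status.modified'
print({'label': 1}.get(b'label'))                # -> None on Python 3
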
--- a/mercurial/upgrade.py Mon Jan 08 16:07:51 2018 -0800
+++ b/mercurial/upgrade.py Mon Jan 22 17:53:02 2018 -0500
@@ -14,6 +14,8 @@
from . import (
changelog,
error,
+ filelog,
+ hg,
localrepo,
manifest,
revlog,
@@ -94,6 +96,9 @@
'generaldelta',
}
+def preservedrequirements(repo):
+ return set()
+
deficiency = 'deficiency'
optimisation = 'optimization'
@@ -256,7 +261,7 @@
@registerformatvariant
class removecldeltachain(formatvariant):
- name = 'removecldeltachain'
+ name = 'plain-cl-delta'
default = True
@@ -281,6 +286,28 @@
def fromconfig(repo):
return True
+@registerformatvariant
+class compressionengine(formatvariant):
+ name = 'compression'
+ default = 'zlib'
+
+ description = _('Compression algorithm used to compress data. '
+ 'Some engines are faster than others')
+
+ upgrademessage = _('revlog content will be recompressed with the new '
+ 'algorithm.')
+
+ @classmethod
+ def fromrepo(cls, repo):
+ for req in repo.requirements:
+ if req.startswith('exp-compression-'):
+ return req.split('-', 2)[2]
+ return 'zlib'
+
+ @classmethod
+ def fromconfig(cls, repo):
+ return repo.ui.config('experimental', 'format.compression')
+
def finddeficiencies(repo):
"""returns a list of deficiencies that the repo suffer from"""
deficiencies = []
@@ -342,6 +369,19 @@
'recomputed; this will likely drastically slow down '
'execution time')))
+ optimizations.append(improvement(
+ name='redeltafulladd',
+ type=optimisation,
+ description=_('every revision will be re-added as if it was new '
+ 'content. It will go through the full storage '
+ 'mechanism giving extensions a chance to process it '
+ '(eg. lfs). This is similar to "redeltaall" but even '
+ 'slower since more logic is involved.'),
+ upgrademessage=_('each revision will be added as new content to the '
+ 'internal storage; this will likely drastically slow '
+ 'down execution time, but some extensions might need '
+ 'it')))
+
return optimizations
def determineactions(repo, deficiencies, sourcereqs, destreqs):
@@ -387,9 +427,8 @@
mandir = path[:-len('00manifest.i')]
return manifest.manifestrevlog(repo.svfs, dir=mandir)
else:
- # Filelogs don't do anything special with settings. So we can use a
- # vanilla revlog.
- return revlog.revlog(repo.svfs, path)
+ #reverse of "/".join(("data", path + ".i"))
+ return filelog.filelog(repo.svfs, path[5:-2])
def _copyrevlogs(ui, srcrepo, dstrepo, tr, deltareuse, aggressivemergedeltas):
"""Copy revlogs between 2 repos."""
@@ -592,6 +631,8 @@
deltareuse = revlog.revlog.DELTAREUSESAMEREVS
elif 'redeltamultibase' in actions:
deltareuse = revlog.revlog.DELTAREUSESAMEREVS
+ elif 'redeltafulladd' in actions:
+ deltareuse = revlog.revlog.DELTAREUSEFULLADD
else:
deltareuse = revlog.revlog.DELTAREUSEALWAYS
@@ -679,6 +720,7 @@
# FUTURE there is potentially a need to control the wanted requirements via
# command arguments or via an extension hook point.
newreqs = localrepo.newreporequirements(repo)
+ newreqs.update(preservedrequirements(repo))
noremovereqs = (repo.requirements - newreqs -
supportremovedrequirements(repo))
@@ -804,9 +846,10 @@
try:
ui.write(_('creating temporary repository to stage migrated '
'data: %s\n') % tmppath)
- dstrepo = localrepo.localrepository(repo.baseui,
- path=tmppath,
- create=True)
+
+ # clone ui without using ui.copy because repo.ui is protected
+ repoui = repo.ui.__class__(repo.ui)
+ dstrepo = hg.repository(repoui, path=tmppath, create=True)
with dstrepo.wlock(), dstrepo.lock():
backuppath = _upgraderepo(ui, repo, dstrepo, newreqs,
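
The compressionengine format variant above derives the active engine from a repository requirement of the form 'exp-compression-<engine>', falling back to zlib. A standalone restatement of that parsing, using a hypothetical helper name, just to make the naming scheme explicit:

# Sketch of the requirement parsing used by compressionengine.fromrepo.
def compressionfromrequirements(requirements):
    for req in requirements:
        if req.startswith('exp-compression-'):
            return req.split('-', 2)[2]
    return 'zlib'

assert compressionfromrequirements({'revlogv1', 'store'}) == 'zlib'
assert compressionfromrequirements({'exp-compression-zstd'}) == 'zstd'
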
--- a/mercurial/url.py Mon Jan 08 16:07:51 2018 -0800
+++ b/mercurial/url.py Mon Jan 22 17:53:02 2018 -0500
@@ -466,7 +466,7 @@
handlerfuncs = []
-def opener(ui, authinfo=None):
+def opener(ui, authinfo=None, useragent=None):
'''
construct an opener suitable for urllib2
authinfo will be added to the password manager
@@ -512,8 +512,14 @@
# own distribution name. Since servers should not be using the user
# agent string for anything, clients should be able to define whatever
# user agent they deem appropriate.
- agent = 'mercurial/proto-1.0 (Mercurial %s)' % util.version()
- opener.addheaders = [(r'User-agent', pycompat.sysstr(agent))]
+ #
+ # The custom user agent is for lfs, because unfortunately some servers
+ # do look at this value.
+ if not useragent:
+ agent = 'mercurial/proto-1.0 (Mercurial %s)' % util.version()
+ opener.addheaders = [(r'User-agent', pycompat.sysstr(agent))]
+ else:
+ opener.addheaders = [(r'User-agent', pycompat.sysstr(useragent))]
# This header should only be needed by wire protocol requests. But it has
# been sent on all requests since forever. We keep sending it for backwards
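
opener() now threads an optional user agent through to the handler stack so that lfs can present its own agent string to servers that inspect it. A hedged sketch of a caller; the agent value is an illustrative placeholder, not anything a server requires:

# Hypothetical caller of url.opener() with the new useragent parameter.
from mercurial import ui as uimod, url

ui = uimod.ui.load()
urlopener = url.opener(ui, useragent='example-agent/0.1')
# urlopener's User-agent header is now 'example-agent/0.1' instead of the
# default 'mercurial/proto-1.0 (Mercurial ...)'.
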
--- a/mercurial/util.py Mon Jan 08 16:07:51 2018 -0800
+++ b/mercurial/util.py Mon Jan 22 17:53:02 2018 -0500
@@ -49,6 +49,7 @@
encoding,
error,
i18n,
+ node as nodemod,
policy,
pycompat,
urllibcompat,
@@ -109,6 +110,8 @@
expandglobs = platform.expandglobs
explainexit = platform.explainexit
findexe = platform.findexe
+getfsmountpoint = platform.getfsmountpoint
+getfstype = platform.getfstype
gethgcmd = platform.gethgcmd
getuser = platform.getuser
getpid = os.getpid
@@ -163,6 +166,10 @@
setprocname = osutil.setprocname
except AttributeError:
pass
+try:
+ unblocksignal = osutil.unblocksignal
+except AttributeError:
+ pass
# Python compatibility
@@ -259,7 +266,7 @@
def __getitem__(self, key):
if key not in DIGESTS:
raise Abort(_('unknown digest type: %s') % k)
- return self._hashes[key].hexdigest()
+ return nodemod.hex(self._hashes[key].digest())
def __iter__(self):
return iter(self._hashes)
@@ -931,6 +938,11 @@
# __dict__ assignment required to bypass __setattr__ (eg: repoview)
obj.__dict__[self.name] = value
+def clearcachedproperty(obj, prop):
+ '''clear a cached property value, if one has been set'''
+ if prop in obj.__dict__:
+ del obj.__dict__[prop]
+
def pipefilter(s, cmd):
'''filter string S through command CMD, returning its output'''
p = subprocess.Popen(cmd, shell=True, close_fds=closefds,
@@ -1196,6 +1208,7 @@
'ext4',
'hfs',
'jfs',
+ 'NTFS',
'reiserfs',
'tmpfs',
'ufs',
@@ -1510,13 +1523,6 @@
return ''.join(result)
-def getfstype(dirpath):
- '''Get the filesystem type name from a directory (best-effort)
-
- Returns None if we are unsure. Raises OSError on ENOENT, EPERM, etc.
- '''
- return getattr(osutil, 'getfstype', lambda x: None)(dirpath)
-
def checknlink(testfile):
'''check whether hardlink count reporting works properly'''
@@ -2662,7 +2668,7 @@
else:
prefix_char = prefix
mapping[prefix_char] = prefix_char
- r = remod.compile(r'%s(%s)' % (prefix, patterns))
+ r = remod.compile(br'%s(%s)' % (prefix, patterns))
return r.sub(lambda x: fn(mapping[x.group()[1:]]), s)
def getport(port):
@@ -3859,3 +3865,82 @@
fn = '%s~%s~%s' % (f, tag, n)
if fn not in ctx and fn not in others:
return fn
+
+def readexactly(stream, n):
+ '''read n bytes from stream.read and abort if less was available'''
+ s = stream.read(n)
+ if len(s) < n:
+ raise error.Abort(_("stream ended unexpectedly"
+ " (got %d bytes, expected %d)")
+ % (len(s), n))
+ return s
+
+def uvarintencode(value):
+ """Encode an unsigned integer value to a varint.
+
+ A varint is a variable length integer of 1 or more bytes. Each byte
+ except the last has the most significant bit set. The lower 7 bits of
+ each byte store the 2's complement representation, least significant group
+ first.
+
+ >>> uvarintencode(0)
+ '\\x00'
+ >>> uvarintencode(1)
+ '\\x01'
+ >>> uvarintencode(127)
+ '\\x7f'
+ >>> uvarintencode(1337)
+ '\\xb9\\n'
+ >>> uvarintencode(65536)
+ '\\x80\\x80\\x04'
+ >>> uvarintencode(-1)
+ Traceback (most recent call last):
+ ...
+ ProgrammingError: negative value for uvarint: -1
+ """
+ if value < 0:
+ raise error.ProgrammingError('negative value for uvarint: %d'
+ % value)
+ bits = value & 0x7f
+ value >>= 7
+ bytes = []
+ while value:
+ bytes.append(pycompat.bytechr(0x80 | bits))
+ bits = value & 0x7f
+ value >>= 7
+ bytes.append(pycompat.bytechr(bits))
+
+ return ''.join(bytes)
+
+def uvarintdecodestream(fh):
+ """Decode an unsigned variable length integer from a stream.
+
+ The passed argument is anything that has a ``.read(N)`` method.
+
+ >>> try:
+ ... from StringIO import StringIO as BytesIO
+ ... except ImportError:
+ ... from io import BytesIO
+ >>> uvarintdecodestream(BytesIO(b'\\x00'))
+ 0
+ >>> uvarintdecodestream(BytesIO(b'\\x01'))
+ 1
+ >>> uvarintdecodestream(BytesIO(b'\\x7f'))
+ 127
+ >>> uvarintdecodestream(BytesIO(b'\\xb9\\n'))
+ 1337
+ >>> uvarintdecodestream(BytesIO(b'\\x80\\x80\\x04'))
+ 65536
+ >>> uvarintdecodestream(BytesIO(b'\\x80'))
+ Traceback (most recent call last):
+ ...
+ Abort: stream ended unexpectedly (got 0 bytes, expected 1)
+ """
+ result = 0
+ shift = 0
+ while True:
+ byte = ord(readexactly(fh, 1))
+ result |= ((byte & 0x7f) << shift)
+ if not (byte & 0x80):
+ return result
+ shift += 7
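
uvarintencode() and uvarintdecodestream() above are inverses of each other; the doctests cover the edge cases, but a small round trip makes the relationship explicit (a sketch, assuming mercurial.util is importable):

# Round-trip check for the new unsigned-varint helpers.
import io

from mercurial import util

for value in (0, 1, 127, 128, 1337, 65536):
    encoded = util.uvarintencode(value)
    decoded = util.uvarintdecodestream(io.BytesIO(encoded))
    assert decoded == value, (value, encoded, decoded)
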
--- a/mercurial/verify.py Mon Jan 08 16:07:51 2018 -0800
+++ b/mercurial/verify.py Mon Jan 22 17:53:02 2018 -0500
@@ -17,6 +17,7 @@
from . import (
error,
+ pycompat,
revlog,
scmutil,
util,
@@ -105,7 +106,8 @@
if self.lrugetctx(l)[f].filenode() == node]
except Exception:
pass
- self.warn(_(" (expected %s)") % " ".join(map(str, linkrevs)))
+ self.warn(_(" (expected %s)") % " ".join
+ (map(pycompat.bytestr, linkrevs)))
lr = None # can't be trusted
try:
--- a/mercurial/vfs.py Mon Jan 08 16:07:51 2018 -0800
+++ b/mercurial/vfs.py Mon Jan 22 17:53:02 2018 -0500
@@ -83,8 +83,8 @@
with self(path, mode=mode) as fp:
return fp.readlines()
- def write(self, path, data, backgroundclose=False):
- with self(path, 'wb', backgroundclose=backgroundclose) as fp:
+ def write(self, path, data, backgroundclose=False, **kwargs):
+ with self(path, 'wb', backgroundclose=backgroundclose, **kwargs) as fp:
return fp.write(data)
def writelines(self, path, data, mode='wb', notindexed=False):
@@ -170,9 +170,9 @@
def mkdir(self, path=None):
return os.mkdir(self.join(path))
- def mkstemp(self, suffix='', prefix='tmp', dir=None, text=False):
+ def mkstemp(self, suffix='', prefix='tmp', dir=None):
fd, name = tempfile.mkstemp(suffix=suffix, prefix=prefix,
- dir=self.join(dir), text=text)
+ dir=self.join(dir))
dname, fname = util.split(name)
if dir:
return fd, os.path.join(dir, fname)
@@ -277,8 +277,12 @@
to ``__call__``/``open`` to result in the file possibly being closed
asynchronously, on a background thread.
"""
- # This is an arbitrary restriction and could be changed if we ever
- # have a use case.
+ # Sharing backgroundfilecloser between threads is complex, and using
+ # multiple instances puts us at risk of running out of file descriptors,
+ # so only allow backgroundfilecloser to be used from the main thread.
+ if not isinstance(threading.currentThread(), threading._MainThread):
+ yield
+ return
vfs = getattr(self, 'vfs', self)
if getattr(vfs, '_backgroundfilecloser', None):
raise error.Abort(
@@ -329,9 +333,8 @@
return
os.chmod(name, self.createmode & 0o666)
- def __call__(self, path, mode="r", text=False, atomictemp=False,
- notindexed=False, backgroundclose=False, checkambig=False,
- auditpath=True):
+ def __call__(self, path, mode="r", atomictemp=False, notindexed=False,
+ backgroundclose=False, checkambig=False, auditpath=True):
'''Open ``path`` file, which is relative to vfs root.
Newly created directories are marked as "not to be indexed by
@@ -369,7 +372,7 @@
self.audit(path, mode=mode)
f = self.join(path)
- if not text and "b" not in mode:
+ if "b" not in mode:
mode += "b" # for that other OS
nlink = -1
@@ -413,7 +416,8 @@
' valid for checkambig=True') % mode)
fp = checkambigatclosing(fp)
- if backgroundclose:
+ if (backgroundclose and
+ isinstance(threading.currentThread(), threading._MainThread)):
if not self._backgroundfilecloser:
raise error.Abort(_('backgroundclose can only be used when a '
'backgroundclosing context manager is active')
--- a/mercurial/win32.py Mon Jan 08 16:07:51 2018 -0800
+++ b/mercurial/win32.py Mon Jan 22 17:53:02 2018 -0500
@@ -8,6 +8,7 @@
from __future__ import absolute_import
import ctypes
+import ctypes.wintypes as wintypes
import errno
import msvcrt
import os
@@ -33,6 +34,7 @@
_HANDLE = ctypes.c_void_p
_HWND = _HANDLE
_PCCERT_CONTEXT = ctypes.c_void_p
+_MAX_PATH = wintypes.MAX_PATH
_INVALID_HANDLE_VALUE = _HANDLE(-1).value
@@ -223,6 +225,24 @@
_kernel32.SetFileAttributesA.argtypes = [_LPCSTR, _DWORD]
_kernel32.SetFileAttributesA.restype = _BOOL
+_DRIVE_UNKNOWN = 0
+_DRIVE_NO_ROOT_DIR = 1
+_DRIVE_REMOVABLE = 2
+_DRIVE_FIXED = 3
+_DRIVE_REMOTE = 4
+_DRIVE_CDROM = 5
+_DRIVE_RAMDISK = 6
+
+_kernel32.GetDriveTypeA.argtypes = [_LPCSTR]
+_kernel32.GetDriveTypeA.restype = _UINT
+
+_kernel32.GetVolumeInformationA.argtypes = [_LPCSTR, ctypes.c_void_p, _DWORD,
+ ctypes.c_void_p, ctypes.c_void_p, ctypes.c_void_p, ctypes.c_void_p, _DWORD]
+_kernel32.GetVolumeInformationA.restype = _BOOL
+
+_kernel32.GetVolumePathNameA.argtypes = [_LPCSTR, ctypes.c_void_p, _DWORD]
+_kernel32.GetVolumePathNameA.restype = _BOOL
+
_kernel32.OpenProcess.argtypes = [_DWORD, _BOOL, _DWORD]
_kernel32.OpenProcess.restype = _HANDLE
@@ -410,6 +430,49 @@
raise ctypes.WinError(_ERROR_INSUFFICIENT_BUFFER)
return buf.value
+def getvolumename(path):
+ """Get the mount point of the filesystem from a directory or file
+ (best-effort)
+
+ Returns None if we are unsure. Raises OSError on ENOENT, EPERM, etc.
+ """
+ # realpath() calls GetFullPathName()
+ realpath = os.path.realpath(path)
+
+ # allocate at least MAX_PATH long since GetVolumePathName('c:\\', buf, 4)
+ # somehow fails on Windows XP
+ size = max(len(realpath), _MAX_PATH) + 1
+ buf = ctypes.create_string_buffer(size)
+
+ if not _kernel32.GetVolumePathNameA(realpath, ctypes.byref(buf), size):
+ raise ctypes.WinError() # Note: WinError is a function
+
+ return buf.value
+
+def getfstype(path):
+ """Get the filesystem type name from a directory or file (best-effort)
+
+ Returns None if we are unsure. Raises OSError on ENOENT, EPERM, etc.
+ """
+ volume = getvolumename(path)
+
+ t = _kernel32.GetDriveTypeA(volume)
+
+ if t == _DRIVE_REMOTE:
+ return 'cifs'
+ elif t not in (_DRIVE_REMOVABLE, _DRIVE_FIXED, _DRIVE_CDROM,
+ _DRIVE_RAMDISK):
+ return None
+
+ size = _MAX_PATH + 1
+ name = ctypes.create_string_buffer(size)
+
+ if not _kernel32.GetVolumeInformationA(volume, None, 0, None, None, None,
+ ctypes.byref(name), size):
+ raise ctypes.WinError() # Note: WinError is a function
+
+ return name.value
+
def getuser():
'''return name of current user'''
size = _DWORD(300)
--- a/mercurial/windows.py Mon Jan 08 16:07:51 2018 -0800
+++ b/mercurial/windows.py Mon Jan 22 17:53:02 2018 -0500
@@ -32,6 +32,8 @@
osutil = policy.importmod(r'osutil')
executablepath = win32.executablepath
+getfsmountpoint = win32.getvolumename
+getfstype = win32.getfstype
getuser = win32.getuser
hidewindow = win32.hidewindow
makedir = win32.makedir
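
With getfstype and getfsmountpoint now routed through the platform layer, callers can ask util for a best-effort filesystem probe on both Windows and POSIX. A small sketch; the path is illustrative and either call may return None (or a platform-specific name such as 'NTFS', 'ext4' or 'cifs') depending on the system:

# Best-effort filesystem introspection via the new platform hooks.
from mercurial import util

print(util.getfstype(b'.'))        # e.g. 'NTFS', 'ext4', 'cifs', or None
print(util.getfsmountpoint(b'.'))  # volume root / mount point, best effort
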
--- a/mercurial/wireproto.py Mon Jan 08 16:07:51 2018 -0800
+++ b/mercurial/wireproto.py Mon Jan 22 17:53:02 2018 -0500
@@ -205,13 +205,16 @@
# :scsv: list of comma-separated values return as set
# :plain: string with no transformation needed.
gboptsmap = {'heads': 'nodes',
+ 'bookmarks': 'boolean',
'common': 'nodes',
'obsmarkers': 'boolean',
'phases': 'boolean',
'bundlecaps': 'scsv',
'listkeys': 'csv',
'cg': 'boolean',
- 'cbattempted': 'boolean'}
+ 'cbattempted': 'boolean',
+ 'stream': 'boolean',
+}
# client side
@@ -451,9 +454,9 @@
# don't pass optional arguments left at their default value
opts = {}
if three is not None:
- opts['three'] = three
+ opts[r'three'] = three
if four is not None:
- opts['four'] = four
+ opts[r'four'] = four
return self._call('debugwireargs', one=one, two=two, **opts)
def _call(self, cmd, **args):
@@ -519,18 +522,28 @@
The call was successful and the result is a stream.
- Accepts either a generator or an object with a ``read(size)`` method.
+ Accepts a generator containing chunks of data to be sent to the client.
+
+ ``prefer_uncompressed`` indicates that the data is expected to be
+ uncompressable and that the stream should therefore use the ``none``
+ engine.
+ """
+ def __init__(self, gen=None, prefer_uncompressed=False):
+ self.gen = gen
+ self.prefer_uncompressed = prefer_uncompressed
- ``v1compressible`` indicates whether this data can be compressed to
- "version 1" clients (technically: HTTP peers using
- application/mercurial-0.1 media type). This flag should NOT be used on
- new commands because new clients should support a more modern compression
- mechanism.
+class streamres_legacy(object):
+ """wireproto reply: uncompressed binary stream
+
+ The call was successful and the result is a stream.
+
+ Accepts a generator containing chunks of data to be sent to the client.
+
+ Like ``streamres``, but sends uncompressed data for "version 1" clients
+ using the application/mercurial-0.1 media type.
"""
- def __init__(self, gen=None, reader=None, v1compressible=False):
+ def __init__(self, gen=None):
self.gen = gen
- self.reader = reader
- self.v1compressible = v1compressible
class pushres(object):
"""wireproto reply: success with simple integer return
@@ -767,7 +780,7 @@
else:
caps.append('streamreqs=%s' % ','.join(sorted(requiredformats)))
if repo.ui.configbool('experimental', 'bundle2-advertise'):
- capsblob = bundle2.encodecaps(bundle2.getrepocaps(repo))
+ capsblob = bundle2.encodecaps(bundle2.getrepocaps(repo, role='server'))
caps.append('bundle2=' + urlreq.quote(capsblob))
caps.append('unbundle=%s' % ','.join(bundle2.bundlepriority))
@@ -801,7 +814,8 @@
outgoing = discovery.outgoing(repo, missingroots=nodes,
missingheads=repo.heads())
cg = changegroupmod.makechangegroup(repo, outgoing, '01', 'serve')
- return streamres(reader=cg, v1compressible=True)
+ gen = iter(lambda: cg.read(32768), '')
+ return streamres(gen=gen)
@wireprotocommand('changegroupsubset', 'bases heads')
def changegroupsubset(repo, proto, bases, heads):
@@ -810,13 +824,14 @@
outgoing = discovery.outgoing(repo, missingroots=bases,
missingheads=heads)
cg = changegroupmod.makechangegroup(repo, outgoing, '01', 'serve')
- return streamres(reader=cg, v1compressible=True)
+ gen = iter(lambda: cg.read(32768), '')
+ return streamres(gen=gen)
@wireprotocommand('debugwireargs', 'one two *')
def debugwireargs(repo, proto, one, two, others):
# only accept optional args from the known set
opts = options('debugwireargs', ['three', 'four'], others)
- return repo.debugwireargs(one, two, **opts)
+ return repo.debugwireargs(one, two, **pycompat.strkwargs(opts))
@wireprotocommand('getbundle', '*')
def getbundle(repo, proto, others):
@@ -847,20 +862,24 @@
raise error.Abort(bundle2requiredmain,
hint=bundle2requiredhint)
+ prefercompressed = True
+
try:
if repo.ui.configbool('server', 'disablefullbundle'):
# Check to see if this is a full clone.
clheads = set(repo.changelog.heads())
+ changegroup = opts.get('cg', True)
heads = set(opts.get('heads', set()))
common = set(opts.get('common', set()))
common.discard(nullid)
- if not common and clheads == heads:
+ if changegroup and not common and clheads == heads:
raise error.Abort(
_('server has pull-based clones disabled'),
hint=_('remove --pull if specified or upgrade Mercurial'))
- chunks = exchange.getbundlechunks(repo, 'serve',
- **pycompat.strkwargs(opts))
+ info, chunks = exchange.getbundlechunks(repo, 'serve',
+ **pycompat.strkwargs(opts))
+ prefercompressed = info.get('prefercompressed', True)
except error.Abort as exc:
# cleanly forward Abort error to the client
if not exchange.bundle2requested(opts.get('bundlecaps')):
@@ -875,8 +894,10 @@
advargs.append(('hint', exc.hint))
bundler.addpart(bundle2.bundlepart('error:abort',
manargs, advargs))
- return streamres(gen=bundler.getchunks(), v1compressible=True)
- return streamres(gen=chunks, v1compressible=True)
+ chunks = bundler.getchunks()
+ prefercompressed = False
+
+ return streamres(gen=chunks, prefer_uncompressed=not prefercompressed)
@wireprotocommand('heads')
def heads(repo, proto):
@@ -953,21 +974,7 @@
capability with a value representing the version and flags of the repo
it is serving. Client checks to see if it understands the format.
'''
- if not streamclone.allowservergeneration(repo):
- return '1\n'
-
- def getstream(it):
- yield '0\n'
- for chunk in it:
- yield chunk
-
- try:
- # LockError may be raised before the first result is yielded. Don't
- # emit output until we're sure we got the lock successfully.
- it = streamclone.generatev1wireproto(repo)
- return streamres(gen=getstream(it))
- except error.LockError:
- return '2\n'
+ return streamres_legacy(streamclone.generatev1wireproto(repo))
@wireprotocommand('unbundle', 'heads')
def unbundle(repo, proto, heads):
@@ -1002,7 +1009,7 @@
if util.safehasattr(r, 'addpart'):
# The return looks streamable, we are in the bundle2 case and
# should return a stream.
- return streamres(gen=r.getchunks())
+ return streamres_legacy(gen=r.getchunks())
return pushres(r)
finally:
@@ -1066,4 +1073,4 @@
manargs, advargs))
except error.PushRaced as exc:
bundler.newpart('error:pushraced', [('message', str(exc))])
- return streamres(gen=bundler.getchunks())
+ return streamres_legacy(gen=bundler.getchunks())
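
The hunks above replace ``reader``/``v1compressible`` with two explicit reply
types. As a rough sketch (not part of the diff, with hypothetical handlers
``examplecommand`` and ``examplelegacycommand`` and a made-up ``fakechunks``
generator), a wireproto command would use them like this::

    # Sketch only: how a command handler uses the reworked reply types.
    from mercurial import wireproto

    def fakechunks():
        # hypothetical generator of already-compressed bundle chunks
        yield b'chunk1'
        yield b'chunk2'

    def examplecommand(repo, proto):
        # New-style reply: the transport may compress the payload unless told
        # it is already compressed (prefer_uncompressed=True selects the
        # 'none' compression engine).
        return wireproto.streamres(gen=fakechunks(), prefer_uncompressed=True)

    def examplelegacycommand(repo, proto):
        # Legacy reply: always uncompressed for application/mercurial-0.1
        # peers.
        return wireproto.streamres_legacy(gen=fakechunks())
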
--- a/mercurial/worker.py Mon Jan 08 16:07:51 2018 -0800
+++ b/mercurial/worker.py Mon Jan 22 17:53:02 2018 -0500
@@ -11,6 +11,8 @@
import os
import signal
import sys
+import threading
+import time
from .i18n import _
from . import (
@@ -53,7 +55,7 @@
raise error.Abort(_('number of cpus must be an integer'))
return min(max(countcpus(), 4), 32)
-if pycompat.isposix:
+if pycompat.isposix or pycompat.iswindows:
_startupcost = 0.01
else:
_startupcost = 1e30
@@ -81,7 +83,8 @@
args - arguments to split into chunks, to pass to individual
workers
'''
- if worthwhile(ui, costperarg, len(args)):
+ enabled = ui.configbool('worker', 'enabled')
+ if enabled and worthwhile(ui, costperarg, len(args)):
return _platformworker(ui, func, staticargs, args)
return func(*staticargs + (args,))
@@ -203,7 +206,91 @@
elif os.WIFSIGNALED(code):
return -os.WTERMSIG(code)
-if not pycompat.iswindows:
+def _windowsworker(ui, func, staticargs, args):
+ class Worker(threading.Thread):
+ def __init__(self, taskqueue, resultqueue, func, staticargs,
+ group=None, target=None, name=None, verbose=None):
+ threading.Thread.__init__(self, group=group, target=target,
+ name=name, verbose=verbose)
+ self._taskqueue = taskqueue
+ self._resultqueue = resultqueue
+ self._func = func
+ self._staticargs = staticargs
+ self._interrupted = False
+ self.daemon = True
+ self.exception = None
+
+ def interrupt(self):
+ self._interrupted = True
+
+ def run(self):
+ try:
+ while not self._taskqueue.empty():
+ try:
+ args = self._taskqueue.get_nowait()
+ for res in self._func(*self._staticargs + (args,)):
+ self._resultqueue.put(res)
+ # threading doesn't provide a native way to
+ # interrupt execution. handle it manually at every
+ # iteration.
+ if self._interrupted:
+ return
+ except util.empty:
+ break
+ except Exception as e:
+ # store the exception such that the main thread can resurface
+ # it as if the func was running without workers.
+ self.exception = e
+ raise
+
+ threads = []
+ def trykillworkers():
+ # Allow up to 1 second to clean worker threads nicely
+ cleanupend = time.time() + 1
+ for t in threads:
+ t.interrupt()
+ for t in threads:
+ remainingtime = cleanupend - time.time()
+ t.join(remainingtime)
+ if t.is_alive():
+ # pass over the failure to join the workers. it is more
+ # important to surface the initial exception than the
+ # fact that one of the workers may be processing a large
+ # task and does not get to handle the interruption.
+ ui.warn(_("failed to kill worker threads while "
+ "handling an exception\n"))
+ return
+
+ workers = _numworkers(ui)
+ resultqueue = util.queue()
+ taskqueue = util.queue()
+ # partition work into more pieces than workers to minimize the chance
+ # of uneven distribution of large tasks among the workers
+ for pargs in partition(args, workers * 20):
+ taskqueue.put(pargs)
+ for _i in range(workers):
+ t = Worker(taskqueue, resultqueue, func, staticargs)
+ threads.append(t)
+ t.start()
+ try:
+ while len(threads) > 0:
+ while not resultqueue.empty():
+ yield resultqueue.get()
+ threads[0].join(0.05)
+ finishedthreads = [_t for _t in threads if not _t.is_alive()]
+ for t in finishedthreads:
+ if t.exception is not None:
+ raise t.exception
+ threads.remove(t)
+ except (Exception, KeyboardInterrupt): # re-raises
+ trykillworkers()
+ raise
+ while not resultqueue.empty():
+ yield resultqueue.get()
+
+if pycompat.iswindows:
+ _platformworker = _windowsworker
+else:
_platformworker = _posixworker
_exitstatus = _posixexitstatus
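
For context, a hedged sketch of how callers drive the code path changed above:
``worker.enabled`` is the config knob consulted in the new hunk, while
``process``, ``func`` and the cost value are illustrative only::

    # Sketch: invoking the worker API. With worker.enabled=False (or too few
    # items) func runs in-process; otherwise work is partitioned across
    # processes (POSIX) or the new thread-based _windowsworker, and results
    # are yielded as they become available.
    from mercurial import worker

    def func(staticarg, items):
        for item in items:
            yield staticarg, item      # one result per input item

    def process(ui, items):
        for staticarg, item in worker.worker(ui, 0.001, func, (b'static',),
                                             items):
            ui.write(b'%s %s\n' % (staticarg, item))
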
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/rust/.cargo/config Mon Jan 22 17:53:02 2018 -0500
@@ -0,0 +1,7 @@
+# Rust builds with a modern MSVC and uses a newer CRT.
+# Python 2.7 has a shared library dependency on an older CRT (msvcr90.dll).
+# We statically link the modern CRT to avoid multiple msvcr*.dll libraries
+# being loaded and Python possibly picking up symbols from the newer runtime
+# (which would be loaded first).
+[target.'cfg(target_os = "windows")']
+rustflags = ["-Ctarget-feature=+crt-static"]
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/rust/Cargo.lock Mon Jan 22 17:53:02 2018 -0500
@@ -0,0 +1,127 @@
+[[package]]
+name = "aho-corasick"
+version = "0.5.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "memchr 0.1.11 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "cpython"
+version = "0.1.0"
+source = "git+https://github.com/indygreg/rust-cpython.git?rev=c90d65cf84abfffce7ef54476bbfed56017a2f52#c90d65cf84abfffce7ef54476bbfed56017a2f52"
+dependencies = [
+ "libc 0.2.35 (registry+https://github.com/rust-lang/crates.io-index)",
+ "num-traits 0.1.41 (registry+https://github.com/rust-lang/crates.io-index)",
+ "python27-sys 0.1.2 (git+https://github.com/indygreg/rust-cpython.git?rev=c90d65cf84abfffce7ef54476bbfed56017a2f52)",
+]
+
+[[package]]
+name = "hgcli"
+version = "0.1.0"
+dependencies = [
+ "cpython 0.1.0 (git+https://github.com/indygreg/rust-cpython.git?rev=c90d65cf84abfffce7ef54476bbfed56017a2f52)",
+ "libc 0.2.35 (registry+https://github.com/rust-lang/crates.io-index)",
+ "python27-sys 0.1.2 (git+https://github.com/indygreg/rust-cpython.git?rev=c90d65cf84abfffce7ef54476bbfed56017a2f52)",
+]
+
+[[package]]
+name = "kernel32-sys"
+version = "0.2.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
+ "winapi-build 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "libc"
+version = "0.2.35"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
+[[package]]
+name = "memchr"
+version = "0.1.11"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "libc 0.2.35 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "num-traits"
+version = "0.1.41"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
+[[package]]
+name = "python27-sys"
+version = "0.1.2"
+source = "git+https://github.com/indygreg/rust-cpython.git?rev=c90d65cf84abfffce7ef54476bbfed56017a2f52#c90d65cf84abfffce7ef54476bbfed56017a2f52"
+dependencies = [
+ "libc 0.2.35 (registry+https://github.com/rust-lang/crates.io-index)",
+ "regex 0.1.80 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "regex"
+version = "0.1.80"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "aho-corasick 0.5.3 (registry+https://github.com/rust-lang/crates.io-index)",
+ "memchr 0.1.11 (registry+https://github.com/rust-lang/crates.io-index)",
+ "regex-syntax 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)",
+ "thread_local 0.2.7 (registry+https://github.com/rust-lang/crates.io-index)",
+ "utf8-ranges 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "regex-syntax"
+version = "0.3.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
+[[package]]
+name = "thread-id"
+version = "2.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
+ "libc 0.2.35 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "thread_local"
+version = "0.2.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "thread-id 2.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "utf8-ranges"
+version = "0.1.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
+[[package]]
+name = "winapi"
+version = "0.2.8"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
+[[package]]
+name = "winapi-build"
+version = "0.1.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
+[metadata]
+"checksum aho-corasick 0.5.3 (registry+https://github.com/rust-lang/crates.io-index)" = "ca972c2ea5f742bfce5687b9aef75506a764f61d37f8f649047846a9686ddb66"
+"checksum cpython 0.1.0 (git+https://github.com/indygreg/rust-cpython.git?rev=c90d65cf84abfffce7ef54476bbfed56017a2f52)" = "<none>"
+"checksum kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "7507624b29483431c0ba2d82aece8ca6cdba9382bff4ddd0f7490560c056098d"
+"checksum libc 0.2.35 (registry+https://github.com/rust-lang/crates.io-index)" = "96264e9b293e95d25bfcbbf8a88ffd1aedc85b754eba8b7d78012f638ba220eb"
+"checksum memchr 0.1.11 (registry+https://github.com/rust-lang/crates.io-index)" = "d8b629fb514376c675b98c1421e80b151d3817ac42d7c667717d282761418d20"
+"checksum num-traits 0.1.41 (registry+https://github.com/rust-lang/crates.io-index)" = "cacfcab5eb48250ee7d0c7896b51a2c5eec99c1feea5f32025635f5ae4b00070"
+"checksum python27-sys 0.1.2 (git+https://github.com/indygreg/rust-cpython.git?rev=c90d65cf84abfffce7ef54476bbfed56017a2f52)" = "<none>"
+"checksum regex 0.1.80 (registry+https://github.com/rust-lang/crates.io-index)" = "4fd4ace6a8cf7860714a2c2280d6c1f7e6a413486c13298bbc86fd3da019402f"
+"checksum regex-syntax 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)" = "f9ec002c35e86791825ed294b50008eea9ddfc8def4420124fbc6b08db834957"
+"checksum thread-id 2.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "a9539db560102d1cef46b8b78ce737ff0bb64e7e18d35b2a5688f7d097d0ff03"
+"checksum thread_local 0.2.7 (registry+https://github.com/rust-lang/crates.io-index)" = "8576dbbfcaef9641452d5cf0df9b0e7eeab7694956dd33bb61515fb8f18cfdd5"
+"checksum utf8-ranges 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "a1ca13c08c41c9c3e04224ed9ff80461d97e121589ff27c753a16cb10830ae0f"
+"checksum winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)" = "167dc9d6949a9b857f3451275e911c3f44255842c1f7a76f33c55103a909087a"
+"checksum winapi-build 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "2d315eee3b34aca4797b2da6b13ed88266e6d612562a0c46390af8299fc699bc"
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/rust/Cargo.toml Mon Jan 22 17:53:02 2018 -0500
@@ -0,0 +1,2 @@
+[workspace]
+members = ["hgcli"]
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/rust/README.rst Mon Jan 22 17:53:02 2018 -0500
@@ -0,0 +1,78 @@
+===================
+Mercurial Rust Code
+===================
+
+This directory contains various Rust code for the Mercurial project.
+
+The top-level ``Cargo.toml`` file defines a workspace containing
+all primary Mercurial crates.
+
+Building
+========
+
+To build the Rust components::
+
+ $ cargo build
+
+If you prefer a non-debug / release configuration::
+
+ $ cargo build --release
+
+Features
+--------
+
+The following Cargo features are available:
+
+localdev (default)
+ Produce files that work with an in-source-tree build.
+
+ In this mode, the build finds and uses a ``python2.7`` binary from
+ ``PATH``. The ``hg`` binary assumes it runs from ``rust/target/<target>/hg``
+ and it finds Mercurial files at ``dirname($0)/../../../``.
+
+Build Mechanism
+---------------
+
+The produced ``hg`` binary is *bound* to a CPython installation. The
+binary links against and loads a CPython library that is discovered
+at build time (by a ``build.rs`` Cargo build script). The Python
+standard library defined by this CPython installation is also used.
+
+Finding the appropriate CPython installation to use is done by
+ the ``python27-sys`` crate's ``build.rs``. Its search order is:
+
+1. ``PYTHON_SYS_EXECUTABLE`` environment variable.
+2. ``python`` executable on ``PATH``.
+3. ``python2`` executable on ``PATH``.
+4. ``python2.7`` executable on ``PATH``.
+
+Additional verification of the found Python will be performed by our
+``build.rs`` to ensure it meets Mercurial's requirements.
+
+Details about the build-time configured Python are built into the
+produced ``hg`` binary. This means that a built ``hg`` binary is only
+suitable for a specific, well-defined role. These roles are controlled
+by Cargo features (see above).
+
+Running
+=======
+
+The ``hgcli`` crate produces an ``hg`` binary. You can run this binary
+via ``cargo run``::
+
+ $ cargo run --manifest-path hgcli/Cargo.toml
+
+Or directly::
+
+ $ target/debug/hg
+ $ target/release/hg
+
+You can also run the test harness with this binary::
+
+ $ ./run-tests.py --with-hg ../rust/target/debug/hg
+
+.. note::
+
+ Integration with the test harness is still preliminary. Remember to
+ ``cargo build`` after changes because the test harness doesn't yet
+ automatically build Rust code.
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/rust/hgcli/Cargo.toml Mon Jan 22 17:53:02 2018 -0500
@@ -0,0 +1,35 @@
+[package]
+name = "hgcli"
+version = "0.1.0"
+authors = ["Gregory Szorc <gregory.szorc@gmail.com>"]
+license = "GPL-2.0"
+
+build = "build.rs"
+
+[[bin]]
+name = "hg"
+path = "src/main.rs"
+
+[features]
+# localdev: detect Python in PATH and use files from source checkout.
+default = ["localdev"]
+localdev = []
+
+[dependencies]
+libc = "0.2.34"
+
+# We currently use a custom build of cpython and python27-sys with the
+# following changes:
+# * GILGuard call of prepare_freethreaded_python() is removed.
+# TODO switch to official release when our changes are incorporated.
+[dependencies.cpython]
+version = "0.1"
+default-features = false
+features = ["python27-sys"]
+git = "https://github.com/indygreg/rust-cpython.git"
+rev = "c90d65cf84abfffce7ef54476bbfed56017a2f52"
+
+[dependencies.python27-sys]
+version = "0.1.2"
+git = "https://github.com/indygreg/rust-cpython.git"
+rev = "c90d65cf84abfffce7ef54476bbfed56017a2f52"
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/rust/hgcli/build.rs Mon Jan 22 17:53:02 2018 -0500
@@ -0,0 +1,127 @@
+// build.rs -- Configure build environment for `hgcli` Rust package.
+//
+// Copyright 2017 Gregory Szorc <gregory.szorc@gmail.com>
+//
+// This software may be used and distributed according to the terms of the
+// GNU General Public License version 2 or any later version.
+
+use std::collections::HashMap;
+use std::env;
+use std::path::Path;
+use std::process::Command;
+
+struct PythonConfig {
+ python: String,
+ config: HashMap<String, String>,
+}
+
+fn get_python_config() -> PythonConfig {
+ // The python27-sys crate exports a Cargo variable defining the full
+ // path to the interpreter being used.
+ let python = env::var("DEP_PYTHON27_PYTHON_INTERPRETER").expect(
+ "Missing DEP_PYTHON27_PYTHON_INTERPRETER; bad python27-sys crate?",
+ );
+
+ if !Path::new(&python).exists() {
+ panic!(
+ "Python interpreter {} does not exist; this should never happen",
+ python
+ );
+ }
+
+ // This is a bit hacky but it gets the job done.
+ let separator = "SEPARATOR STRING";
+
+ let script = "import sysconfig; \
+c = sysconfig.get_config_vars(); \
+print('SEPARATOR STRING'.join('%s=%s' % i for i in c.items()))";
+
+ let mut command = Command::new(&python);
+ command.arg("-c").arg(script);
+
+ let out = command.output().unwrap();
+
+ if !out.status.success() {
+ panic!(
+ "python script failed: {}",
+ String::from_utf8_lossy(&out.stderr)
+ );
+ }
+
+ let stdout = String::from_utf8_lossy(&out.stdout);
+ let mut m = HashMap::new();
+
+ for entry in stdout.split(separator) {
+ let mut parts = entry.splitn(2, "=");
+ let key = parts.next().unwrap();
+ let value = parts.next().unwrap();
+ m.insert(String::from(key), String::from(value));
+ }
+
+ PythonConfig {
+ python: python,
+ config: m,
+ }
+}
+
+#[cfg(not(target_os = "windows"))]
+fn have_shared(config: &PythonConfig) -> bool {
+ match config.config.get("Py_ENABLE_SHARED") {
+ Some(value) => value == "1",
+ None => false,
+ }
+}
+
+#[cfg(target_os = "windows")]
+fn have_shared(config: &PythonConfig) -> bool {
+ use std::path::PathBuf;
+
+ // python27.dll should exist next to python2.7.exe.
+ let mut dll = PathBuf::from(&config.python);
+ dll.pop();
+ dll.push("python27.dll");
+
+ return dll.exists();
+}
+
+const REQUIRED_CONFIG_FLAGS: [&str; 2] = ["Py_USING_UNICODE", "WITH_THREAD"];
+
+fn main() {
+ let config = get_python_config();
+
+ println!("Using Python: {}", config.python);
+ println!("cargo:rustc-env=PYTHON_INTERPRETER={}", config.python);
+
+ let prefix = config.config.get("prefix").unwrap();
+
+ println!("Prefix: {}", prefix);
+
+ // TODO Windows builds don't expose these config flags. Figure out another
+ // way.
+ #[cfg(not(target_os = "windows"))]
+ for key in REQUIRED_CONFIG_FLAGS.iter() {
+ let result = match config.config.get(*key) {
+ Some(value) => value == "1",
+ None => false,
+ };
+
+ if !result {
+ panic!("Detected Python requires feature {}", key);
+ }
+ }
+
+ // We need a Python shared library.
+ if !have_shared(&config) {
+ panic!("Detected Python lacks a shared library, which is required");
+ }
+
+ let ucs4 = match config.config.get("Py_UNICODE_SIZE") {
+ Some(value) => value == "4",
+ None => false,
+ };
+
+ if !ucs4 {
+ #[cfg(not(target_os = "windows"))]
+ panic!("Detected Python doesn't support UCS-4 code points");
+ }
+}
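
The build script shells out to the detected interpreter and parses a
``sysconfig`` dump. A rough Python equivalent of that exchange (assuming the
same ``SEPARATOR STRING`` delimiter used above; ``parsed`` is illustrative)::

    # Sketch of the query build.rs performs against the chosen Python.
    import sysconfig

    SEPARATOR = 'SEPARATOR STRING'
    config = sysconfig.get_config_vars()
    blob = SEPARATOR.join('%s=%s' % item for item in config.items())

    # build.rs splits the blob back into a key/value map and then checks
    # flags such as Py_ENABLE_SHARED, Py_USING_UNICODE, WITH_THREAD and
    # Py_UNICODE_SIZE.
    parsed = dict(entry.split('=', 1) for entry in blob.split(SEPARATOR))
    print(parsed.get('Py_ENABLE_SHARED'), parsed.get('Py_UNICODE_SIZE'))
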
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/rust/hgcli/src/main.rs Mon Jan 22 17:53:02 2018 -0500
@@ -0,0 +1,233 @@
+// main.rs -- Main routines for `hg` program
+//
+// Copyright 2017 Gregory Szorc <gregory.szorc@gmail.com>
+//
+// This software may be used and distributed according to the terms of the
+// GNU General Public License version 2 or any later version.
+
+extern crate libc;
+extern crate cpython;
+extern crate python27_sys;
+
+use cpython::{NoArgs, ObjectProtocol, PyModule, PyResult, Python};
+use libc::{c_char, c_int};
+
+use std::env;
+use std::path::PathBuf;
+use std::ffi::{CString, OsStr};
+#[cfg(target_family = "unix")]
+use std::os::unix::ffi::{OsStrExt, OsStringExt};
+
+#[derive(Debug)]
+struct Environment {
+ _exe: PathBuf,
+ python_exe: PathBuf,
+ python_home: PathBuf,
+ mercurial_modules: PathBuf,
+}
+
+/// Run Mercurial locally from a source distribution or checkout.
+///
+/// hg is <srcdir>/rust/target/<target>/hg
+/// Python interpreter is detected by build script.
+/// Python home is relative to Python interpreter.
+/// Mercurial files are relative to hg binary, which is relative to source root.
+#[cfg(feature = "localdev")]
+fn get_environment() -> Environment {
+ let exe = env::current_exe().unwrap();
+
+ let mut mercurial_modules = exe.clone();
+ mercurial_modules.pop(); // /rust/target/<target>
+ mercurial_modules.pop(); // /rust/target
+ mercurial_modules.pop(); // /rust
+ mercurial_modules.pop(); // /
+
+ let python_exe: &'static str = env!("PYTHON_INTERPRETER");
+ let python_exe = PathBuf::from(python_exe);
+
+ let mut python_home = python_exe.clone();
+ python_home.pop();
+
+ // On Windows, python2.7.exe exists at the root directory of the Python
+ // install. Everywhere else, the Python install root is one level up.
+ if !python_exe.ends_with("python2.7.exe") {
+ python_home.pop();
+ }
+
+ Environment {
+ _exe: exe.clone(),
+ python_exe: python_exe,
+ python_home: python_home,
+ mercurial_modules: mercurial_modules.to_path_buf(),
+ }
+}
+
+// On UNIX, platform string is just bytes and should not contain NUL.
+#[cfg(target_family = "unix")]
+fn cstring_from_os<T: AsRef<OsStr>>(s: T) -> CString {
+ CString::new(s.as_ref().as_bytes()).unwrap()
+}
+
+// TODO convert to ANSI characters?
+#[cfg(target_family = "windows")]
+fn cstring_from_os<T: AsRef<OsStr>>(s: T) -> CString {
+ CString::new(s.as_ref().to_str().unwrap()).unwrap()
+}
+
+// On UNIX, argv starts as an array of char*. So it is easy to convert
+// to C strings.
+#[cfg(target_family = "unix")]
+fn args_to_cstrings() -> Vec<CString> {
+ env::args_os()
+ .map(|a| CString::new(a.into_vec()).unwrap())
+ .collect()
+}
+
+// TODO Windows support is incomplete. We should either use env::args_os()
+// (or call into GetCommandLineW() + CommandLineToArgvW()), convert these to
+// PyUnicode instances, and pass these into Python/Mercurial outside the
+// standard PySys_SetArgvEx() mechanism. This will allow us to preserve the
+// raw bytes (since PySys_SetArgvEx() is based on char* and can drop wchar
+// data).
+//
+// For now, we use env::args(). This will choke on invalid UTF-8 arguments.
+// But it is better than nothing.
+#[cfg(target_family = "windows")]
+fn args_to_cstrings() -> Vec<CString> {
+ env::args().map(|a| CString::new(a).unwrap()).collect()
+}
+
+fn set_python_home(env: &Environment) {
+ let raw = cstring_from_os(&env.python_home).into_raw();
+ unsafe {
+ python27_sys::Py_SetPythonHome(raw);
+ }
+}
+
+fn update_encoding(_py: Python, _sys_mod: &PyModule) {
+ // Call sys.setdefaultencoding("undefined") if HGUNICODEPEDANTRY is set.
+ let pedantry = env::var("HGUNICODEPEDANTRY").is_ok();
+
+ if pedantry {
+ // site.py removes the sys.setdefaultencoding attribute. So we need
+ // to reload the module to get a handle on it. This is a
+ // lesser-used feature and we'll support it later.
+ // TODO support this
+ panic!("HGUNICODEPEDANTRY is not yet supported");
+ }
+}
+
+fn update_modules_path(env: &Environment, py: Python, sys_mod: &PyModule) {
+ let sys_path = sys_mod.get(py, "path").unwrap();
+ sys_path
+ .call_method(py, "insert", (0, env.mercurial_modules.to_str()), None)
+ .expect("failed to update sys.path to location of Mercurial modules");
+}
+
+fn run() -> Result<(), i32> {
+ let env = get_environment();
+
+ //println!("{:?}", env);
+
+ // Tell Python where it is installed.
+ set_python_home(&env);
+
+ // Set program name. The backing memory needs to live for the duration of the
+ // interpreter.
+ //
+ // TODO consider storing this in a static or associating with lifetime of
+ // the Python interpreter.
+ //
+ // Yes, we use the path to the Python interpreter, not argv[0], here. The
+ // reason is that Python uses the given path to find the location of
+ // Python files. Apparently we could define our own ``Py_GetPath()``
+ // implementation. But this may require statically linking Python, which is
+ // not desirable.
+ let program_name = cstring_from_os(&env.python_exe).as_ptr();
+ unsafe {
+ python27_sys::Py_SetProgramName(program_name as *mut i8);
+ }
+
+ unsafe {
+ python27_sys::Py_Initialize();
+ }
+
+ // https://docs.python.org/2/c-api/init.html#c.PySys_SetArgvEx has important
+ // usage information about PySys_SetArgvEx:
+ //
+ // * It says the first argument should be the script that is being executed.
+ // If not a script, it can be empty. We are definitely not a script.
+ // However, parts of Mercurial do look at sys.argv[0]. So we need to set
+ // something here.
+ //
+ // * When embedding Python, we should use ``PySys_SetArgvEx()`` and set
+ // ``updatepath=0`` for security reasons. Essentially, Python's default
+ // logic will treat an empty argv[0] in a manner that could result in
+ // sys.path picking up directories it shouldn't and this could lead to
+ // loading untrusted modules.
+
+ // env::args() will panic if it sees a non-UTF-8 byte sequence. And
+ // Mercurial supports arbitrary encodings of input data. So we need to
+ // use OS-specific mechanisms to get the raw bytes without UTF-8
+ // interference.
+ let args = args_to_cstrings();
+ let argv: Vec<*const c_char> = args.iter().map(|a| a.as_ptr()).collect();
+
+ unsafe {
+ python27_sys::PySys_SetArgvEx(args.len() as c_int, argv.as_ptr() as *mut *mut i8, 0);
+ }
+
+ let result;
+ {
+ // These need to be dropped before we call Py_Finalize(). Hence the
+ // block.
+ let gil = Python::acquire_gil();
+ let py = gil.python();
+
+ // Mercurial code could call sys.exit(), which will call exit()
+ // itself. So this may not return.
+ // TODO this may cause issues on Windows due to the CRT mismatch.
+ // Investigate if we can intercept sys.exit() or SystemExit() to
+ // ensure we handle process exit.
+ result = match run_py(&env, py) {
+ // Print unhandled exceptions and exit code 255, as this is what
+ // `python` does.
+ Err(err) => {
+ err.print(py);
+ Err(255)
+ }
+ Ok(()) => Ok(()),
+ };
+ }
+
+ unsafe {
+ python27_sys::Py_Finalize();
+ }
+
+ result
+}
+
+fn run_py(env: &Environment, py: Python) -> PyResult<()> {
+ let sys_mod = py.import("sys").unwrap();
+
+ update_encoding(py, &sys_mod);
+ update_modules_path(&env, py, &sys_mod);
+
+ // TODO consider a better error message on failure to import.
+ let demand_mod = py.import("hgdemandimport")?;
+ demand_mod.call(py, "enable", NoArgs, None)?;
+
+ let dispatch_mod = py.import("mercurial.dispatch")?;
+ dispatch_mod.call(py, "run", NoArgs, None)?;
+
+ Ok(())
+}
+
+fn main() {
+ let exit_code = match run() {
+ Err(err) => err,
+ Ok(()) => 0,
+ };
+
+ std::process::exit(exit_code);
+}
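
In Python terms, once the embedded interpreter is up, ``run_py()`` above is
roughly equivalent to the sketch below (the real binary also sets the Python
home, program name and argv natively first; ``run_py_equivalent`` and its
argument are hypothetical)::

    # Approximate Python equivalent of run_py() in main.rs.
    import sys

    def run_py_equivalent(mercurial_modules):
        # update_modules_path(): make the source checkout importable.
        sys.path.insert(0, mercurial_modules)

        # Enable demand importing before pulling in the rest of Mercurial.
        import hgdemandimport
        hgdemandimport.enable()

        # Hand control to the normal command dispatcher.
        from mercurial import dispatch
        dispatch.run()
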
--- a/setup.py Mon Jan 08 16:07:51 2018 -0800
+++ b/setup.py Mon Jan 22 17:53:02 2018 -0500
@@ -29,12 +29,16 @@
if sys.version_info[0] >= 3:
printf = eval('print')
libdir_escape = 'unicode_escape'
+ def sysstr(s):
+ return s.decode('latin-1')
else:
libdir_escape = 'string_escape'
def printf(*args, **kwargs):
f = kwargs.get('file', sys.stdout)
end = kwargs.get('end', '\n')
f.write(b' '.join(args) + end)
+ def sysstr(s):
+ return s
# Attempt to guide users to a modern pip - this means that 2.6 users
# should have a chance of getting a 4.2 release, and when we ratchet
@@ -136,6 +140,18 @@
from distutils.sysconfig import get_python_inc, get_config_var
from distutils.version import StrictVersion
+def write_if_changed(path, content):
+ """Write content to a file iff the content hasn't changed."""
+ if os.path.exists(path):
+ with open(path, 'rb') as fh:
+ current = fh.read()
+ else:
+ current = b''
+
+ if current != content:
+ with open(path, 'wb') as fh:
+ fh.write(content)
+
scripts = ['hg']
if os.name == 'nt':
# We remove hg.bat if we are able to build hg.exe.
@@ -283,8 +299,8 @@
if os.path.isdir('.hg'):
hg = findhg()
cmd = ['log', '-r', '.', '--template', '{tags}\n']
- numerictags = [t for t in hg.run(cmd).split() if t[0:1].isdigit()]
- hgid = hg.run(['id', '-i']).strip()
+ numerictags = [t for t in sysstr(hg.run(cmd)).split() if t[0:1].isdigit()]
+ hgid = sysstr(hg.run(['id', '-i'])).strip()
if not hgid:
# Bail out if hg is having problems interacting with this repository,
# rather than falling through and producing a bogus version number.
@@ -297,7 +313,7 @@
version += '+'
else: # no tag found
ltagcmd = ['parents', '--template', '{latesttag}']
- ltag = hg.run(ltagcmd)
+ ltag = sysstr(hg.run(ltagcmd))
changessincecmd = ['log', '-T', 'x\n', '-r', "only(.,'%s')" % ltag]
changessince = len(hg.run(changessincecmd).splitlines())
version = '%s+%s-%s' % (ltag, changessince, hgid)
@@ -317,9 +333,14 @@
version = kw.get('node', '')[:12]
if version:
- with open("mercurial/__version__.py", "w") as f:
- f.write('# this file is autogenerated by setup.py\n')
- f.write('version = "%s"\n' % version)
+ versionb = version
+ if not isinstance(versionb, bytes):
+ versionb = versionb.encode('ascii')
+
+ write_if_changed('mercurial/__version__.py', b''.join([
+ b'# this file is autogenerated by setup.py\n'
+ b'version = "%s"\n' % versionb,
+ ]))
try:
oldpolicy = os.environ.get('HGMODULEPOLICY', None)
@@ -478,9 +499,13 @@
modulepolicy = 'allow'
else:
modulepolicy = 'c'
- with open(os.path.join(basepath, '__modulepolicy__.py'), "w") as f:
- f.write('# this file is autogenerated by setup.py\n')
- f.write('modulepolicy = b"%s"\n' % modulepolicy)
+
+ content = b''.join([
+ b'# this file is autogenerated by setup.py\n',
+ b'modulepolicy = b"%s"\n' % modulepolicy.encode('ascii'),
+ ])
+ write_if_changed(os.path.join(basepath, '__modulepolicy__.py'),
+ content)
build_py.run(self)
@@ -767,7 +792,7 @@
'mercurial.thirdparty.attr',
'hgext', 'hgext.convert', 'hgext.fsmonitor',
'hgext.fsmonitor.pywatchman', 'hgext.highlight',
- 'hgext.largefiles', 'hgext.zeroconf', 'hgext3rd',
+ 'hgext.largefiles', 'hgext.lfs', 'hgext.zeroconf', 'hgext3rd',
'hgdemandimport']
common_depends = ['mercurial/bitmanipulation.h',
@@ -910,7 +935,7 @@
if py2exeloaded:
extra['console'] = [
{'script':'hg',
- 'copyright':'Copyright (C) 2005-2017 Matt Mackall and others',
+ 'copyright':'Copyright (C) 2005-2018 Matt Mackall and others',
'product_version':version}]
# sub command of 'build' because 'py2exe' does not handle sub_commands
build.sub_commands.insert(0, ('build_hgextindex', None))
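
A small illustration of the new ``write_if_changed()`` helper (the path and
version string are hypothetical): an identical second write is a no-op, so the
file's mtime is not bumped and dependent build steps are not invalidated::

    # Sketch: write_if_changed() only rewrites the file when content differs.
    content = b''.join([
        b'# this file is autogenerated by setup.py\n',
        b'version = "4.5"\n',                        # hypothetical version
    ])
    write_if_changed('mercurial/__version__.py', content)  # writes the file
    write_if_changed('mercurial/__version__.py', content)  # second call: no-op
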
--- a/tests/autodiff.py Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/autodiff.py Mon Jan 22 17:53:02 2018 -0500
@@ -12,33 +12,33 @@
cmdtable = {}
command = registrar.command(cmdtable)
-@command('autodiff',
- [('', 'git', '', 'git upgrade mode (yes/no/auto/warn/abort)')],
- '[OPTION]... [FILE]...')
+@command(b'autodiff',
+ [(b'', b'git', b'', b'git upgrade mode (yes/no/auto/warn/abort)')],
+ b'[OPTION]... [FILE]...')
def autodiff(ui, repo, *pats, **opts):
diffopts = patch.difffeatureopts(ui, opts)
- git = opts.get('git', 'no')
+ git = opts.get(b'git', b'no')
brokenfiles = set()
losedatafn = None
- if git in ('yes', 'no'):
- diffopts.git = git == 'yes'
+ if git in (b'yes', b'no'):
+ diffopts.git = git == b'yes'
diffopts.upgrade = False
- elif git == 'auto':
+ elif git == b'auto':
diffopts.git = False
diffopts.upgrade = True
- elif git == 'warn':
+ elif git == b'warn':
diffopts.git = False
diffopts.upgrade = True
def losedatafn(fn=None, **kwargs):
brokenfiles.add(fn)
return True
- elif git == 'abort':
+ elif git == b'abort':
diffopts.git = False
diffopts.upgrade = True
def losedatafn(fn=None, **kwargs):
- raise error.Abort('losing data for %s' % fn)
+ raise error.Abort(b'losing data for %s' % fn)
else:
- raise error.Abort('--git must be yes, no or auto')
+ raise error.Abort(b'--git must be yes, no or auto')
node1, node2 = scmutil.revpair(repo, [])
m = scmutil.match(repo[node2], pats, opts)
@@ -47,4 +47,4 @@
for chunk in it:
ui.write(chunk)
for fn in sorted(brokenfiles):
- ui.write(('data lost for: %s\n' % fn))
+ ui.write((b'data lost for: %s\n' % fn))
--- a/tests/blackbox-readonly-dispatch.py Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/blackbox-readonly-dispatch.py Mon Jan 22 17:53:02 2018 -0500
@@ -18,8 +18,8 @@
f = open('foo', 'wb')
f.write('foo\n')
f.close()
-testdispatch("add foo")
-testdispatch("commit -m commit1 -d 2000-01-01 foo")
+testdispatch("--debug add foo")
+testdispatch("--debug commit -m commit1 -d 2000-01-01 foo")
# append to file 'foo' and commit
f = open('foo', 'ab')
@@ -29,8 +29,8 @@
os.rmdir(".hg/blackbox.log")
# replace it with the real blackbox.log file
os.rename(".hg/blackbox.log-", ".hg/blackbox.log")
-testdispatch("commit -m commit2 -d 2000-01-02 foo")
+testdispatch("--debug commit -m commit2 -d 2000-01-02 foo")
# check 88803a69b24 (fancyopts modified command table)
-testdispatch("log -r 0")
-testdispatch("log -r tip")
+testdispatch("--debug log -r 0")
+testdispatch("--debug log -r tip")
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/common-pattern.py Mon Jan 22 17:53:02 2018 -0500
@@ -0,0 +1,161 @@
+# common patterns in test output that can safely be replaced
+from __future__ import absolute_import
+
+import os
+
+substitutions = [
+ # list of possible compressions
+ (br'(zstd,)?zlib,none,bzip2',
+ br'$USUAL_COMPRESSIONS$'
+ ),
+ # capabilities sent through http
+ (br'bundlecaps=HG20%2Cbundle2%3DHG20%250A'
+ br'bookmarks%250A'
+ br'changegroup%253D01%252C02%250A'
+ br'digests%253Dmd5%252Csha1%252Csha512%250A'
+ br'error%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250A'
+ br'hgtagsfnodes%250A'
+ br'listkeys%250A'
+ br'phases%253Dheads%250A'
+ br'pushkey%250A'
+ br'remote-changegroup%253Dhttp%252Chttps%250A'
+ br'stream%253Dv2',
+ # (the replacement patterns)
+ br'$USUAL_BUNDLE_CAPS$'
+ ),
+ (br'bundlecaps=HG20%2Cbundle2%3DHG20%250A'
+ br'bookmarks%250A'
+ br'changegroup%253D01%252C02%250A'
+ br'digests%253Dmd5%252Csha1%252Csha512%250A'
+ br'error%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250A'
+ br'hgtagsfnodes%250A'
+ br'listkeys%250A'
+ br'phases%253Dheads%250A'
+ br'pushkey%250A'
+ br'remote-changegroup%253Dhttp%252Chttps',
+ # (the replacement patterns)
+ br'$USUAL_BUNDLE_CAPS_SERVER$'
+ ),
+ # bundle2 capabilities sent through ssh
+ (br'bundle2=HG20%0A'
+ br'bookmarks%0A'
+ br'changegroup%3D01%2C02%0A'
+ br'digests%3Dmd5%2Csha1%2Csha512%0A'
+ br'error%3Dabort%2Cunsupportedcontent%2Cpushraced%2Cpushkey%0A'
+ br'hgtagsfnodes%0A'
+ br'listkeys%0A'
+ br'phases%3Dheads%0A'
+ br'pushkey%0A'
+ br'remote-changegroup%3Dhttp%2Chttps%0A'
+ br'stream%3Dv2',
+ # (replacement patterns)
+ br'$USUAL_BUNDLE2_CAPS$'
+ ),
+ # bundle2 capabilities advertised by the server
+ (br'bundle2=HG20%0A'
+ br'bookmarks%0A'
+ br'changegroup%3D01%2C02%0A'
+ br'digests%3Dmd5%2Csha1%2Csha512%0A'
+ br'error%3Dabort%2Cunsupportedcontent%2Cpushraced%2Cpushkey%0A'
+ br'hgtagsfnodes%0A'
+ br'listkeys%0A'
+ br'phases%3Dheads%0A'
+ br'pushkey%0A'
+ br'remote-changegroup%3Dhttp%2Chttps',
+ # (replacement patterns)
+ br'$USUAL_BUNDLE2_CAPS_SERVER$'
+ ),
+ # HTTP log dates
+ (br' - - \[\d\d/.../2\d\d\d \d\d:\d\d:\d\d] "GET',
+ br' - - [$LOGDATE$] "GET'
+ ),
+ # Windows has an extra '/' in the following lines that get globbed away:
+ # pushing to file:/*/$TESTTMP/r2 (glob)
+ # comparing with file:/*/$TESTTMP/r2 (glob)
+ # sub/maybelarge.dat: largefile 34..9c not available from
+ # file:/*/$TESTTMP/largefiles-repo (glob)
+ (br'(.*file:/)/?(/\$TESTTMP.*)',
+ lambda m: m.group(1) + b'*' + m.group(2) + b' (glob)'
+ ),
+]
+
+# Various platform error strings, keyed on a common replacement string
+_errors = {
+ br'$ENOENT$': (
+ # strerror()
+ br'No such file or directory',
+
+ # FormatMessage(ERROR_FILE_NOT_FOUND)
+ br'The system cannot find the file specified',
+ ),
+ br'$ENOTDIR$': (
+ # strerror()
+ br'Not a directory',
+
+ # FormatMessage(ERROR_PATH_NOT_FOUND)
+ br'The system cannot find the path specified',
+ ),
+ br'$ECONNRESET$': (
+ # strerror()
+ br'Connection reset by peer',
+
+ # FormatMessage(WSAECONNRESET)
+ br'An existing connection was forcibly closed by the remote host',
+ ),
+ br'$EADDRINUSE$': (
+ # strerror()
+ br'Address already in use',
+
+ # FormatMessage(WSAEADDRINUSE)
+ br'Only one usage of each socket address'
+ br' \(protocol/network address/port\) is normally permitted',
+ ),
+}
+
+for replace, msgs in _errors.items():
+ substitutions.extend((m, replace) for m in msgs)
+
+# Output lines on Windows that can be autocorrected for '\' vs '/' path
+# differences.
+_winpathfixes = [
+ # cloning subrepo s\ss from $TESTTMP/t/s/ss
+ # cloning subrepo foo\bar from http://localhost:$HGPORT/foo/bar
+ br'(?m)^cloning subrepo \S+\\.*',
+
+ # pulling from $TESTTMP\issue1852a
+ br'(?m)^pulling from \$TESTTMP\\.*',
+
+ # pushing to $TESTTMP\a
+ br'(?m)^pushing to \$TESTTMP\\.*',
+
+ # pushing subrepo s\ss to $TESTTMP/t/s/ss
+ br'(?m)^pushing subrepo \S+\\\S+ to.*',
+
+ # moving d1\d11\a1 to d3/d11/a1
+ br'(?m)^moving \S+\\.*',
+
+ # d1\a: not recording move - dummy does not exist
+ br'\S+\\\S+: not recording move .+',
+
+ # reverting s\a
+ br'(?m)^reverting (?!subrepo ).*\\.*',
+
+ # saved backup bundle to
+ # $TESTTMP\test\.hg\strip-backup/443431ffac4f-2fc5398a-backup.hg
+ br'(?m)^saved backup bundle to \$TESTTMP.*\.hg',
+
+ # no changes made to subrepo s\ss since last push to ../tcc/s/ss
+ br'(?m)^no changes made to subrepo \S+\\\S+ since.*',
+
+ # changeset 5:9cc5aa7204f0: stuff/maybelarge.dat references missing
+ # $TESTTMP\largefiles-repo-hg\.hg\largefiles\76..38
+ br'(?m)^changeset .* references (corrupted|missing) \$TESTTMP\\.*',
+
+ # stuff/maybelarge.dat: largefile 76..38 not available from
+ # file:/*/$TESTTMP\largefiles-repo (glob)
+ br'.*: largefile \S+ not available from file:/\*/.+',
+]
+
+if os.name == 'nt':
+ substitutions.extend([(s, lambda match: match.group().replace(b'\\', b'/'))
+ for s in _winpathfixes])
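
A hedged sketch of how a harness applies these substitutions to a line of test
output; ``applyfixes`` is a hypothetical helper, and the single
``(pattern, replacement)`` pair is copied from the list above (replacements
may be byte strings or callables, i.e. anything ``re.sub()`` accepts)::

    # Sketch: applying one substitution pair from the list above.
    import re

    pairs = [
        (br'(zstd,)?zlib,none,bzip2', br'$USUAL_COMPRESSIONS$'),
    ]

    def applyfixes(line):
        for pattern, replacement in pairs:
            line = re.sub(pattern, replacement, line)
        return line

    print(applyfixes(b'zstd,zlib,none,bzip2'))  # -> b'$USUAL_COMPRESSIONS$'
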
--- a/tests/dummysmtpd.py Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/dummysmtpd.py Mon Jan 22 17:53:02 2018 -0500
@@ -9,6 +9,7 @@
import smtpd
import ssl
import sys
+import traceback
from mercurial import (
server,
@@ -27,6 +28,15 @@
def process_message(self, peer, mailfrom, rcpttos, data):
log('%s from=%s to=%s\n' % (peer[0], mailfrom, ', '.join(rcpttos)))
+ def handle_error(self):
+ # On Windows, a bad SSL connection sometimes generates a WSAECONNRESET.
+ # The default handler will shut down this server, and then both the
+ # current connection and subsequent ones fail on the client side with
+ # "No connection could be made because the target machine actively
+ # refused it". If we eat the error, then the client properly aborts in
+ # the expected way, and the server is available for subsequent requests.
+ traceback.print_exc()
+
class dummysmtpsecureserver(dummysmtpserver):
def __init__(self, localaddr, certfile):
dummysmtpserver.__init__(self, localaddr)
--- a/tests/dummyssh Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/dummyssh Mon Jan 22 17:53:02 2018 -0500
@@ -13,9 +13,9 @@
os.environ["SSH_CLIENT"] = "%s 1 2" % os.environ.get('LOCALIP', '127.0.0.1')
log = open("dummylog", "ab")
-log.write("Got arguments")
+log.write(b"Got arguments")
for i, arg in enumerate(sys.argv[1:]):
- log.write(" %d:%s" % (i + 1, arg))
+ log.write(b" %d:%s" % (i + 1, arg))
log.write("\n")
log.close()
hgcmd = sys.argv[2]
--- a/tests/f Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/f Mon Jan 22 17:53:02 2018 -0500
@@ -59,7 +59,7 @@
if isfile:
if opts.type:
facts.append('file')
- if opts.hexdump or opts.dump or opts.md5:
+ if any((opts.hexdump, opts.dump, opts.md5, opts.sha1, opts.sha256)):
content = open(f, 'rb').read()
elif islink:
if opts.type:
@@ -95,6 +95,9 @@
if opts.sha1 and content is not None:
h = hashlib.sha1(content)
facts.append('sha1=%s' % h.hexdigest()[:opts.bytes])
+ if opts.sha256 and content is not None:
+ h = hashlib.sha256(content)
+ facts.append('sha256=%s' % h.hexdigest()[:opts.bytes])
if isstdin:
outfile.write(b', '.join(facts) + b'\n')
elif facts:
@@ -150,6 +153,8 @@
help="recurse into directories")
parser.add_option("-S", "--sha1", action="store_true",
help="show sha1 hash of the content")
+ parser.add_option("", "--sha256", action="store_true",
+ help="show sha256 hash of the content")
parser.add_option("-M", "--md5", action="store_true",
help="show md5 hash of the content")
parser.add_option("-D", "--dump", action="store_true",
--- a/tests/flagprocessorext.py Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/flagprocessorext.py Mon Jan 22 17:53:02 2018 -0500
@@ -58,7 +58,7 @@
def noopaddrevision(orig, self, text, transaction, link, p1, p2,
cachedelta=None, node=None,
flags=revlog.REVIDX_DEFAULT_FLAGS):
- if '[NOOP]' in text:
+ if b'[NOOP]' in text:
flags |= REVIDX_NOOP
return orig(self, text, transaction, link, p1, p2, cachedelta=cachedelta,
node=node, flags=flags)
@@ -66,7 +66,7 @@
def b64addrevision(orig, self, text, transaction, link, p1, p2,
cachedelta=None, node=None,
flags=revlog.REVIDX_DEFAULT_FLAGS):
- if '[BASE64]' in text:
+ if b'[BASE64]' in text:
flags |= REVIDX_BASE64
return orig(self, text, transaction, link, p1, p2, cachedelta=cachedelta,
node=node, flags=flags)
@@ -74,7 +74,7 @@
def gzipaddrevision(orig, self, text, transaction, link, p1, p2,
cachedelta=None, node=None,
flags=revlog.REVIDX_DEFAULT_FLAGS):
- if '[GZIP]' in text:
+ if b'[GZIP]' in text:
flags |= REVIDX_GZIP
return orig(self, text, transaction, link, p1, p2, cachedelta=cachedelta,
node=node, flags=flags)
@@ -84,7 +84,7 @@
flags=revlog.REVIDX_DEFAULT_FLAGS):
# This addrevision wrapper is meant to add a flag we will not have
# transforms registered for, ensuring we handle this error case.
- if '[FAIL]' in text:
+ if b'[FAIL]' in text:
flags |= REVIDX_FAIL
return orig(self, text, transaction, link, p1, p2, cachedelta=cachedelta,
node=node, flags=flags)
--- a/tests/get-with-headers.py Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/get-with-headers.py Mon Jan 22 17:53:02 2018 -0500
@@ -5,6 +5,7 @@
from __future__ import absolute_import, print_function
+import argparse
import json
import os
import sys
@@ -22,25 +23,27 @@
except ImportError:
pass
-twice = False
-if '--twice' in sys.argv:
- sys.argv.remove('--twice')
- twice = True
-headeronly = False
-if '--headeronly' in sys.argv:
- sys.argv.remove('--headeronly')
- headeronly = True
-formatjson = False
-if '--json' in sys.argv:
- sys.argv.remove('--json')
- formatjson = True
+parser = argparse.ArgumentParser()
+parser.add_argument('--twice', action='store_true')
+parser.add_argument('--headeronly', action='store_true')
+parser.add_argument('--json', action='store_true')
+parser.add_argument('--hgproto')
+parser.add_argument('--requestheader', nargs='*', default=[],
+ help='Send an additional HTTP request header. Argument '
+ 'value is <header>=<value>')
+parser.add_argument('--bodyfile',
+ help='Write HTTP response body to a file')
+parser.add_argument('host')
+parser.add_argument('path')
+parser.add_argument('show', nargs='*')
-hgproto = None
-if '--hgproto' in sys.argv:
- idx = sys.argv.index('--hgproto')
- hgproto = sys.argv[idx + 1]
- sys.argv.pop(idx)
- sys.argv.pop(idx)
+args = parser.parse_args()
+
+twice = args.twice
+headeronly = args.headeronly
+formatjson = args.json
+hgproto = args.hgproto
+requestheaders = args.requestheader
tag = None
def request(host, path, show):
@@ -52,6 +55,10 @@
if hgproto:
headers['X-HgProto-1'] = hgproto
+ for header in requestheaders:
+ key, value = header.split('=', 1)
+ headers[key] = value
+
conn = httplib.HTTPConnection(host)
conn.request("GET", '/' + path, None, headers)
response = conn.getresponse()
@@ -66,6 +73,11 @@
print()
data = response.read()
+ if args.bodyfile:
+ bodyfh = open(args.bodyfile, 'wb')
+ else:
+ bodyfh = sys.stdout
+
# Pretty print JSON. This also has the beneficial side-effect
# of verifying emitted JSON is well-formed.
if formatjson:
@@ -74,18 +86,22 @@
data = json.loads(data)
lines = json.dumps(data, sort_keys=True, indent=2).splitlines()
for line in lines:
- print(line.rstrip())
+ bodyfh.write(line.rstrip())
+ bodyfh.write(b'\n')
else:
- sys.stdout.write(data)
+ bodyfh.write(data)
+
+ if args.bodyfile:
+ bodyfh.close()
if twice and response.getheader('ETag', None):
tag = response.getheader('ETag')
return response.status
-status = request(sys.argv[1], sys.argv[2], sys.argv[3:])
+status = request(args.host, args.path, args.show)
if twice:
- status = request(sys.argv[1], sys.argv[2], sys.argv[3:])
+ status = request(args.host, args.path, args.show)
if 200 <= status <= 305:
sys.exit(0)
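
A hedged example of the new argparse interface parsing an illustrative command
line in-process (``parser`` is the one constructed above; host, path, header
and file names are made up)::

    # Sketch: what the argparse conversion above accepts.
    args = parser.parse_args([
        '--twice',
        '--requestheader', 'If-None-Match=W/"abc"',
        '--bodyfile', 'body.out',
        'localhost:8000',        # host (hypothetical)
        'json-log',              # path
        'date', 'desc',          # response header fields to show
    ])
    assert args.twice and args.host == 'localhost:8000'
    assert args.requestheader == ['If-None-Match=W/"abc"']
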
--- a/tests/hghave.py Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/hghave.py Mon Jan 22 17:53:02 2018 -0500
@@ -284,6 +284,17 @@
return (0, 0)
return (int(m.group(1)), int(m.group(2)))
+# https://github.com/git-lfs/lfs-test-server
+@check("lfs-test-server", "git-lfs test server")
+def has_lfsserver():
+ exe = 'lfs-test-server'
+ if has_windows():
+ exe = 'lfs-test-server.exe'
+ return any(
+ os.access(os.path.join(path, exe), os.X_OK)
+ for path in os.environ["PATH"].split(os.pathsep)
+ )
+
@checkvers("git", "git client (with ext::sh support) version >= %s", (1.9,))
def has_git_range(v):
major, minor = v.split('.')[0:2]
@@ -444,6 +455,10 @@
return matchoutput("clang-format --help",
br"^OVERVIEW: A tool to format C/C\+\+[^ ]+ code.")
+@check("jshint", "JSHint static code analysis tool")
+def has_jshint():
+ return matchoutput("jshint --version 2>&1", br"jshint v")
+
@check("pygments", "Pygments source highlighting library")
def has_pygments():
try:
@@ -685,3 +700,11 @@
return True
except ImportError:
return False
+
+@check("clang-libfuzzer", "clang new enough to include libfuzzer")
+def has_clang_libfuzzer():
+ mat = matchoutput('clang --version', 'clang version (\d)')
+ if mat:
+ # libfuzzer is new in clang 6
+ return int(mat.group(1)) > 5
+ return False
--- a/tests/list-tree.py Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/list-tree.py Mon Jan 22 17:53:02 2018 -0500
@@ -24,4 +24,4 @@
else:
yield p
-print('\n'.join(sorted(gather())))
+print('\n'.join(sorted(gather(), key=lambda x: x.replace(os.path.sep, '/'))))
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/logexceptions.py Mon Jan 22 17:53:02 2018 -0500
@@ -0,0 +1,73 @@
+# logexceptions.py - Write files containing info about Mercurial exceptions
+#
+# Copyright 2017 Matt Mackall <mpm@selenic.com>
+#
+# This software may be used and distributed according to the terms of the
+# GNU General Public License version 2 or any later version.
+
+from __future__ import absolute_import
+
+import inspect
+import os
+import sys
+import traceback
+import uuid
+
+from mercurial import (
+ dispatch,
+ extensions,
+)
+
+def handleexception(orig, ui):
+ res = orig(ui)
+
+ if not ui.environ.get(b'HGEXCEPTIONSDIR'):
+ return res
+
+ dest = os.path.join(ui.environ[b'HGEXCEPTIONSDIR'],
+ str(uuid.uuid4()).encode('ascii'))
+
+ exc_type, exc_value, exc_tb = sys.exc_info()
+
+ stack = []
+ tb = exc_tb
+ while tb:
+ stack.append(tb)
+ tb = tb.tb_next
+ stack.reverse()
+
+ hgframe = 'unknown'
+ hgline = 'unknown'
+
+ # Find the first Mercurial frame in the stack.
+ for tb in stack:
+ mod = inspect.getmodule(tb)
+ if not mod.__name__.startswith(('hg', 'mercurial')):
+ continue
+
+ frame = tb.tb_frame
+
+ try:
+ with open(inspect.getsourcefile(tb), 'r') as fh:
+ hgline = fh.readlines()[frame.f_lineno - 1].strip()
+ except (IndexError, OSError):
+ pass
+
+ hgframe = '%s:%d' % (frame.f_code.co_filename, frame.f_lineno)
+ break
+
+ primary = traceback.extract_tb(exc_tb)[-1]
+ primaryframe = '%s:%d' % (primary.filename, primary.lineno)
+
+ with open(dest, 'wb') as fh:
+ parts = [
+ str(exc_value),
+ primaryframe,
+ hgframe,
+ hgline,
+ ]
+ fh.write(b'\0'.join(p.encode('utf-8', 'replace') for p in parts))
+
+def extsetup(ui):
+ extensions.wrapfunction(dispatch, 'handlecommandexception',
+ handleexception)
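
A sketch of how a test could read back one record written by this extension;
``readrecord`` is a hypothetical helper, but the on-disk format is exactly the
four fields joined by NUL bytes as written above::

    # Sketch: decoding one record produced by logexceptions.py.
    def readrecord(path):
        with open(path, 'rb') as fh:
            parts = fh.read().split(b'\0')
        # Fields, in order: exception message, frame that raised the
        # exception, first Mercurial frame, and the source line there.
        message, primaryframe, hgframe, hgline = parts
        return message, primaryframe, hgframe, hgline
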
--- a/tests/revlog-formatv0.py Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/revlog-formatv0.py Mon Jan 22 17:53:02 2018 -0500
@@ -22,27 +22,27 @@
import sys
files = [
- ('formatv0/.hg/00changelog.i',
- '000000000000004400000000000000000000000000000000000000'
- '000000000000000000000000000000000000000000000000000000'
- '0000a1ef0b125355d27765928be600cfe85784284ab3'),
- ('formatv0/.hg/00changelog.d',
- '756163613935613961356635353036303562366138343738336237'
- '61623536363738616436356635380a757365720a3020300a656d70'
- '74790a0a656d7074792066696c65'),
- ('formatv0/.hg/00manifest.i',
- '000000000000003000000000000000000000000000000000000000'
- '000000000000000000000000000000000000000000000000000000'
- '0000aca95a9a5f550605b6a84783b7ab56678ad65f58'),
- ('formatv0/.hg/00manifest.d',
- '75656d707479006238306465356431333837353835343163356630'
- '35323635616431343461623966613836643164620a'),
- ('formatv0/.hg/data/empty.i',
- '000000000000000000000000000000000000000000000000000000'
- '000000000000000000000000000000000000000000000000000000'
- '0000b80de5d138758541c5f05265ad144ab9fa86d1db'),
- ('formatv0/.hg/data/empty.d',
- ''),
+ (b'formatv0/.hg/00changelog.i',
+ b'000000000000004400000000000000000000000000000000000000'
+ b'000000000000000000000000000000000000000000000000000000'
+ b'0000a1ef0b125355d27765928be600cfe85784284ab3'),
+ (b'formatv0/.hg/00changelog.d',
+ b'756163613935613961356635353036303562366138343738336237'
+ b'61623536363738616436356635380a757365720a3020300a656d70'
+ b'74790a0a656d7074792066696c65'),
+ (b'formatv0/.hg/00manifest.i',
+ b'000000000000003000000000000000000000000000000000000000'
+ b'000000000000000000000000000000000000000000000000000000'
+ b'0000aca95a9a5f550605b6a84783b7ab56678ad65f58'),
+ (b'formatv0/.hg/00manifest.d',
+ b'75656d707479006238306465356431333837353835343163356630'
+ b'35323635616431343461623966613836643164620a'),
+ (b'formatv0/.hg/data/empty.i',
+ b'000000000000000000000000000000000000000000000000000000'
+ b'000000000000000000000000000000000000000000000000000000'
+ b'0000b80de5d138758541c5f05265ad144ab9fa86d1db'),
+ (b'formatv0/.hg/data/empty.d',
+ b''),
]
def makedirs(name):
--- a/tests/run-tests.py Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/run-tests.py Mon Jan 22 17:53:02 2018 -0500
@@ -45,11 +45,12 @@
from __future__ import absolute_import, print_function
+import argparse
+import collections
import difflib
import distutils.version as version
import errno
import json
-import optparse
import os
import random
import re
@@ -296,122 +297,132 @@
def getparser():
"""Obtain the OptionParser used by the CLI."""
- parser = optparse.OptionParser("%prog [options] [tests]")
-
- # keep these sorted
- parser.add_option("--blacklist", action="append",
+ parser = argparse.ArgumentParser(usage='%(prog)s [options] [tests]')
+
+ selection = parser.add_argument_group('Test Selection')
+ selection.add_argument('--allow-slow-tests', action='store_true',
+ help='allow extremely slow tests')
+ selection.add_argument("--blacklist", action="append",
help="skip tests listed in the specified blacklist file")
- parser.add_option("--whitelist", action="append",
+ selection.add_argument("--changed",
+ help="run tests that are changed in parent rev or working directory")
+ selection.add_argument("-k", "--keywords",
+ help="run tests matching keywords")
+ selection.add_argument("-r", "--retest", action="store_true",
+ help = "retest failed tests")
+ selection.add_argument("--test-list", action="append",
+ help="read tests to run from the specified file")
+ selection.add_argument("--whitelist", action="append",
help="always run tests listed in the specified whitelist file")
- parser.add_option("--test-list", action="append",
- help="read tests to run from the specified file")
- parser.add_option("--changed", type="string",
- help="run tests that are changed in parent rev or working directory")
- parser.add_option("-C", "--annotate", action="store_true",
- help="output files annotated with coverage")
- parser.add_option("-c", "--cover", action="store_true",
- help="print a test coverage report")
- parser.add_option("--color", choices=["always", "auto", "never"],
- default=os.environ.get('HGRUNTESTSCOLOR', 'auto'),
- help="colorisation: always|auto|never (default: auto)")
- parser.add_option("-d", "--debug", action="store_true",
+ selection.add_argument('tests', metavar='TESTS', nargs='*',
+ help='Tests to run')
+
+ harness = parser.add_argument_group('Test Harness Behavior')
+ harness.add_argument('--bisect-repo',
+ metavar='bisect_repo',
+ help=("Path of a repo to bisect. Use together with "
+ "--known-good-rev"))
+ harness.add_argument("-d", "--debug", action="store_true",
help="debug mode: write output of test scripts to console"
" rather than capturing and diffing it (disables timeout)")
- parser.add_option("-f", "--first", action="store_true",
+ harness.add_argument("-f", "--first", action="store_true",
help="exit on the first test failure")
- parser.add_option("-H", "--htmlcov", action="store_true",
- help="create an HTML report of the coverage of the files")
- parser.add_option("-i", "--interactive", action="store_true",
+ harness.add_argument("-i", "--interactive", action="store_true",
help="prompt to accept changed output")
- parser.add_option("-j", "--jobs", type="int",
+ harness.add_argument("-j", "--jobs", type=int,
help="number of jobs to run in parallel"
" (default: $%s or %d)" % defaults['jobs'])
- parser.add_option("--keep-tmpdir", action="store_true",
+ harness.add_argument("--keep-tmpdir", action="store_true",
help="keep temporary directory after running tests")
- parser.add_option("-k", "--keywords",
- help="run tests matching keywords")
- parser.add_option("--list-tests", action="store_true",
+ harness.add_argument('--known-good-rev',
+ metavar="known_good_rev",
+ help=("Automatically bisect any failures using this "
+ "revision as a known-good revision."))
+ harness.add_argument("--list-tests", action="store_true",
help="list tests instead of running them")
- parser.add_option("-l", "--local", action="store_true",
+ harness.add_argument("--loop", action="store_true",
+ help="loop tests repeatedly")
+ harness.add_argument('--random', action="store_true",
+ help='run tests in random order')
+ harness.add_argument("-p", "--port", type=int,
+ help="port on which servers should listen"
+ " (default: $%s or %d)" % defaults['port'])
+ harness.add_argument('--profile-runner', action='store_true',
+ help='run statprof on run-tests')
+ harness.add_argument("-R", "--restart", action="store_true",
+ help="restart at last error")
+ harness.add_argument("--runs-per-test", type=int, dest="runs_per_test",
+ help="run each test N times (default=1)", default=1)
+ harness.add_argument("--shell",
+ help="shell to use (default: $%s or %s)" % defaults['shell'])
+ harness.add_argument('--showchannels', action='store_true',
+ help='show scheduling channels')
+ harness.add_argument("--slowtimeout", type=int,
+ help="kill errant slow tests after SLOWTIMEOUT seconds"
+ " (default: $%s or %d)" % defaults['slowtimeout'])
+ harness.add_argument("-t", "--timeout", type=int,
+ help="kill errant tests after TIMEOUT seconds"
+ " (default: $%s or %d)" % defaults['timeout'])
+ harness.add_argument("--tmpdir",
+ help="run tests in the given temporary directory"
+ " (implies --keep-tmpdir)")
+ harness.add_argument("-v", "--verbose", action="store_true",
+ help="output verbose messages")
+
+ hgconf = parser.add_argument_group('Mercurial Configuration')
+ hgconf.add_argument("--chg", action="store_true",
+ help="install and use chg wrapper in place of hg")
+ hgconf.add_argument("--compiler",
+ help="compiler to build with")
+ hgconf.add_argument('--extra-config-opt', action="append", default=[],
+ help='set the given config opt in the test hgrc')
+ hgconf.add_argument("-l", "--local", action="store_true",
help="shortcut for --with-hg=<testdir>/../hg, "
"and --with-chg=<testdir>/../contrib/chg/chg if --chg is set")
- parser.add_option("--loop", action="store_true",
- help="loop tests repeatedly")
- parser.add_option("--runs-per-test", type="int", dest="runs_per_test",
- help="run each test N times (default=1)", default=1)
- parser.add_option("-n", "--nodiff", action="store_true",
- help="skip showing test changes")
- parser.add_option("--outputdir", type="string",
- help="directory to write error logs to (default=test directory)")
- parser.add_option("-p", "--port", type="int",
- help="port on which servers should listen"
- " (default: $%s or %d)" % defaults['port'])
- parser.add_option("--compiler", type="string",
- help="compiler to build with")
- parser.add_option("--pure", action="store_true",
+ hgconf.add_argument("--ipv6", action="store_true",
+ help="prefer IPv6 to IPv4 for network related tests")
+ hgconf.add_argument("--pure", action="store_true",
help="use pure Python code instead of C extensions")
- parser.add_option("-R", "--restart", action="store_true",
- help="restart at last error")
- parser.add_option("-r", "--retest", action="store_true",
- help="retest failed tests")
- parser.add_option("-S", "--noskips", action="store_true",
- help="don't report skip tests verbosely")
- parser.add_option("--shell", type="string",
- help="shell to use (default: $%s or %s)" % defaults['shell'])
- parser.add_option("-t", "--timeout", type="int",
- help="kill errant tests after TIMEOUT seconds"
- " (default: $%s or %d)" % defaults['timeout'])
- parser.add_option("--slowtimeout", type="int",
- help="kill errant slow tests after SLOWTIMEOUT seconds"
- " (default: $%s or %d)" % defaults['slowtimeout'])
- parser.add_option("--time", action="store_true",
- help="time how long each test takes")
- parser.add_option("--json", action="store_true",
- help="store test result data in 'report.json' file")
- parser.add_option("--tmpdir", type="string",
- help="run tests in the given temporary directory"
- " (implies --keep-tmpdir)")
- parser.add_option("-v", "--verbose", action="store_true",
- help="output verbose messages")
- parser.add_option("--xunit", type="string",
- help="record xunit results at specified path")
- parser.add_option("--view", type="string",
- help="external diff viewer")
- parser.add_option("--with-hg", type="string",
+ hgconf.add_argument("-3", "--py3k-warnings", action="store_true",
+ help="enable Py3k warnings on Python 2.7+")
+ hgconf.add_argument("--with-chg", metavar="CHG",
+ help="use specified chg wrapper in place of hg")
+ hgconf.add_argument("--with-hg",
metavar="HG",
help="test using specified hg script rather than a "
"temporary installation")
- parser.add_option("--chg", action="store_true",
- help="install and use chg wrapper in place of hg")
- parser.add_option("--with-chg", metavar="CHG",
- help="use specified chg wrapper in place of hg")
- parser.add_option("--ipv6", action="store_true",
- help="prefer IPv6 to IPv4 for network related tests")
- parser.add_option("-3", "--py3k-warnings", action="store_true",
- help="enable Py3k warnings on Python 2.7+")
# This option should be deleted once test-check-py3-compat.t and other
# Python 3 tests run with Python 3.
- parser.add_option("--with-python3", metavar="PYTHON3",
- help="Python 3 interpreter (if running under Python 2)"
- " (TEMPORARY)")
- parser.add_option('--extra-config-opt', action="append",
- help='set the given config opt in the test hgrc')
- parser.add_option('--random', action="store_true",
- help='run tests in random order')
- parser.add_option('--profile-runner', action='store_true',
- help='run statprof on run-tests')
- parser.add_option('--allow-slow-tests', action='store_true',
- help='allow extremely slow tests')
- parser.add_option('--showchannels', action='store_true',
- help='show scheduling channels')
- parser.add_option('--known-good-rev', type="string",
- metavar="known_good_rev",
- help=("Automatically bisect any failures using this "
- "revision as a known-good revision."))
- parser.add_option('--bisect-repo', type="string",
- metavar='bisect_repo',
- help=("Path of a repo to bisect. Use together with "
- "--known-good-rev"))
+ hgconf.add_argument("--with-python3", metavar="PYTHON3",
+ help="Python 3 interpreter (if running under Python 2)"
+ " (TEMPORARY)")
+
+ reporting = parser.add_argument_group('Results Reporting')
+ reporting.add_argument("-C", "--annotate", action="store_true",
+ help="output files annotated with coverage")
+ reporting.add_argument("--color", choices=["always", "auto", "never"],
+ default=os.environ.get('HGRUNTESTSCOLOR', 'auto'),
+ help="colorisation: always|auto|never (default: auto)")
+ reporting.add_argument("-c", "--cover", action="store_true",
+ help="print a test coverage report")
+ reporting.add_argument('--exceptions', action='store_true',
+ help='log all exceptions and generate an exception report')
+ reporting.add_argument("-H", "--htmlcov", action="store_true",
+ help="create an HTML report of the coverage of the files")
+ reporting.add_argument("--json", action="store_true",
+ help="store test result data in 'report.json' file")
+ reporting.add_argument("--outputdir",
+ help="directory to write error logs to (default=test directory)")
+ reporting.add_argument("-n", "--nodiff", action="store_true",
+ help="skip showing test changes")
+ reporting.add_argument("-S", "--noskips", action="store_true",
+ help="don't report skip tests verbosely")
+ reporting.add_argument("--time", action="store_true",
+ help="time how long each test takes")
+ reporting.add_argument("--view",
+ help="external diff viewer")
+ reporting.add_argument("--xunit",
+ help="record xunit results at specified path")
for option, (envvar, default) in defaults.items():
defaults[option] = type(default)(os.environ.get(envvar, default))
@@ -421,7 +432,7 @@
def parseargs(args, parser):
"""Parse arguments with our OptionParser and validate results."""
- (options, args) = parser.parse_args(args)
+ options = parser.parse_args(args)
# jython is always pure
if 'java' in sys.platform or '__pypy__' in sys.modules:
@@ -550,7 +561,7 @@
if options.showchannels:
options.nodiff = True
- return (options, args)
+ return options
def rename(src, dst):
"""Like os.rename(), trade atomicity and opened files friendliness
@@ -892,10 +903,9 @@
# Diff generation may rely on written .err file.
if (ret != 0 or out != self._refout) and not self._skipped \
and not self._debug:
- f = open(self.errpath, 'wb')
- for line in out:
- f.write(line)
- f.close()
+ with open(self.errpath, 'wb') as f:
+ for line in out:
+ f.write(line)
# The result object handles diff calculation for us.
with firstlock:
@@ -936,10 +946,9 @@
if (self._ret != 0 or self._out != self._refout) and not self._skipped \
and not self._debug and self._out:
- f = open(self.errpath, 'wb')
- for line in self._out:
- f.write(line)
- f.close()
+ with open(self.errpath, 'wb') as f:
+ for line in self._out:
+ f.write(line)
vlog("# Ret was:", self._ret, '(%s)' % self.name)
@@ -967,13 +976,20 @@
self._portmap(0),
self._portmap(1),
self._portmap(2),
- (br'(?m)^(saved backup bundle to .*\.hg)( \(glob\))?$',
- br'\1 (glob)'),
(br'([^0-9])%s' % re.escape(self._localip()), br'\1$LOCALIP'),
(br'\bHG_TXNID=TXN:[a-f0-9]{40}\b', br'HG_TXNID=TXN:$ID$'),
]
r.append((self._escapepath(self._testtmp), b'$TESTTMP'))
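+ # allow a test suite to define extra output substitutions in common-pattern.py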
+ replacementfile = os.path.join(self._testdir, b'common-pattern.py')
+
+ if os.path.exists(replacementfile):
+ data = {}
+ with open(replacementfile, mode='rb') as source:
+ # the intermediate 'compile' step helps with debugging
+ code = compile(source.read(), replacementfile, 'exec')
+ exec(code, data)
+ r.extend(data.get('substitutions', ()))
return r
def _escapepath(self, p):
@@ -1075,29 +1091,31 @@
def _createhgrc(self, path):
"""Create an hgrc file for this test."""
- hgrc = open(path, 'wb')
- hgrc.write(b'[ui]\n')
- hgrc.write(b'slash = True\n')
- hgrc.write(b'interactive = False\n')
- hgrc.write(b'mergemarkers = detailed\n')
- hgrc.write(b'promptecho = True\n')
- hgrc.write(b'[defaults]\n')
- hgrc.write(b'[devel]\n')
- hgrc.write(b'all-warnings = true\n')
- hgrc.write(b'default-date = 0 0\n')
- hgrc.write(b'[largefiles]\n')
- hgrc.write(b'usercache = %s\n' %
- (os.path.join(self._testtmp, b'.cache/largefiles')))
- hgrc.write(b'[web]\n')
- hgrc.write(b'address = localhost\n')
- hgrc.write(b'ipv6 = %s\n' % str(self._useipv6).encode('ascii'))
-
- for opt in self._extraconfigopts:
- section, key = opt.split('.', 1)
- assert '=' in key, ('extra config opt %s must '
- 'have an = for assignment' % opt)
- hgrc.write(b'[%s]\n%s\n' % (section, key))
- hgrc.close()
+ with open(path, 'wb') as hgrc:
+ hgrc.write(b'[ui]\n')
+ hgrc.write(b'slash = True\n')
+ hgrc.write(b'interactive = False\n')
+ hgrc.write(b'mergemarkers = detailed\n')
+ hgrc.write(b'promptecho = True\n')
+ hgrc.write(b'[defaults]\n')
+ hgrc.write(b'[devel]\n')
+ hgrc.write(b'all-warnings = true\n')
+ hgrc.write(b'default-date = 0 0\n')
+ hgrc.write(b'[largefiles]\n')
+ hgrc.write(b'usercache = %s\n' %
+ (os.path.join(self._testtmp, b'.cache/largefiles')))
+ hgrc.write(b'[lfs]\n')
+ hgrc.write(b'usercache = %s\n' %
+ (os.path.join(self._testtmp, b'.cache/lfs')))
+ hgrc.write(b'[web]\n')
+ hgrc.write(b'address = localhost\n')
+ hgrc.write(b'ipv6 = %s\n' % str(self._useipv6).encode('ascii'))
+
+ for opt in self._extraconfigopts:
+ section, key = opt.encode('utf-8').split(b'.', 1)
+ assert b'=' in key, ('extra config opt %s must '
+ 'have an = for assignment' % opt)
+ hgrc.write(b'[%s]\n%s\n' % (section, key))
def fail(self, msg):
# unittest differentiates between errored and failed.
@@ -1203,9 +1221,7 @@
def __init__(self, path, *args, **kwds):
# accept an extra "case" parameter
- case = None
- if 'case' in kwds:
- case = kwds.pop('case')
+ case = kwds.pop('case', None)
self._case = case
self._allcases = parsettestcases(path)
super(TTest, self).__init__(path, *args, **kwds)
@@ -1219,9 +1235,8 @@
return os.path.join(self._testdir, self.bname)
def _run(self, env):
- f = open(self.path, 'rb')
- lines = f.readlines()
- f.close()
+ with open(self.path, 'rb') as f:
+ lines = f.readlines()
# .t file is both reference output and the test input, keep reference
# output updated with the test input. This avoids some race
@@ -1233,10 +1248,9 @@
# Write out the generated script.
fname = b'%s.sh' % self._testtmp
- f = open(fname, 'wb')
- for l in script:
- f.write(l)
- f.close()
+ with open(fname, 'wb') as f:
+ for l in script:
+ f.write(l)
cmd = b'%s "%s"' % (self._shell, fname)
vlog("# Running", cmd)
@@ -1326,6 +1340,9 @@
script.append(b'alias hg="%s"\n' % self._hgcommand)
if os.getenv('MSYSTEM'):
script.append(b'alias pwd="pwd -W"\n')
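+ # expose the selected test case name to the .t script as $TESTCASE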
+ if self._case:
+ script.append(b'TESTCASE=%s\n' % shellquote(self._case))
+ script.append(b'export TESTCASE\n')
n = 0
for n, l in enumerate(lines):
@@ -1436,10 +1453,7 @@
r = self.linematch(el, lout)
if isinstance(r, str):
- if r == '+glob':
- lout = el[:-1] + ' (glob)\n'
- r = '' # Warn only this line.
- elif r == '-glob':
+ if r == '-glob':
lout = ''.join(el.rsplit(' (glob)', 1))
r = '' # Warn only this line.
elif r == "retry":
@@ -1523,6 +1537,7 @@
@staticmethod
def rematch(el, l):
try:
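+ # group the pattern so the appended newline anchor applies to the whole
+ # expression, not only its last alternative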
+ el = b'(?:' + el + b')'
# use \Z to ensure that the regex matches to the end of the string
if os.name == 'nt':
return re.match(el + br'\r?\n\Z', l)
@@ -1594,8 +1609,10 @@
if l.endswith(b" (glob)\n"):
l = l[:-8] + b"\n"
return TTest.globmatch(el[:-8], l) or retry
- if os.altsep and l.replace(b'\\', b'/') == el:
- return b'+glob'
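+ # with an alternate path separator (Windows), backslashed output may match
+ # a forward-slash expectation, optionally with a CRLF line ending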
+ if os.altsep:
+ _l = l.replace(b'\\', b'/')
+ if el == _l or os.name == 'nt' and el[:-1] + b'\r\n' == _l:
+ return True
return retry
@staticmethod
@@ -1873,9 +1890,8 @@
continue
if self._keywords:
- f = open(test.path, 'rb')
- t = f.read().lower() + test.bname.lower()
- f.close()
+ with open(test.path, 'rb') as f:
+ t = f.read().lower() + test.bname.lower()
ignored = False
for k in self._keywords.lower().split():
if k not in t:
@@ -2104,6 +2120,18 @@
os.environ['PYTHONHASHSEED'])
if self._runner.options.time:
self.printtimes(result.times)
+
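+ # summarize the exception reports collected in the output directory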
+ if self._runner.options.exceptions:
+ exceptions = aggregateexceptions(
+ os.path.join(self._runner._outputdir, b'exceptions'))
+ total = sum(exceptions.values())
+
+ self.stream.writeln('Exceptions Report:')
+ self.stream.writeln('%d total from %d frames' %
+ (total, len(exceptions)))
+ for (frame, line, exc), count in exceptions.most_common():
+ self.stream.writeln('%d\t%s: %s' % (count, frame, exc))
+
self.stream.flush()
return result
@@ -2251,6 +2279,50 @@
separators=(',', ': '))
outf.writelines(("testreport =", jsonout))
+def sorttests(testdescs, shuffle=False):
+ """Do an in-place sort of tests."""
+ if shuffle:
+ random.shuffle(testdescs)
+ return
+
+ # keywords for slow tests
+ slow = {b'svn': 10,
+ b'cvs': 10,
+ b'hghave': 10,
+ b'largefiles-update': 10,
+ b'run-tests': 10,
+ b'corruption': 10,
+ b'race': 10,
+ b'i18n': 10,
+ b'check': 100,
+ b'gendoc': 100,
+ b'contrib-perf': 200,
+ }
+ perf = {}
+
+ def sortkey(f):
+ # run largest tests first, as they tend to take the longest
+ f = f['path']
+ try:
+ return perf[f]
+ except KeyError:
+ try:
+ val = -os.stat(f).st_size
+ except OSError as e:
+ if e.errno != errno.ENOENT:
+ raise
+ perf[f] = -1e9 # file does not exist, tell early
+ return -1e9
+ for kw, mul in slow.items():
+ if kw in f:
+ val *= mul
+ if f.endswith(b'.py'):
+ val /= 10.0
+ perf[f] = val / 1000.0
+ return perf[f]
+
+ testdescs.sort(key=sortkey)
+
class TestRunner(object):
"""Holds context for executing tests.
@@ -2295,18 +2367,16 @@
oldmask = os.umask(0o22)
try:
parser = parser or getparser()
- options, args = parseargs(args, parser)
- # positional arguments are paths to test files to run, so
- # we make sure they're all bytestrings
- args = [_bytespath(a) for a in args]
+ options = parseargs(args, parser)
+ tests = [_bytespath(a) for a in options.tests]
if options.test_list is not None:
for listfile in options.test_list:
with open(listfile, 'rb') as f:
- args.extend(t for t in f.read().splitlines() if t)
+ tests.extend(t for t in f.read().splitlines() if t)
self.options = options
self._checktools()
- testdescs = self.findtests(args)
+ testdescs = self.findtests(tests)
if options.profile_runner:
import statprof
statprof.start()
@@ -2320,51 +2390,22 @@
os.umask(oldmask)
def _run(self, testdescs):
- if self.options.random:
- random.shuffle(testdescs)
- else:
- # keywords for slow tests
- slow = {b'svn': 10,
- b'cvs': 10,
- b'hghave': 10,
- b'largefiles-update': 10,
- b'run-tests': 10,
- b'corruption': 10,
- b'race': 10,
- b'i18n': 10,
- b'check': 100,
- b'gendoc': 100,
- b'contrib-perf': 200,
- }
- perf = {}
- def sortkey(f):
- # run largest tests first, as they tend to take the longest
- f = f['path']
- try:
- return perf[f]
- except KeyError:
- try:
- val = -os.stat(f).st_size
- except OSError as e:
- if e.errno != errno.ENOENT:
- raise
- perf[f] = -1e9 # file does not exist, tell early
- return -1e9
- for kw, mul in slow.items():
- if kw in f:
- val *= mul
- if f.endswith(b'.py'):
- val /= 10.0
- perf[f] = val / 1000.0
- return perf[f]
- testdescs.sort(key=sortkey)
+ sorttests(testdescs, shuffle=self.options.random)
self._testdir = osenvironb[b'TESTDIR'] = getattr(
os, 'getcwdb', os.getcwd)()
+ # assume all tests in same folder for now
+ if testdescs:
+ pathname = os.path.dirname(testdescs[0]['path'])
+ if pathname:
+ osenvironb[b'TESTDIR'] = os.path.join(osenvironb[b'TESTDIR'],
+ pathname)
if self.options.outputdir:
self._outputdir = canonpath(_bytespath(self.options.outputdir))
else:
self._outputdir = self._testdir
+ if testdescs and pathname:
+ self._outputdir = os.path.join(self._outputdir, pathname)
if 'PYTHONHASHSEED' not in os.environ:
# use a random python hash seed all the time
@@ -2381,11 +2422,6 @@
print("error: temp dir %r already exists" % tmpdir)
return 1
- # Automatically removing tmpdir sounds convenient, but could
- # really annoy anyone in the habit of using "--tmpdir=/tmp"
- # or "--tmpdir=$HOME".
- #vlog("# Removing temp dir", tmpdir)
- #shutil.rmtree(tmpdir)
os.makedirs(tmpdir)
else:
d = None
@@ -2407,12 +2443,27 @@
self._tmpbindir = os.path.join(self._hgtmp, b'install', b'bin')
os.makedirs(self._tmpbindir)
- # This looks redundant with how Python initializes sys.path from
- # the location of the script being executed. Needed because the
- # "hg" specified by --with-hg is not the only Python script
- # executed in the test suite that needs to import 'mercurial'
- # ... which means it's not really redundant at all.
- self._pythondir = self._bindir
+ normbin = os.path.normpath(os.path.abspath(whg))
+ normbin = normbin.replace(os.sep.encode('ascii'), b'/')
+
+ # Other Python scripts in the test harness need to
+ # `import mercurial`. If `hg` is a Python script, we assume
+ # the Mercurial modules are relative to its path and tell the tests
+ # to load Python modules from its directory.
+ with open(whg, 'rb') as fh:
+ initial = fh.read(1024)
+
+ if re.match(b'#!.*python', initial):
+ self._pythondir = self._bindir
+ # If it looks like our in-repo Rust binary, use the source root.
+ # This is a bit hacky. But rhg is still not supported outside the
+ # source directory. So until it is, do the simple thing.
+ elif re.search(b'/rust/target/[^/]+/hg', normbin):
+ self._pythondir = os.path.dirname(self._testdir)
+ # Fall back to the legacy behavior.
+ else:
+ self._pythondir = self._bindir
+
else:
self._installdir = os.path.join(self._hgtmp, b"install")
self._bindir = os.path.join(self._installdir, b"bin")
@@ -2484,6 +2535,23 @@
self._coveragefile = os.path.join(self._testdir, b'.coverage')
+ if self.options.exceptions:
+ exceptionsdir = os.path.join(self._outputdir, b'exceptions')
+ try:
+ os.makedirs(exceptionsdir)
+ except OSError as e:
+ if e.errno != errno.EEXIST:
+ raise
+
+ # Remove all existing exception reports.
+ for f in os.listdir(exceptionsdir):
+ os.unlink(os.path.join(exceptionsdir, f))
+
+ osenvironb[b'HGEXCEPTIONSDIR'] = exceptionsdir
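+ # enable the logexceptions extension in each test's hgrc so exceptions
+ # are recorded for the report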
+ logexceptions = os.path.join(self._testdir, b'logexceptions.py')
+ self.options.extra_config_opt.append(
+ 'extensions.logexceptions=%s' % logexceptions.decode('utf-8'))
+
vlog("# Using TESTDIR", self._testdir)
vlog("# Using RUNTESTDIR", osenvironb[b'RUNTESTDIR'])
vlog("# Using HGTMP", self._hgtmp)
@@ -2512,6 +2580,16 @@
else:
args = os.listdir(b'.')
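+ # expand any directory arguments into the test files they contain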
+ expanded_args = []
+ for arg in args:
+ if os.path.isdir(arg):
+ if not arg.endswith(b'/'):
+ arg += b'/'
+ expanded_args.extend([arg + a for a in os.listdir(arg)])
+ else:
+ expanded_args.append(arg)
+ args = expanded_args
+
tests = []
for t in args:
if not (os.path.basename(t).startswith(b'test-')
@@ -2767,13 +2845,12 @@
if e.errno != errno.ENOENT:
raise
else:
- f = open(installerrs, 'rb')
- for line in f:
- if PYTHON3:
- sys.stdout.buffer.write(line)
- else:
- sys.stdout.write(line)
- f.close()
+ with open(installerrs, 'rb') as f:
+ for line in f:
+ if PYTHON3:
+ sys.stdout.buffer.write(line)
+ else:
+ sys.stdout.write(line)
sys.exit(1)
os.chdir(self._testdir)
@@ -2781,28 +2858,24 @@
if self.options.py3k_warnings and not self.options.anycoverage:
vlog("# Updating hg command to enable Py3k Warnings switch")
- f = open(os.path.join(self._bindir, 'hg'), 'rb')
- lines = [line.rstrip() for line in f]
- lines[0] += ' -3'
- f.close()
- f = open(os.path.join(self._bindir, 'hg'), 'wb')
- for line in lines:
- f.write(line + '\n')
- f.close()
+ with open(os.path.join(self._bindir, 'hg'), 'rb') as f:
+ lines = [line.rstrip() for line in f]
+ lines[0] += ' -3'
+ with open(os.path.join(self._bindir, 'hg'), 'wb') as f:
+ for line in lines:
+ f.write(line + '\n')
hgbat = os.path.join(self._bindir, b'hg.bat')
if os.path.isfile(hgbat):
# hg.bat expects to be put in bin/scripts while run-tests.py
# installation layout puts it in bin/ directly. Fix it
- f = open(hgbat, 'rb')
- data = f.read()
- f.close()
+ with open(hgbat, 'rb') as f:
+ data = f.read()
if b'"%~dp0..\python" "%~dp0hg" %*' in data:
data = data.replace(b'"%~dp0..\python" "%~dp0hg" %*',
b'"%~dp0python" "%~dp0hg" %*')
- f = open(hgbat, 'wb')
- f.write(data)
- f.close()
+ with open(hgbat, 'wb') as f:
+ f.write(data)
else:
print('WARNING: cannot fix hg.bat reference to python.exe')
@@ -2927,6 +3000,24 @@
print("WARNING: Did not find prerequisite tool: %s " %
p.decode("utf-8"))
+def aggregateexceptions(path):
+ exceptions = collections.Counter()
+
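+ # each report file holds four NUL-separated fields:
+ # exception, main frame, hg frame, hg line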
+ for f in os.listdir(path):
+ with open(os.path.join(path, f), 'rb') as fh:
+ data = fh.read().split(b'\0')
+ if len(data) != 4:
+ continue
+
+ exc, mainframe, hgframe, hgline = data
+ exc = exc.decode('utf-8')
+ mainframe = mainframe.decode('utf-8')
+ hgframe = hgframe.decode('utf-8')
+ hgline = hgline.decode('utf-8')
+ exceptions[(hgframe, hgline, exc)] += 1
+
+ return exceptions
+
if __name__ == '__main__':
runner = TestRunner()
--- a/tests/seq.py Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/seq.py Mon Jan 22 17:53:02 2018 -0500
@@ -10,6 +10,9 @@
from __future__ import absolute_import, print_function
import sys
+if sys.version_info[0] >= 3:
+ xrange = range
+
start = 1
if len(sys.argv) > 2:
start = int(sys.argv[1])
--- a/tests/test-acl.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-acl.t Mon Jan 22 17:53:02 2018 -0500
@@ -93,14 +93,14 @@
f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
911600dab2ae7a9baff75958b84fe606851ce955
bundle2-output-bundle: "HG20", 5 parts total
- bundle2-output-part: "replycaps" 168 bytes payload
+ bundle2-output-part: "replycaps" 188 bytes payload
bundle2-output-part: "check:phases" 24 bytes payload
bundle2-output-part: "check:heads" streamed payload
bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload
bundle2-output-part: "phase-heads" 24 bytes payload
bundle2-input-bundle: with-transaction
bundle2-input-part: "replycaps" supported
- bundle2-input-part: total payload size 168
+ bundle2-input-part: total payload size 188
bundle2-input-part: "check:phases" supported
bundle2-input-part: total payload size 24
bundle2-input-part: "check:heads" supported
@@ -156,14 +156,14 @@
f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
911600dab2ae7a9baff75958b84fe606851ce955
bundle2-output-bundle: "HG20", 5 parts total
- bundle2-output-part: "replycaps" 168 bytes payload
+ bundle2-output-part: "replycaps" 188 bytes payload
bundle2-output-part: "check:phases" 24 bytes payload
bundle2-output-part: "check:heads" streamed payload
bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload
bundle2-output-part: "phase-heads" 24 bytes payload
bundle2-input-bundle: with-transaction
bundle2-input-part: "replycaps" supported
- bundle2-input-part: total payload size 168
+ bundle2-input-part: total payload size 188
bundle2-input-part: "check:phases" supported
bundle2-input-part: total payload size 24
bundle2-input-part: "check:heads" supported
@@ -222,14 +222,14 @@
f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
911600dab2ae7a9baff75958b84fe606851ce955
bundle2-output-bundle: "HG20", 5 parts total
- bundle2-output-part: "replycaps" 168 bytes payload
+ bundle2-output-part: "replycaps" 188 bytes payload
bundle2-output-part: "check:phases" 24 bytes payload
bundle2-output-part: "check:heads" streamed payload
bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload
bundle2-output-part: "phase-heads" 24 bytes payload
bundle2-input-bundle: with-transaction
bundle2-input-part: "replycaps" supported
- bundle2-input-part: total payload size 168
+ bundle2-input-part: total payload size 188
bundle2-input-part: "check:phases" supported
bundle2-input-part: total payload size 24
bundle2-input-part: "check:heads" supported
@@ -298,14 +298,14 @@
f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
911600dab2ae7a9baff75958b84fe606851ce955
bundle2-output-bundle: "HG20", 5 parts total
- bundle2-output-part: "replycaps" 168 bytes payload
+ bundle2-output-part: "replycaps" 188 bytes payload
bundle2-output-part: "check:phases" 24 bytes payload
bundle2-output-part: "check:heads" streamed payload
bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload
bundle2-output-part: "phase-heads" 24 bytes payload
bundle2-input-bundle: with-transaction
bundle2-input-part: "replycaps" supported
- bundle2-input-part: total payload size 168
+ bundle2-input-part: total payload size 188
bundle2-input-part: "check:phases" supported
bundle2-input-part: total payload size 24
bundle2-input-part: "check:heads" supported
@@ -366,14 +366,14 @@
f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
911600dab2ae7a9baff75958b84fe606851ce955
bundle2-output-bundle: "HG20", 5 parts total
- bundle2-output-part: "replycaps" 168 bytes payload
+ bundle2-output-part: "replycaps" 188 bytes payload
bundle2-output-part: "check:phases" 24 bytes payload
bundle2-output-part: "check:heads" streamed payload
bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload
bundle2-output-part: "phase-heads" 24 bytes payload
bundle2-input-bundle: with-transaction
bundle2-input-part: "replycaps" supported
- bundle2-input-part: total payload size 168
+ bundle2-input-part: total payload size 188
bundle2-input-part: "check:phases" supported
bundle2-input-part: total payload size 24
bundle2-input-part: "check:heads" supported
@@ -439,14 +439,14 @@
f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
911600dab2ae7a9baff75958b84fe606851ce955
bundle2-output-bundle: "HG20", 5 parts total
- bundle2-output-part: "replycaps" 168 bytes payload
+ bundle2-output-part: "replycaps" 188 bytes payload
bundle2-output-part: "check:phases" 24 bytes payload
bundle2-output-part: "check:heads" streamed payload
bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload
bundle2-output-part: "phase-heads" 24 bytes payload
bundle2-input-bundle: with-transaction
bundle2-input-part: "replycaps" supported
- bundle2-input-part: total payload size 168
+ bundle2-input-part: total payload size 188
bundle2-input-part: "check:phases" supported
bundle2-input-part: total payload size 24
bundle2-input-part: "check:heads" supported
@@ -509,14 +509,14 @@
f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
911600dab2ae7a9baff75958b84fe606851ce955
bundle2-output-bundle: "HG20", 5 parts total
- bundle2-output-part: "replycaps" 168 bytes payload
+ bundle2-output-part: "replycaps" 188 bytes payload
bundle2-output-part: "check:phases" 24 bytes payload
bundle2-output-part: "check:heads" streamed payload
bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload
bundle2-output-part: "phase-heads" 24 bytes payload
bundle2-input-bundle: with-transaction
bundle2-input-part: "replycaps" supported
- bundle2-input-part: total payload size 168
+ bundle2-input-part: total payload size 188
bundle2-input-part: "check:phases" supported
bundle2-input-part: total payload size 24
bundle2-input-part: "check:heads" supported
@@ -584,14 +584,14 @@
f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
911600dab2ae7a9baff75958b84fe606851ce955
bundle2-output-bundle: "HG20", 5 parts total
- bundle2-output-part: "replycaps" 168 bytes payload
+ bundle2-output-part: "replycaps" 188 bytes payload
bundle2-output-part: "check:phases" 24 bytes payload
bundle2-output-part: "check:heads" streamed payload
bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload
bundle2-output-part: "phase-heads" 24 bytes payload
bundle2-input-bundle: with-transaction
bundle2-input-part: "replycaps" supported
- bundle2-input-part: total payload size 168
+ bundle2-input-part: total payload size 188
bundle2-input-part: "check:phases" supported
bundle2-input-part: total payload size 24
bundle2-input-part: "check:heads" supported
@@ -656,14 +656,14 @@
f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
911600dab2ae7a9baff75958b84fe606851ce955
bundle2-output-bundle: "HG20", 5 parts total
- bundle2-output-part: "replycaps" 168 bytes payload
+ bundle2-output-part: "replycaps" 188 bytes payload
bundle2-output-part: "check:phases" 24 bytes payload
bundle2-output-part: "check:heads" streamed payload
bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload
bundle2-output-part: "phase-heads" 24 bytes payload
bundle2-input-bundle: with-transaction
bundle2-input-part: "replycaps" supported
- bundle2-input-part: total payload size 168
+ bundle2-input-part: total payload size 188
bundle2-input-part: "check:phases" supported
bundle2-input-part: total payload size 24
bundle2-input-part: "check:heads" supported
@@ -730,14 +730,14 @@
f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
911600dab2ae7a9baff75958b84fe606851ce955
bundle2-output-bundle: "HG20", 5 parts total
- bundle2-output-part: "replycaps" 168 bytes payload
+ bundle2-output-part: "replycaps" 188 bytes payload
bundle2-output-part: "check:phases" 24 bytes payload
bundle2-output-part: "check:heads" streamed payload
bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload
bundle2-output-part: "phase-heads" 24 bytes payload
bundle2-input-bundle: with-transaction
bundle2-input-part: "replycaps" supported
- bundle2-input-part: total payload size 168
+ bundle2-input-part: total payload size 188
bundle2-input-part: "check:phases" supported
bundle2-input-part: total payload size 24
bundle2-input-part: "check:heads" supported
@@ -813,14 +813,14 @@
f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
911600dab2ae7a9baff75958b84fe606851ce955
bundle2-output-bundle: "HG20", 5 parts total
- bundle2-output-part: "replycaps" 168 bytes payload
+ bundle2-output-part: "replycaps" 188 bytes payload
bundle2-output-part: "check:phases" 24 bytes payload
bundle2-output-part: "check:heads" streamed payload
bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload
bundle2-output-part: "phase-heads" 24 bytes payload
bundle2-input-bundle: with-transaction
bundle2-input-part: "replycaps" supported
- bundle2-input-part: total payload size 168
+ bundle2-input-part: total payload size 188
bundle2-input-part: "check:phases" supported
bundle2-input-part: total payload size 24
bundle2-input-part: "check:heads" supported
@@ -894,14 +894,14 @@
f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
911600dab2ae7a9baff75958b84fe606851ce955
bundle2-output-bundle: "HG20", 5 parts total
- bundle2-output-part: "replycaps" 168 bytes payload
+ bundle2-output-part: "replycaps" 188 bytes payload
bundle2-output-part: "check:phases" 24 bytes payload
bundle2-output-part: "check:heads" streamed payload
bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload
bundle2-output-part: "phase-heads" 24 bytes payload
bundle2-input-bundle: with-transaction
bundle2-input-part: "replycaps" supported
- bundle2-input-part: total payload size 168
+ bundle2-input-part: total payload size 188
bundle2-input-part: "check:phases" supported
bundle2-input-part: total payload size 24
bundle2-input-part: "check:heads" supported
@@ -925,7 +925,7 @@
bundle2-input-bundle: 4 parts total
transaction abort!
rollback completed
- abort: No such file or directory: ../acl.config
+ abort: $ENOENT$: ../acl.config
no rollback information available
0:6675d58eff77
@@ -970,14 +970,14 @@
f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
911600dab2ae7a9baff75958b84fe606851ce955
bundle2-output-bundle: "HG20", 5 parts total
- bundle2-output-part: "replycaps" 168 bytes payload
+ bundle2-output-part: "replycaps" 188 bytes payload
bundle2-output-part: "check:phases" 24 bytes payload
bundle2-output-part: "check:heads" streamed payload
bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload
bundle2-output-part: "phase-heads" 24 bytes payload
bundle2-input-bundle: with-transaction
bundle2-input-part: "replycaps" supported
- bundle2-input-part: total payload size 168
+ bundle2-input-part: total payload size 188
bundle2-input-part: "check:phases" supported
bundle2-input-part: total payload size 24
bundle2-input-part: "check:heads" supported
@@ -1057,14 +1057,14 @@
f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
911600dab2ae7a9baff75958b84fe606851ce955
bundle2-output-bundle: "HG20", 5 parts total
- bundle2-output-part: "replycaps" 168 bytes payload
+ bundle2-output-part: "replycaps" 188 bytes payload
bundle2-output-part: "check:phases" 24 bytes payload
bundle2-output-part: "check:heads" streamed payload
bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload
bundle2-output-part: "phase-heads" 24 bytes payload
bundle2-input-bundle: with-transaction
bundle2-input-part: "replycaps" supported
- bundle2-input-part: total payload size 168
+ bundle2-input-part: total payload size 188
bundle2-input-part: "check:phases" supported
bundle2-input-part: total payload size 24
bundle2-input-part: "check:heads" supported
@@ -1143,14 +1143,14 @@
f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
911600dab2ae7a9baff75958b84fe606851ce955
bundle2-output-bundle: "HG20", 5 parts total
- bundle2-output-part: "replycaps" 168 bytes payload
+ bundle2-output-part: "replycaps" 188 bytes payload
bundle2-output-part: "check:phases" 24 bytes payload
bundle2-output-part: "check:heads" streamed payload
bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload
bundle2-output-part: "phase-heads" 24 bytes payload
bundle2-input-bundle: with-transaction
bundle2-input-part: "replycaps" supported
- bundle2-input-part: total payload size 168
+ bundle2-input-part: total payload size 188
bundle2-input-part: "check:phases" supported
bundle2-input-part: total payload size 24
bundle2-input-part: "check:heads" supported
@@ -1225,14 +1225,14 @@
f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
911600dab2ae7a9baff75958b84fe606851ce955
bundle2-output-bundle: "HG20", 5 parts total
- bundle2-output-part: "replycaps" 168 bytes payload
+ bundle2-output-part: "replycaps" 188 bytes payload
bundle2-output-part: "check:phases" 24 bytes payload
bundle2-output-part: "check:heads" streamed payload
bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload
bundle2-output-part: "phase-heads" 24 bytes payload
bundle2-input-bundle: with-transaction
bundle2-input-part: "replycaps" supported
- bundle2-input-part: total payload size 168
+ bundle2-input-part: total payload size 188
bundle2-input-part: "check:phases" supported
bundle2-input-part: total payload size 24
bundle2-input-part: "check:heads" supported
@@ -1304,14 +1304,14 @@
f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
911600dab2ae7a9baff75958b84fe606851ce955
bundle2-output-bundle: "HG20", 5 parts total
- bundle2-output-part: "replycaps" 168 bytes payload
+ bundle2-output-part: "replycaps" 188 bytes payload
bundle2-output-part: "check:phases" 24 bytes payload
bundle2-output-part: "check:heads" streamed payload
bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload
bundle2-output-part: "phase-heads" 24 bytes payload
bundle2-input-bundle: with-transaction
bundle2-input-part: "replycaps" supported
- bundle2-input-part: total payload size 168
+ bundle2-input-part: total payload size 188
bundle2-input-part: "check:phases" supported
bundle2-input-part: total payload size 24
bundle2-input-part: "check:heads" supported
@@ -1387,14 +1387,14 @@
f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
911600dab2ae7a9baff75958b84fe606851ce955
bundle2-output-bundle: "HG20", 5 parts total
- bundle2-output-part: "replycaps" 168 bytes payload
+ bundle2-output-part: "replycaps" 188 bytes payload
bundle2-output-part: "check:phases" 24 bytes payload
bundle2-output-part: "check:heads" streamed payload
bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload
bundle2-output-part: "phase-heads" 24 bytes payload
bundle2-input-bundle: with-transaction
bundle2-input-part: "replycaps" supported
- bundle2-input-part: total payload size 168
+ bundle2-input-part: total payload size 188
bundle2-input-part: "check:phases" supported
bundle2-input-part: total payload size 24
bundle2-input-part: "check:heads" supported
@@ -1507,14 +1507,14 @@
911600dab2ae7a9baff75958b84fe606851ce955
e8fc755d4d8217ee5b0c2bb41558c40d43b92c01
bundle2-output-bundle: "HG20", 5 parts total
- bundle2-output-part: "replycaps" 168 bytes payload
+ bundle2-output-part: "replycaps" 188 bytes payload
bundle2-output-part: "check:phases" 48 bytes payload
bundle2-output-part: "check:heads" streamed payload
bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload
bundle2-output-part: "phase-heads" 48 bytes payload
bundle2-input-bundle: with-transaction
bundle2-input-part: "replycaps" supported
- bundle2-input-part: total payload size 168
+ bundle2-input-part: total payload size 188
bundle2-input-part: "check:phases" supported
bundle2-input-part: total payload size 48
bundle2-input-part: "check:heads" supported
@@ -1591,14 +1591,14 @@
911600dab2ae7a9baff75958b84fe606851ce955
e8fc755d4d8217ee5b0c2bb41558c40d43b92c01
bundle2-output-bundle: "HG20", 5 parts total
- bundle2-output-part: "replycaps" 168 bytes payload
+ bundle2-output-part: "replycaps" 188 bytes payload
bundle2-output-part: "check:phases" 48 bytes payload
bundle2-output-part: "check:heads" streamed payload
bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload
bundle2-output-part: "phase-heads" 48 bytes payload
bundle2-input-bundle: with-transaction
bundle2-input-part: "replycaps" supported
- bundle2-input-part: total payload size 168
+ bundle2-input-part: total payload size 188
bundle2-input-part: "check:phases" supported
bundle2-input-part: total payload size 48
bundle2-input-part: "check:heads" supported
@@ -1668,14 +1668,14 @@
911600dab2ae7a9baff75958b84fe606851ce955
e8fc755d4d8217ee5b0c2bb41558c40d43b92c01
bundle2-output-bundle: "HG20", 5 parts total
- bundle2-output-part: "replycaps" 168 bytes payload
+ bundle2-output-part: "replycaps" 188 bytes payload
bundle2-output-part: "check:phases" 48 bytes payload
bundle2-output-part: "check:heads" streamed payload
bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload
bundle2-output-part: "phase-heads" 48 bytes payload
bundle2-input-bundle: with-transaction
bundle2-input-part: "replycaps" supported
- bundle2-input-part: total payload size 168
+ bundle2-input-part: total payload size 188
bundle2-input-part: "check:phases" supported
bundle2-input-part: total payload size 48
bundle2-input-part: "check:heads" supported
@@ -1741,14 +1741,14 @@
911600dab2ae7a9baff75958b84fe606851ce955
e8fc755d4d8217ee5b0c2bb41558c40d43b92c01
bundle2-output-bundle: "HG20", 5 parts total
- bundle2-output-part: "replycaps" 168 bytes payload
+ bundle2-output-part: "replycaps" 188 bytes payload
bundle2-output-part: "check:phases" 48 bytes payload
bundle2-output-part: "check:heads" streamed payload
bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload
bundle2-output-part: "phase-heads" 48 bytes payload
bundle2-input-bundle: with-transaction
bundle2-input-part: "replycaps" supported
- bundle2-input-part: total payload size 168
+ bundle2-input-part: total payload size 188
bundle2-input-part: "check:phases" supported
bundle2-input-part: total payload size 48
bundle2-input-part: "check:heads" supported
@@ -1808,14 +1808,14 @@
911600dab2ae7a9baff75958b84fe606851ce955
e8fc755d4d8217ee5b0c2bb41558c40d43b92c01
bundle2-output-bundle: "HG20", 5 parts total
- bundle2-output-part: "replycaps" 168 bytes payload
+ bundle2-output-part: "replycaps" 188 bytes payload
bundle2-output-part: "check:phases" 48 bytes payload
bundle2-output-part: "check:heads" streamed payload
bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload
bundle2-output-part: "phase-heads" 48 bytes payload
bundle2-input-bundle: with-transaction
bundle2-input-part: "replycaps" supported
- bundle2-input-part: total payload size 168
+ bundle2-input-part: total payload size 188
bundle2-input-part: "check:phases" supported
bundle2-input-part: total payload size 48
bundle2-input-part: "check:heads" supported
@@ -1897,14 +1897,14 @@
911600dab2ae7a9baff75958b84fe606851ce955
e8fc755d4d8217ee5b0c2bb41558c40d43b92c01
bundle2-output-bundle: "HG20", 5 parts total
- bundle2-output-part: "replycaps" 168 bytes payload
+ bundle2-output-part: "replycaps" 188 bytes payload
bundle2-output-part: "check:phases" 48 bytes payload
bundle2-output-part: "check:heads" streamed payload
bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload
bundle2-output-part: "phase-heads" 48 bytes payload
bundle2-input-bundle: with-transaction
bundle2-input-part: "replycaps" supported
- bundle2-input-part: total payload size 168
+ bundle2-input-part: total payload size 188
bundle2-input-part: "check:phases" supported
bundle2-input-part: total payload size 48
bundle2-input-part: "check:heads" supported
@@ -1985,14 +1985,14 @@
911600dab2ae7a9baff75958b84fe606851ce955
e8fc755d4d8217ee5b0c2bb41558c40d43b92c01
bundle2-output-bundle: "HG20", 5 parts total
- bundle2-output-part: "replycaps" 168 bytes payload
+ bundle2-output-part: "replycaps" 188 bytes payload
bundle2-output-part: "check:phases" 48 bytes payload
bundle2-output-part: "check:heads" streamed payload
bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload
bundle2-output-part: "phase-heads" 48 bytes payload
bundle2-input-bundle: with-transaction
bundle2-input-part: "replycaps" supported
- bundle2-input-part: total payload size 168
+ bundle2-input-part: total payload size 188
bundle2-input-part: "check:phases" supported
bundle2-input-part: total payload size 48
bundle2-input-part: "check:heads" supported
@@ -2057,14 +2057,14 @@
911600dab2ae7a9baff75958b84fe606851ce955
e8fc755d4d8217ee5b0c2bb41558c40d43b92c01
bundle2-output-bundle: "HG20", 5 parts total
- bundle2-output-part: "replycaps" 168 bytes payload
+ bundle2-output-part: "replycaps" 188 bytes payload
bundle2-output-part: "check:phases" 48 bytes payload
bundle2-output-part: "check:heads" streamed payload
bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload
bundle2-output-part: "phase-heads" 48 bytes payload
bundle2-input-bundle: with-transaction
bundle2-input-part: "replycaps" supported
- bundle2-input-part: total payload size 168
+ bundle2-input-part: total payload size 188
bundle2-input-part: "check:phases" supported
bundle2-input-part: total payload size 48
bundle2-input-part: "check:heads" supported
@@ -2139,14 +2139,14 @@
911600dab2ae7a9baff75958b84fe606851ce955
e8fc755d4d8217ee5b0c2bb41558c40d43b92c01
bundle2-output-bundle: "HG20", 5 parts total
- bundle2-output-part: "replycaps" 168 bytes payload
+ bundle2-output-part: "replycaps" 188 bytes payload
bundle2-output-part: "check:phases" 48 bytes payload
bundle2-output-part: "check:heads" streamed payload
bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload
bundle2-output-part: "phase-heads" 48 bytes payload
bundle2-input-bundle: with-transaction
bundle2-input-part: "replycaps" supported
- bundle2-input-part: total payload size 168
+ bundle2-input-part: total payload size 188
bundle2-input-part: "check:phases" supported
bundle2-input-part: total payload size 48
bundle2-input-part: "check:heads" supported
--- a/tests/test-add.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-add.t Mon Jan 22 17:53:02 2018 -0500
@@ -104,7 +104,7 @@
merging a
warning: conflicts while merging a! (edit, then use 'hg resolve --mark')
0 files updated, 0 files merged, 0 files removed, 1 files unresolved
- use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon
+ use 'hg resolve' to retry unresolved file merges or 'hg merge --abort' to abandon
[1]
$ hg st
M a
@@ -197,17 +197,17 @@
$ echo def > CapsDir1/CapsDir/SubDir/Def.txt
$ hg add capsdir1/capsdir
- adding CapsDir1/CapsDir/AbC.txt (glob)
- adding CapsDir1/CapsDir/SubDir/Def.txt (glob)
+ adding CapsDir1/CapsDir/AbC.txt
+ adding CapsDir1/CapsDir/SubDir/Def.txt
$ hg forget capsdir1/capsdir/abc.txt
$ hg forget capsdir1/capsdir
- removing CapsDir1/CapsDir/SubDir/Def.txt (glob)
+ removing CapsDir1/CapsDir/SubDir/Def.txt
$ hg add capsdir1
- adding CapsDir1/CapsDir/AbC.txt (glob)
- adding CapsDir1/CapsDir/SubDir/Def.txt (glob)
+ adding CapsDir1/CapsDir/AbC.txt
+ adding CapsDir1/CapsDir/SubDir/Def.txt
$ hg ci -m "AbCDef" capsdir1/capsdir
@@ -216,14 +216,14 @@
C CapsDir1/CapsDir/SubDir/Def.txt
$ hg files capsdir1/capsdir
- CapsDir1/CapsDir/AbC.txt (glob)
- CapsDir1/CapsDir/SubDir/Def.txt (glob)
+ CapsDir1/CapsDir/AbC.txt
+ CapsDir1/CapsDir/SubDir/Def.txt
$ echo xyz > CapsDir1/CapsDir/SubDir/Def.txt
$ hg ci -m xyz capsdir1/capsdir/subdir/def.txt
$ hg revert -r '.^' capsdir1/capsdir
- reverting CapsDir1/CapsDir/SubDir/Def.txt (glob)
+ reverting CapsDir1/CapsDir/SubDir/Def.txt
The conditional tests above mean the hash on the diff line differs on Windows
and OS X
@@ -244,8 +244,8 @@
$ hg remove -f 'glob:**.txt' -X capsdir1/capsdir
$ hg remove -f 'glob:**.txt' -I capsdir1/capsdir
- removing CapsDir1/CapsDir/ABC.txt (glob)
- removing CapsDir1/CapsDir/SubDir/Def.txt (glob)
+ removing CapsDir1/CapsDir/ABC.txt
+ removing CapsDir1/CapsDir/SubDir/Def.txt
#endif
$ cd ..
--- a/tests/test-addremove-similar.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-addremove-similar.t Mon Jan 22 17:53:02 2018 -0500
@@ -153,7 +153,7 @@
$ hg addremove -s80
removing d/a
adding d/b
- recording removal of d/a as rename to d/b (100% similar) (glob)
+ recording removal of d/a as rename to d/b (100% similar)
$ hg debugstate
r 0 0 1970-01-01 00:00:00 d/a
a 0 -1 unset d/b
@@ -163,12 +163,12 @@
no copies found here (since the target isn't in d
$ hg addremove -s80 d
- removing d/b (glob)
+ removing d/b
copies here
$ hg addremove -s80
adding c
- recording removal of d/a as rename to c (100% similar) (glob)
+ recording removal of d/a as rename to c (100% similar)
$ cd ..
--- a/tests/test-addremove.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-addremove.t Mon Jan 22 17:53:02 2018 -0500
@@ -31,8 +31,7 @@
$ hg forget foo
$ hg -v addremove nonexistent
- nonexistent: The system cannot find the file specified (windows !)
- nonexistent: No such file or directory (no-windows !)
+ nonexistent: $ENOENT$
[1]
$ cd ..
@@ -86,8 +85,7 @@
$ rm c
$ hg ci -A -m "c" nonexistent
- nonexistent: The system cannot find the file specified (windows !)
- nonexistent: No such file or directory (no-windows !)
+ nonexistent: $ENOENT$
abort: failed to mark all new/missing files as added/removed
[255]
--- a/tests/test-alias.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-alias.t Mon Jan 22 17:53:02 2018 -0500
@@ -119,6 +119,12 @@
$ hg help noclosing
error in definition for alias 'noclosingquotation': No closing quotation
+"--" in alias definition should be preserved
+
+ $ hg --config alias.dash='cat --' -R alias dash -r0
+ abort: -r0 not under root '$TESTTMP/alias'
+ (consider using '--cwd alias')
+ [255]
invalid options
@@ -148,6 +154,12 @@
$ hg no--config
abort: error in definition for alias 'no--config': --config may only be given on the command line
[255]
+ $ hg no --config alias.no='--repo elsewhere --cwd elsewhere status'
+ abort: error in definition for alias 'no': --repo/--cwd may only be given on the command line
+ [255]
+ $ hg no --config alias.no='--repo elsewhere'
+ abort: error in definition for alias 'no': --repo may only be given on the command line
+ [255]
optional repository
@@ -351,6 +363,10 @@
$ hg echoall --cwd ..
+"--" passed to shell alias should be preserved
+
+ $ hg --config alias.printf='!printf "$@"' printf '%s %s %s\n' -- --cwd ..
+ -- --cwd ..
repo specific shell aliases
--- a/tests/test-amend-subrepo.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-amend-subrepo.t Mon Jan 22 17:53:02 2018 -0500
@@ -58,7 +58,7 @@
$ echo a >> s/a
$ hg add -R s
- adding s/a (glob)
+ adding s/a
$ hg amend
abort: uncommitted changes in subrepository "s"
(use --subrepos for recursive commit)
--- a/tests/test-amend.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-amend.t Mon Jan 22 17:53:02 2018 -0500
@@ -29,7 +29,7 @@
$ echo 2 >> B
$ hg amend
- saved backup bundle to $TESTTMP/repo1/.hg/strip-backup/112478962961-7e959a55-amend.hg (glob) (obsstore-off !)
+ saved backup bundle to $TESTTMP/repo1/.hg/strip-backup/112478962961-7e959a55-amend.hg (obsstore-off !)
#if obsstore-off
$ hg log -p -G --hidden -T '{rev} {node|short} {desc}\n'
@ 1 be169c7e8dbe B
@@ -99,13 +99,13 @@
$ echo 4 > D
$ hg add C D
$ hg amend -m NEWMESSAGE -I C
- saved backup bundle to $TESTTMP/repo1/.hg/strip-backup/be169c7e8dbe-7684ddc5-amend.hg (glob) (obsstore-off !)
+ saved backup bundle to $TESTTMP/repo1/.hg/strip-backup/be169c7e8dbe-7684ddc5-amend.hg (obsstore-off !)
$ hg log -r . -T '{node|short} {desc} {files}\n'
c7ba14d9075b NEWMESSAGE B C
$ echo 5 > E
$ rm C
$ hg amend -d '2000 1000' -u 'Foo <foo@example.com>' -A C D
- saved backup bundle to $TESTTMP/repo1/.hg/strip-backup/c7ba14d9075b-b3e76daa-amend.hg (glob) (obsstore-off !)
+ saved backup bundle to $TESTTMP/repo1/.hg/strip-backup/c7ba14d9075b-b3e76daa-amend.hg (obsstore-off !)
$ hg log -r . -T '{node|short} {desc} {files} {author} {date}\n'
14f6c4bcc865 NEWMESSAGE B D Foo <foo@example.com> 2000.01000
@@ -119,11 +119,11 @@
$ chmod +x $TESTTMP/prefix.sh
$ HGEDITOR="sh $TESTTMP/prefix.sh" hg amend --edit
- saved backup bundle to $TESTTMP/repo1/.hg/strip-backup/14f6c4bcc865-6591f15d-amend.hg (glob) (obsstore-off !)
+ saved backup bundle to $TESTTMP/repo1/.hg/strip-backup/14f6c4bcc865-6591f15d-amend.hg (obsstore-off !)
$ hg log -r . -T '{node|short} {desc}\n'
298f085230c3 EDITED: NEWMESSAGE
$ HGEDITOR="sh $TESTTMP/prefix.sh" hg amend -e -m MSG
- saved backup bundle to $TESTTMP/repo1/.hg/strip-backup/298f085230c3-d81a6ad3-amend.hg (glob) (obsstore-off !)
+ saved backup bundle to $TESTTMP/repo1/.hg/strip-backup/298f085230c3-d81a6ad3-amend.hg (obsstore-off !)
$ hg log -r . -T '{node|short} {desc}\n'
974f07f28537 EDITED: MSG
@@ -132,7 +132,7 @@
abort: options --message and --logfile are mutually exclusive
[255]
$ hg amend -l $TESTTMP/msg
- saved backup bundle to $TESTTMP/repo1/.hg/strip-backup/974f07f28537-edb6470a-amend.hg (glob) (obsstore-off !)
+ saved backup bundle to $TESTTMP/repo1/.hg/strip-backup/974f07f28537-edb6470a-amend.hg (obsstore-off !)
$ hg log -r . -T '{node|short} {desc}\n'
507be9bdac71 FOO
@@ -152,7 +152,7 @@
new file mode 100644
examine changes to 'G'? [Ynesfdaq?] n
- saved backup bundle to $TESTTMP/repo1/.hg/strip-backup/507be9bdac71-c8077452-amend.hg (glob) (obsstore-off !)
+ saved backup bundle to $TESTTMP/repo1/.hg/strip-backup/507be9bdac71-c8077452-amend.hg (obsstore-off !)
$ hg log -r . -T '{files}\n'
B D F
@@ -185,10 +185,11 @@
> EOF
$ hg amend
+ 1 new orphan changesets
$ hg log -T '{rev} {node|short} {desc}\n' -G
@ 3 be169c7e8dbe B
|
- | o 2 26805aba1e60 C
+ | * 2 26805aba1e60 C
| |
| x 1 112478962961 B
|/
@@ -203,8 +204,8 @@
[255]
$ hg amend --note "adding bar"
$ hg debugobsolete -r .
- 112478962961147124edd43549aedd1a335e44bf be169c7e8dbe21cd10b3d79691cbe7f241e3c21c 0 (Thu Jan 01 00:00:00 1970 +0000) {'operation': 'amend', 'user': 'test'}
- be169c7e8dbe21cd10b3d79691cbe7f241e3c21c 16084da537dd8f84cfdb3055c633772269d62e1b 0 (Thu Jan 01 00:00:00 1970 +0000) {'note': 'adding bar', 'operation': 'amend', 'user': 'test'}
+ 112478962961147124edd43549aedd1a335e44bf be169c7e8dbe21cd10b3d79691cbe7f241e3c21c 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '8', 'operation': 'amend', 'user': 'test'}
+ be169c7e8dbe21cd10b3d79691cbe7f241e3c21c 16084da537dd8f84cfdb3055c633772269d62e1b 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '8', 'note': 'adding bar', 'operation': 'amend', 'user': 'test'}
#endif
Cannot amend public changeset
@@ -213,6 +214,7 @@
$ hg update -C -q A
$ hg amend -m AMEND
abort: cannot amend public changesets
+ (see 'hg help phases' for details)
[255]
Amend a merge changeset
@@ -226,7 +228,7 @@
> EOS
$ hg update -q C
$ hg amend -m FOO
- saved backup bundle to $TESTTMP/repo3/.hg/strip-backup/a35c07e8a2a4-15ff4612-amend.hg (glob) (obsstore-off !)
+ saved backup bundle to $TESTTMP/repo3/.hg/strip-backup/a35c07e8a2a4-15ff4612-amend.hg (obsstore-off !)
$ rm .hg/localtags
$ hg log -G -T '{desc}\n'
@ FOO
--- a/tests/test-annotate.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-annotate.t Mon Jan 22 17:53:02 2018 -0500
@@ -556,8 +556,8 @@
$ rm baz
$ hg annotate -ncr "wdir()" baz
- abort: $TESTTMP\repo\baz: The system cannot find the file specified (windows !)
- abort: No such file or directory: $TESTTMP/repo/baz (no-windows !)
+ abort: $TESTTMP\repo\baz: $ENOENT$ (windows !)
+ abort: $ENOENT$: $TESTTMP/repo/baz (no-windows !)
[255]
annotate removed file
@@ -565,8 +565,8 @@
$ hg rm baz
$ hg annotate -ncr "wdir()" baz
- abort: $TESTTMP\repo\baz: The system cannot find the file specified (windows !)
- abort: No such file or directory: $TESTTMP/repo/baz (no-windows !)
+ abort: $TESTTMP\repo\baz: $ENOENT$ (windows !)
+ abort: $ENOENT$: $TESTTMP/repo/baz (no-windows !)
[255]
$ hg revert --all --no-backup --quiet
--- a/tests/test-archive.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-archive.t Mon Jan 22 17:53:02 2018 -0500
@@ -32,7 +32,7 @@
sharing subrepo subrepo from $TESTTMP/test/subrepo
5 files updated, 0 files merged, 0 files removed, 0 files unresolved
$ cat shared1/subrepo/.hg/sharedpath
- $TESTTMP/test/subrepo/.hg (no-eol) (glob)
+ $TESTTMP/test/subrepo/.hg (no-eol)
hg subrepos are shared into existence on demand if the parent was shared
@@ -45,7 +45,7 @@
sharing subrepo subrepo from $TESTTMP/test/subrepo
3 files updated, 0 files merged, 0 files removed, 0 files unresolved
$ cat share2/subrepo/.hg/sharedpath
- $TESTTMP/test/subrepo/.hg (no-eol) (glob)
+ $TESTTMP/test/subrepo/.hg (no-eol)
$ echo 'mod' > share2/subrepo/sub
$ hg -R share2 ci -Sqm 'subrepo mod'
$ hg -R clone1 update -C tip
@@ -79,7 +79,7 @@
$ hg -R shared3 archive --config ui.archivemeta=False -r tip -S archive
sharing subrepo subrepo from $TESTTMP/test/subrepo
$ cat shared3/subrepo/.hg/sharedpath
- $TESTTMP/test/subrepo/.hg (no-eol) (glob)
+ $TESTTMP/test/subrepo/.hg (no-eol)
$ diff -r archive test
Only in test: .hg
Common subdirectories: archive/baz and test/baz (?)
--- a/tests/test-audit-path.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-audit-path.t Mon Jan 22 17:53:02 2018 -0500
@@ -3,7 +3,7 @@
audit of .hg
$ hg add .hg/00changelog.i
- abort: path contains illegal component: .hg/00changelog.i (glob)
+ abort: path contains illegal component: .hg/00changelog.i
[255]
#if symlink
@@ -17,14 +17,14 @@
$ ln -s a b
$ echo b > a/b
$ hg add b/b
- abort: path 'b/b' traverses symbolic link 'b' (glob)
+ abort: path 'b/b' traverses symbolic link 'b'
[255]
$ hg add b
should still fail - maybe
$ hg add b/b
- abort: path 'b/b' traverses symbolic link 'b' (glob)
+ abort: path 'b/b' traverses symbolic link 'b'
[255]
$ hg commit -m 'add symlink b'
@@ -86,7 +86,7 @@
$ hg manifest -r0
.hg/test
$ hg update -Cr0
- abort: path contains illegal component: .hg/test (glob)
+ abort: path contains illegal component: .hg/test
[255]
attack foo/.hg/test
@@ -94,7 +94,7 @@
$ hg manifest -r1
foo/.hg/test
$ hg update -Cr1
- abort: path 'foo/.hg/test' is inside nested repo 'foo' (glob)
+ abort: path 'foo/.hg/test' is inside nested repo 'foo'
[255]
attack back/test where back symlinks to ..
@@ -121,7 +121,7 @@
$ mkdir ../test
$ echo data > ../test/file
$ hg update -Cr3
- abort: path contains illegal component: ../test (glob)
+ abort: path contains illegal component: ../test
[255]
$ cat ../test/file
data
@@ -131,7 +131,7 @@
$ hg manifest -r4
/tmp/test
$ hg update -Cr4
- abort: path contains illegal component: /tmp/test (glob)
+ abort: path contains illegal component: /tmp/test
[255]
$ cd ..
--- a/tests/test-audit-subrepo.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-audit-subrepo.t Mon Jan 22 17:53:02 2018 -0500
@@ -9,7 +9,7 @@
$ hg init sub/.hg
$ echo 'sub/.hg = sub/.hg' >> .hgsub
$ hg ci -qAm 'add subrepo "sub/.hg"'
- abort: path 'sub/.hg' is inside nested repo 'sub' (glob)
+ abort: path 'sub/.hg' is inside nested repo 'sub'
[255]
prepare tampered repo (including the commit above):
@@ -33,7 +33,7 @@
on clone (and update):
$ hg clone -q hgname hgname2
- abort: path 'sub/.hg' is inside nested repo 'sub' (glob)
+ abort: path 'sub/.hg' is inside nested repo 'sub'
[255]
Test direct symlink traversal
--- a/tests/test-basic.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-basic.t Mon Jan 22 17:53:02 2018 -0500
@@ -4,7 +4,8 @@
devel.all-warnings=true
devel.default-date=0 0
extensions.fsmonitor= (fsmonitor !)
- largefiles.usercache=$TESTTMP/.cache/largefiles (glob)
+ largefiles.usercache=$TESTTMP/.cache/largefiles
+ lfs.usercache=$TESTTMP/.cache/lfs
ui.slash=True
ui.interactive=False
ui.mergemarkers=detailed
@@ -33,15 +34,7 @@
[255]
#endif
-#if devfull no-chg
- $ hg status >/dev/full 2>&1
- [1]
-
- $ hg status ENOENT 2>/dev/full
- [1]
-#endif
-
-#if devfull chg
+#if devfull
$ hg status >/dev/full 2>&1
[255]
--- a/tests/test-blackbox.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-blackbox.t Mon Jan 22 17:53:02 2018 -0500
@@ -60,7 +60,7 @@
adding c
$ cd ../blackboxtest2
$ hg pull
- pulling from $TESTTMP/blackboxtest (glob)
+ pulling from $TESTTMP/blackboxtest
searching for changes
adding changesets
adding manifests
@@ -85,7 +85,7 @@
$ mkdir .hg/blackbox.log
$ hg --debug incoming
warning: cannot write to blackbox.log: * (glob)
- comparing with $TESTTMP/blackboxtest (glob)
+ comparing with $TESTTMP/blackboxtest
query 1; heads
searching for changes
all local heads known remotely
@@ -104,7 +104,7 @@
$ hg pull
- pulling from $TESTTMP/blackboxtest (glob)
+ pulling from $TESTTMP/blackboxtest
searching for changes
adding changesets
adding manifests
@@ -133,7 +133,7 @@
saved backup bundle to $TESTTMP/blackboxtest2/.hg/strip-backup/*-backup.hg (glob)
$ hg blackbox -l 6
1970/01/01 00:00:00 bob @73f6ee326b27d820b0472f1a825e3a50f3dc489b (5000)> strip tip
- 1970/01/01 00:00:00 bob @6563da9dcf87b1949716e38ff3e3dfaa3198eb06 (5000)> saved backup bundle to $TESTTMP/blackboxtest2/.hg/strip-backup/73f6ee326b27-7612e004-backup.hg (glob)
+ 1970/01/01 00:00:00 bob @6563da9dcf87b1949716e38ff3e3dfaa3198eb06 (5000)> saved backup bundle to $TESTTMP/blackboxtest2/.hg/strip-backup/73f6ee326b27-7612e004-backup.hg
1970/01/01 00:00:00 bob @6563da9dcf87b1949716e38ff3e3dfaa3198eb06 (5000)> updated base branch cache in * seconds (glob)
1970/01/01 00:00:00 bob @6563da9dcf87b1949716e38ff3e3dfaa3198eb06 (5000)> wrote base branch cache with 1 labels and 2 nodes
1970/01/01 00:00:00 bob @6563da9dcf87b1949716e38ff3e3dfaa3198eb06 (5000)> strip tip exited 0 after * seconds (glob)
@@ -193,37 +193,70 @@
> os.rename(".hg/blackbox.log-", ".hg/blackbox.log")\
> \1#' $TESTDIR/test-dispatch.py > ../test-dispatch.py
$ $PYTHON $TESTDIR/blackbox-readonly-dispatch.py
- running: add foo
+ running: --debug add foo
+ warning: cannot write to blackbox.log: Is a directory (no-windows !)
+ warning: cannot write to blackbox.log: $TESTTMP/blackboxtest3/.hg/blackbox.log: Access is denied (windows !)
+ adding foo
result: 0
- running: commit -m commit1 -d 2000-01-01 foo
+ running: --debug commit -m commit1 -d 2000-01-01 foo
+ warning: cannot write to blackbox.log: Is a directory (no-windows !)
+ warning: cannot write to blackbox.log: $TESTTMP/blackboxtest3/.hg/blackbox.log: Access is denied (windows !)
+ committing files:
+ foo
+ committing manifest
+ committing changelog
+ updating the branch cache
+ committed changeset 0:0e46349438790c460c5c9f7546bfcd39b267bbd2
result: None
- running: commit -m commit2 -d 2000-01-02 foo
+ running: --debug commit -m commit2 -d 2000-01-02 foo
+ committing files:
+ foo
+ committing manifest
+ committing changelog
+ updating the branch cache
+ committed changeset 1:45589e459b2edfbf3dbde7e01f611d2c1e7453d7
result: None
- running: log -r 0
- changeset: 0:0e4634943879
+ running: --debug log -r 0
+ changeset: 0:0e46349438790c460c5c9f7546bfcd39b267bbd2
+ phase: draft
+ parent: -1:0000000000000000000000000000000000000000
+ parent: -1:0000000000000000000000000000000000000000
+ manifest: 0:9091aa5df980aea60860a2e39c95182e68d1ddec
user: test
date: Sat Jan 01 00:00:00 2000 +0000
- summary: commit1
+ files+: foo
+ extra: branch=default
+ description:
+ commit1
+
result: None
- running: log -r tip
- changeset: 1:45589e459b2e
+ running: --debug log -r tip
+ changeset: 1:45589e459b2edfbf3dbde7e01f611d2c1e7453d7
tag: tip
+ phase: draft
+ parent: 0:0e46349438790c460c5c9f7546bfcd39b267bbd2
+ parent: -1:0000000000000000000000000000000000000000
+ manifest: 1:895aa9b7886f89dd017a6d62524e1f9180b04df9
user: test
date: Sun Jan 02 00:00:00 2000 +0000
- summary: commit2
+ files: foo
+ extra: branch=default
+ description:
+ commit2
+
result: None
$ hg blackbox
- 1970/01/01 00:00:00 bob @0e46349438790c460c5c9f7546bfcd39b267bbd2 (5000)> commit -m commit2 -d 2000-01-02 foo
+ 1970/01/01 00:00:00 bob @45589e459b2edfbf3dbde7e01f611d2c1e7453d7 (5000)> updating the branch cache
1970/01/01 00:00:00 bob @45589e459b2edfbf3dbde7e01f611d2c1e7453d7 (5000)> updated served branch cache in * seconds (glob)
1970/01/01 00:00:00 bob @45589e459b2edfbf3dbde7e01f611d2c1e7453d7 (5000)> wrote served branch cache with 1 labels and 1 nodes
- 1970/01/01 00:00:00 bob @45589e459b2edfbf3dbde7e01f611d2c1e7453d7 (5000)> commit -m commit2 -d 2000-01-02 foo exited 0 after * seconds (glob)
- 1970/01/01 00:00:00 bob @45589e459b2edfbf3dbde7e01f611d2c1e7453d7 (5000)> log -r 0
+ 1970/01/01 00:00:00 bob @45589e459b2edfbf3dbde7e01f611d2c1e7453d7 (5000)> --debug commit -m commit2 -d 2000-01-02 foo exited 0 after *.?? seconds (glob)
+ 1970/01/01 00:00:00 bob @45589e459b2edfbf3dbde7e01f611d2c1e7453d7 (5000)> --debug log -r 0
1970/01/01 00:00:00 bob @45589e459b2edfbf3dbde7e01f611d2c1e7453d7 (5000)> writing .hg/cache/tags2-visible with 0 tags
- 1970/01/01 00:00:00 bob @45589e459b2edfbf3dbde7e01f611d2c1e7453d7 (5000)> log -r 0 exited 0 after * seconds (glob)
- 1970/01/01 00:00:00 bob @45589e459b2edfbf3dbde7e01f611d2c1e7453d7 (5000)> log -r tip
- 1970/01/01 00:00:00 bob @45589e459b2edfbf3dbde7e01f611d2c1e7453d7 (5000)> log -r tip exited 0 after * seconds (glob)
+ 1970/01/01 00:00:00 bob @45589e459b2edfbf3dbde7e01f611d2c1e7453d7 (5000)> --debug log -r 0 exited 0 after *.?? seconds (glob)
+ 1970/01/01 00:00:00 bob @45589e459b2edfbf3dbde7e01f611d2c1e7453d7 (5000)> --debug log -r tip
+ 1970/01/01 00:00:00 bob @45589e459b2edfbf3dbde7e01f611d2c1e7453d7 (5000)> --debug log -r tip exited 0 after *.?? seconds (glob)
1970/01/01 00:00:00 bob @45589e459b2edfbf3dbde7e01f611d2c1e7453d7 (5000)> blackbox
Test log recursion from dirty status check
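
Editor's note: the blackbox hunks above switch the read-only dispatch check to run each command with --debug, which is why the expected output now carries the extra debug lines (full node hashes, "committing files:", the per-command "cannot write to blackbox.log" warnings) and why the logged command lines include the --debug prefix. The driver script is derived from test-dispatch.py by the sed invocation shown above and boils down to calling Mercurial's dispatch layer in-process; a minimal sketch of that pattern (not the generated script itself, using the Python 2-era native-string API of hg 4.5):

    # Minimal sketch of driving hg commands in-process the way the
    # blackbox read-only dispatch driver does; the commands mirror the
    # "running:" lines in the expected output above.
    from mercurial import dispatch

    def testdispatch(cmd):
        # print the command and whatever dispatch() returns, matching the
        # "running:" / "result:" pairs above
        print("running: %s" % cmd)
        req = dispatch.request(cmd.split())
        result = dispatch.dispatch(req)
        print("result: %r" % result)

    testdispatch("--debug add foo")
    testdispatch("--debug commit -m commit1 -d 2000-01-01 foo")
    testdispatch("--debug log -r 0")
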
--- a/tests/test-bookmarks-pushpull.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-bookmarks-pushpull.t Mon Jan 22 17:53:02 2018 -0500
@@ -1,3 +1,12 @@
+#testcases b2-pushkey b2-binary
+
+#if b2-pushkey
+ $ cat << EOF >> $HGRCPATH
+ > [devel]
+ > legacy.exchange=bookmarks
+ > EOF
+#endif
+
#require serve
$ cat << EOF >> $HGRCPATH
@@ -103,14 +112,222 @@
delete a remote bookmark
$ hg book -d W
- $ hg push -B W ../a --config "$TESTHOOK"
+
+#if b2-pushkey
+
+ $ hg push -B W ../a --config "$TESTHOOK" --debug --config devel.bundle2.debug=yes
pushing to ../a
+ query 1; heads
searching for changes
+ all remote heads known locally
+ listing keys for "phases"
+ checking for updated bookmarks
+ listing keys for "bookmarks"
no changes found
+ bundle2-output-bundle: "HG20", 4 parts total
+ bundle2-output: start emission of HG20 stream
+ bundle2-output: bundle parameter:
+ bundle2-output: start of parts
+ bundle2-output: bundle part: "replycaps"
+ bundle2-output-part: "replycaps" 205 bytes payload
+ bundle2-output: part 0: "REPLYCAPS"
+ bundle2-output: header chunk size: 16
+ bundle2-output: payload chunk size: 205
+ bundle2-output: closing payload chunk
+ bundle2-output: bundle part: "check:bookmarks"
+ bundle2-output-part: "check:bookmarks" 23 bytes payload
+ bundle2-output: part 1: "CHECK:BOOKMARKS"
+ bundle2-output: header chunk size: 22
+ bundle2-output: payload chunk size: 23
+ bundle2-output: closing payload chunk
+ bundle2-output: bundle part: "check:phases"
+ bundle2-output-part: "check:phases" 48 bytes payload
+ bundle2-output: part 2: "CHECK:PHASES"
+ bundle2-output: header chunk size: 19
+ bundle2-output: payload chunk size: 48
+ bundle2-output: closing payload chunk
+ bundle2-output: bundle part: "pushkey"
+ bundle2-output-part: "pushkey" (params: 4 mandatory) empty payload
+ bundle2-output: part 3: "PUSHKEY"
+ bundle2-output: header chunk size: 90
+ bundle2-output: closing payload chunk
+ bundle2-output: end of bundle
+ bundle2-input: start processing of HG20 stream
+ bundle2-input: reading bundle2 stream parameters
+ bundle2-input-bundle: with-transaction
+ bundle2-input: start extraction of bundle2 parts
+ bundle2-input: part header size: 16
+ bundle2-input: part type: "REPLYCAPS"
+ bundle2-input: part id: "0"
+ bundle2-input: part parameters: 0
+ bundle2-input: found a handler for part replycaps
+ bundle2-input-part: "replycaps" supported
+ bundle2-input: payload chunk size: 205
+ bundle2-input: payload chunk size: 0
+ bundle2-input-part: total payload size 205
+ bundle2-input: part header size: 22
+ bundle2-input: part type: "CHECK:BOOKMARKS"
+ bundle2-input: part id: "1"
+ bundle2-input: part parameters: 0
+ bundle2-input: found a handler for part check:bookmarks
+ bundle2-input-part: "check:bookmarks" supported
+ bundle2-input: payload chunk size: 23
+ bundle2-input: payload chunk size: 0
+ bundle2-input-part: total payload size 23
+ bundle2-input: part header size: 19
+ bundle2-input: part type: "CHECK:PHASES"
+ bundle2-input: part id: "2"
+ bundle2-input: part parameters: 0
+ bundle2-input: found a handler for part check:phases
+ bundle2-input-part: "check:phases" supported
+ bundle2-input: payload chunk size: 48
+ bundle2-input: payload chunk size: 0
+ bundle2-input-part: total payload size 48
+ bundle2-input: part header size: 90
+ bundle2-input: part type: "PUSHKEY"
+ bundle2-input: part id: "3"
+ bundle2-input: part parameters: 4
+ bundle2-input: found a handler for part pushkey
+ bundle2-input-part: "pushkey" (params: 4 mandatory) supported
+ pushing key for "bookmarks:W"
+ bundle2-input: payload chunk size: 0
+ bundle2-input: part header size: 0
+ bundle2-input: end of bundle2 stream
+ bundle2-input-bundle: 3 parts total
+ running hook txnclose-bookmark.test: sh $TESTTMP/hook.sh
test-hook-bookmark: W: 0000000000000000000000000000000000000000 ->
+ bundle2-output-bundle: "HG20", 1 parts total
+ bundle2-output: start emission of HG20 stream
+ bundle2-output: bundle parameter:
+ bundle2-output: start of parts
+ bundle2-output: bundle part: "reply:pushkey"
+ bundle2-output-part: "reply:pushkey" (params: 0 advisory) empty payload
+ bundle2-output: part 0: "REPLY:PUSHKEY"
+ bundle2-output: header chunk size: 43
+ bundle2-output: closing payload chunk
+ bundle2-output: end of bundle
+ bundle2-input: start processing of HG20 stream
+ bundle2-input: reading bundle2 stream parameters
+ bundle2-input-bundle: no-transaction
+ bundle2-input: start extraction of bundle2 parts
+ bundle2-input: part header size: 43
+ bundle2-input: part type: "REPLY:PUSHKEY"
+ bundle2-input: part id: "0"
+ bundle2-input: part parameters: 2
+ bundle2-input: found a handler for part reply:pushkey
+ bundle2-input-part: "reply:pushkey" (params: 0 advisory) supported
+ bundle2-input: payload chunk size: 0
+ bundle2-input: part header size: 0
+ bundle2-input: end of bundle2 stream
+ bundle2-input-bundle: 0 parts total
deleting remote bookmark W
+ listing keys for "phases"
[1]
+#endif
+#if b2-binary
+
+ $ hg push -B W ../a --config "$TESTHOOK" --debug --config devel.bundle2.debug=yes
+ pushing to ../a
+ query 1; heads
+ searching for changes
+ all remote heads known locally
+ listing keys for "phases"
+ checking for updated bookmarks
+ listing keys for "bookmarks"
+ no changes found
+ bundle2-output-bundle: "HG20", 4 parts total
+ bundle2-output: start emission of HG20 stream
+ bundle2-output: bundle parameter:
+ bundle2-output: start of parts
+ bundle2-output: bundle part: "replycaps"
+ bundle2-output-part: "replycaps" 205 bytes payload
+ bundle2-output: part 0: "REPLYCAPS"
+ bundle2-output: header chunk size: 16
+ bundle2-output: payload chunk size: 205
+ bundle2-output: closing payload chunk
+ bundle2-output: bundle part: "check:bookmarks"
+ bundle2-output-part: "check:bookmarks" 23 bytes payload
+ bundle2-output: part 1: "CHECK:BOOKMARKS"
+ bundle2-output: header chunk size: 22
+ bundle2-output: payload chunk size: 23
+ bundle2-output: closing payload chunk
+ bundle2-output: bundle part: "check:phases"
+ bundle2-output-part: "check:phases" 48 bytes payload
+ bundle2-output: part 2: "CHECK:PHASES"
+ bundle2-output: header chunk size: 19
+ bundle2-output: payload chunk size: 48
+ bundle2-output: closing payload chunk
+ bundle2-output: bundle part: "bookmarks"
+ bundle2-output-part: "bookmarks" 23 bytes payload
+ bundle2-output: part 3: "BOOKMARKS"
+ bundle2-output: header chunk size: 16
+ bundle2-output: payload chunk size: 23
+ bundle2-output: closing payload chunk
+ bundle2-output: end of bundle
+ bundle2-input: start processing of HG20 stream
+ bundle2-input: reading bundle2 stream parameters
+ bundle2-input-bundle: with-transaction
+ bundle2-input: start extraction of bundle2 parts
+ bundle2-input: part header size: 16
+ bundle2-input: part type: "REPLYCAPS"
+ bundle2-input: part id: "0"
+ bundle2-input: part parameters: 0
+ bundle2-input: found a handler for part replycaps
+ bundle2-input-part: "replycaps" supported
+ bundle2-input: payload chunk size: 205
+ bundle2-input: payload chunk size: 0
+ bundle2-input-part: total payload size 205
+ bundle2-input: part header size: 22
+ bundle2-input: part type: "CHECK:BOOKMARKS"
+ bundle2-input: part id: "1"
+ bundle2-input: part parameters: 0
+ bundle2-input: found a handler for part check:bookmarks
+ bundle2-input-part: "check:bookmarks" supported
+ bundle2-input: payload chunk size: 23
+ bundle2-input: payload chunk size: 0
+ bundle2-input-part: total payload size 23
+ bundle2-input: part header size: 19
+ bundle2-input: part type: "CHECK:PHASES"
+ bundle2-input: part id: "2"
+ bundle2-input: part parameters: 0
+ bundle2-input: found a handler for part check:phases
+ bundle2-input-part: "check:phases" supported
+ bundle2-input: payload chunk size: 48
+ bundle2-input: payload chunk size: 0
+ bundle2-input-part: total payload size 48
+ bundle2-input: part header size: 16
+ bundle2-input: part type: "BOOKMARKS"
+ bundle2-input: part id: "3"
+ bundle2-input: part parameters: 0
+ bundle2-input: found a handler for part bookmarks
+ bundle2-input-part: "bookmarks" supported
+ bundle2-input: payload chunk size: 23
+ bundle2-input: payload chunk size: 0
+ bundle2-input-part: total payload size 23
+ bundle2-input: part header size: 0
+ bundle2-input: end of bundle2 stream
+ bundle2-input-bundle: 3 parts total
+ running hook txnclose-bookmark.test: sh $TESTTMP/hook.sh
+ test-hook-bookmark: W: 0000000000000000000000000000000000000000 ->
+ bundle2-output-bundle: "HG20", 0 parts total
+ bundle2-output: start emission of HG20 stream
+ bundle2-output: bundle parameter:
+ bundle2-output: start of parts
+ bundle2-output: end of bundle
+ bundle2-input: start processing of HG20 stream
+ bundle2-input: reading bundle2 stream parameters
+ bundle2-input-bundle: no-transaction
+ bundle2-input: start extraction of bundle2 parts
+ bundle2-input: part header size: 0
+ bundle2-input: end of bundle2 stream
+ bundle2-input-bundle: 0 parts total
+ deleting remote bookmark W
+ listing keys for "phases"
+ [1]
+
+#endif
+
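
Editor's note: the b2-pushkey and b2-binary cases above delete the same remote bookmark over two encodings: with devel.legacy.exchange=bookmarks the push carries a pushkey part per bookmark, while the default path sends one binary bookmarks part, which is exactly the difference between the two bundle2-output traces. Both traces come from the ordinary bundle2 bundler; a rough in-process sketch of that API (using a placeholder 'output' part rather than the real bookmark payloads):

    # Rough sketch of the bundle2 bundler API whose trace appears above; the
    # 'output' part and its payload are placeholders, not the real
    # check:bookmarks / pushkey / bookmarks parts the push emits.
    from mercurial import bundle2, ui as uimod

    ui = uimod.ui.load()
    bundler = bundle2.bundle20(ui)
    bundler.newpart(b'output', data=b'hello from a bundle2 part')

    # getchunks() yields the HG20 magic, the (empty) bundle parameters, and
    # each part's header and payload chunks -- the byte counts reported by
    # the bundle2-output: lines above.
    stream = b''.join(bundler.getchunks())
    print(stream[:4])  # b'HG20'
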
export the active bookmark
$ hg bookmark V
@@ -192,7 +409,7 @@
* foobar 1:9b140be10808
$ hg pull --config paths.foo=../a foo --config "$TESTHOOK"
- pulling from $TESTTMP/a (glob)
+ pulling from $TESTTMP/a
searching for changes
adding changesets
adding manifests
@@ -268,7 +485,7 @@
0 files updated, 0 files merged, 0 files removed, 0 files unresolved
(activating bookmark X)
$ hg pull --config paths.foo=../a foo -B . --config "$TESTHOOK"
- pulling from $TESTTMP/a (glob)
+ pulling from $TESTTMP/a
no changes found
divergent bookmark @ stored as @foo
importing bookmark X
@@ -623,12 +840,12 @@
be exchanged)
$ hg -R repo1 incoming -B
- comparing with $TESTTMP/bmcomparison/source (glob)
+ comparing with $TESTTMP/bmcomparison/source
searching for changed bookmarks
no changed bookmarks found
[1]
$ hg -R repo1 outgoing -B
- comparing with $TESTTMP/bmcomparison/source (glob)
+ comparing with $TESTTMP/bmcomparison/source
searching for changed bookmarks
no changed bookmarks found
[1]
@@ -772,7 +989,7 @@
$ echo 2 > f2
$ hg ci -qAmr
$ hg push -B X
- pushing to $TESTTMP/addmarks (glob)
+ pushing to $TESTTMP/addmarks
searching for changes
remote has heads on branch 'default' that are not known locally: a2a606d9ff1b
abort: push creates new remote head 54694f811df9 with bookmark 'X'!
@@ -852,19 +1069,36 @@
Local push
----------
+#if b2-pushkey
+
$ hg push -B @ local
- pushing to $TESTTMP/issue4455-dest (glob)
+ pushing to $TESTTMP/issue4455-dest
searching for changes
no changes found
pushkey-abort: prepushkey hook exited with status 1
abort: exporting bookmark @ failed!
[255]
+
+#endif
+#if b2-binary
+
+ $ hg push -B @ local
+ pushing to $TESTTMP/issue4455-dest
+ searching for changes
+ no changes found
+ abort: prepushkey hook exited with status 1
+ [255]
+
+#endif
+
$ hg -R ../issue4455-dest/ bookmarks
no bookmarks set
Using ssh
---------
+#if b2-pushkey
+
$ hg push -B @ ssh # bundle2+
pushing to ssh://user@dummy/issue4455-dest
searching for changes
@@ -872,6 +1106,7 @@
remote: pushkey-abort: prepushkey hook exited with status 1
abort: exporting bookmark @ failed!
[255]
+
$ hg -R ../issue4455-dest/ bookmarks
no bookmarks set
@@ -882,12 +1117,27 @@
remote: pushkey-abort: prepushkey hook exited with status 1
exporting bookmark @ failed!
[1]
+
+#endif
+#if b2-binary
+
+ $ hg push -B @ ssh # bundle2+
+ pushing to ssh://user@dummy/issue4455-dest
+ searching for changes
+ no changes found
+ remote: prepushkey hook exited with status 1
+ abort: push failed on remote
+ [255]
+
+#endif
+
$ hg -R ../issue4455-dest/ bookmarks
no bookmarks set
Using http
----------
+#if b2-pushkey
$ hg push -B @ http # bundle2+
pushing to http://localhost:$HGPORT/
searching for changes
@@ -895,6 +1145,7 @@
remote: pushkey-abort: prepushkey hook exited with status 1
abort: exporting bookmark @ failed!
[255]
+
$ hg -R ../issue4455-dest/ bookmarks
no bookmarks set
@@ -905,5 +1156,20 @@
remote: pushkey-abort: prepushkey hook exited with status 1
exporting bookmark @ failed!
[1]
+
+#endif
+
+#if b2-binary
+
+ $ hg push -B @ http # bundle2+
+ pushing to http://localhost:$HGPORT/
+ searching for changes
+ no changes found
+ remote: prepushkey hook exited with status 1
+ abort: push failed on remote
+ [255]
+
+#endif
+
$ hg -R ../issue4455-dest/ bookmarks
no bookmarks set
--- a/tests/test-bookmarks-rebase.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-bookmarks-rebase.t Mon Jan 22 17:53:02 2018 -0500
@@ -38,7 +38,7 @@
$ hg rebase -s two -d one
rebasing 3:2ae46b1d99a7 "3" (two tip)
- saved backup bundle to $TESTTMP/.hg/strip-backup/2ae46b1d99a7-e6b057bc-rebase.hg (glob)
+ saved backup bundle to $TESTTMP/.hg/strip-backup/2ae46b1d99a7-e6b057bc-rebase.hg
$ hg log
changeset: 3:42e5ed2cdcf4
--- a/tests/test-bookmarks.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-bookmarks.t Mon Jan 22 17:53:02 2018 -0500
@@ -736,7 +736,7 @@
Z 2:db815d6d32e6
x y 2:db815d6d32e6
$ hg -R ../cloned-bookmarks-manual-update-with-divergence pull
- pulling from $TESTTMP/repo (glob)
+ pulling from $TESTTMP/repo
searching for changes
adding changesets
adding manifests
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-branch-change.t Mon Jan 22 17:53:02 2018 -0500
@@ -0,0 +1,331 @@
+Testing changing branch on commits
+==================================
+
+Setup
+
+ $ cat >> $HGRCPATH << EOF
+ > [alias]
+ > glog = log -G -T "{rev}:{node|short} {desc}\n{branch} ({bookmarks})"
+ > [experimental]
+ > evolution = createmarkers
+ > [extensions]
+ > rebase=
+ > EOF
+
+ $ hg init repo
+ $ cd repo
+ $ for ch in a b c d e; do echo foo >> $ch; hg ci -Aqm "Added "$ch; done
+ $ hg glog
+ @ 4:aa98ab95a928 Added e
+ | default ()
+ o 3:62615734edd5 Added d
+ | default ()
+ o 2:28ad74487de9 Added c
+ | default ()
+ o 1:29becc82797a Added b
+ | default ()
+ o 0:18d04c59bb5d Added a
+ default ()
+
+ $ hg branches
+ default 4:aa98ab95a928
+
+Try without passing a new branch name
+
+ $ hg branch -r .
+ abort: no branch name specified for the revisions
+ [255]
+
+Setting an invalid branch name
+
+ $ hg branch -r . a:b
+ abort: ':' cannot be used in a name
+ [255]
+ $ hg branch -r . tip
+ abort: the name 'tip' is reserved
+ [255]
+ $ hg branch -r . 1234
+ abort: cannot use an integer as a name
+ [255]
+
+Change on non-linear set of commits
+
+ $ hg branch -r 2 -r 4 foo
+ abort: cannot change branch of non-linear revisions
+ [255]
+
+Change in middle of the stack (linear commits)
+
+ $ hg branch -r 1::3 foo
+ abort: cannot change branch of changeset with children
+ [255]
+
+Change with dirty working directory
+
+ $ echo bar > a
+ $ hg branch -r . foo
+ abort: uncommitted changes
+ [255]
+
+ $ hg revert --all
+ reverting a
+
+Change on empty revision set
+
+ $ hg branch -r 'draft() - all()' foo
+ abort: empty revision set
+ [255]
+
+Changing branch on linear set of commits from head
+
+Without obsmarkers
+
+ $ hg branch -r 3:4 foo --config experimental.evolution=!
+ changed branch on 2 changesets
+ saved backup bundle to $TESTTMP/repo/.hg/strip-backup/62615734edd5-e86bd13a-branch-change.hg
+ $ hg glog
+ @ 4:3938acfb5c0f Added e
+ | foo ()
+ o 3:9435da006bdc Added d
+ | foo ()
+ o 2:28ad74487de9 Added c
+ | default ()
+ o 1:29becc82797a Added b
+ | default ()
+ o 0:18d04c59bb5d Added a
+ default ()
+
+ $ hg branches
+ foo 4:3938acfb5c0f
+ default 2:28ad74487de9 (inactive)
+
+With obsmarkers
+
+ $ hg branch -r 3::4 bar
+ changed branch on 2 changesets
+ $ hg glog
+ @ 6:7c1991464886 Added e
+ | bar ()
+ o 5:1ea05e93925f Added d
+ | bar ()
+ o 2:28ad74487de9 Added c
+ | default ()
+ o 1:29becc82797a Added b
+ | default ()
+ o 0:18d04c59bb5d Added a
+ default ()
+
+ $ hg branches
+ bar 6:7c1991464886
+ default 2:28ad74487de9 (inactive)
+
+Change branch name to an existing branch
+
+ $ hg branch -r . default
+ abort: a branch of the same name already exists
+ [255]
+
+Changing branch on a branch head which is not a topological head
+
+ $ hg branch -r 2 stable
+ abort: cannot change branch of changeset with children
+ [255]
+
+Enabling the allowunstable config and trying to change branch on a branch head
+which is not a topological head
+
+ $ echo "[experimental]" >> .hg/hgrc
+ $ echo "evolution.allowunstable=yes" >> .hg/hgrc
+ $ hg branch -r 2 foo
+ changed branch on 1 changesets
+ 2 new orphan changesets
+
+Changing branch of an obsoleted changeset
+
+ $ hg branch -r 4 foobar
+ abort: hidden revision '4' was rewritten as: 7c1991464886!
+ (use --hidden to access hidden revisions)
+ [255]
+
+ $ hg branch -r 4 --hidden foobar
+ abort: cannot change branch of a obsolete changeset
+ [255]
+
+Make sure bookmark movement is correct
+
+ $ hg bookmark b1
+ $ hg glog -r '.^::'
+ @ 6:7c1991464886 Added e
+ | bar (b1)
+ * 5:1ea05e93925f Added d
+ | bar ()
+ ~
+
+ $ hg branch -r '(.^)::' wat --debug
+ changing branch of '1ea05e93925f806d875a2163f9b76764be644636' from 'bar' to 'wat'
+ committing files:
+ d
+ committing manifest
+ committing changelog
+ new node id is 343660ccab7400da637bd6a211d07f413536d718
+ changing branch of '7c19914648869f5b02fc7fed31ddee9783fdd680' from 'bar' to 'wat'
+ committing files:
+ e
+ committing manifest
+ committing changelog
+ new node id is de1404b45a69f8cc6437d7679033ee33e9efb4ba
+ moving bookmarks ['b1'] from 7c19914648869f5b02fc7fed31ddee9783fdd680 to de1404b45a69f8cc6437d7679033ee33e9efb4ba
+ resolving manifests
+ branchmerge: False, force: False, partial: False
+ ancestor: 7c1991464886, local: 7c1991464886+, remote: de1404b45a69
+ starting 4 threads for background file closing (?)
+ changed branch on 2 changesets
+ updating the branch cache
+ invalid branchheads cache (served): tip differs
+
+ $ hg glog -r '(.^)::'
+ @ 9:de1404b45a69 Added e
+ | wat (b1)
+ * 8:343660ccab74 Added d
+ | wat ()
+ ~
+
+Make sure phase handling is correct
+
+ $ echo foo >> bar
+ $ hg ci -Aqm "added bar" --secret
+ 1 new orphan changesets
+ $ hg glog -r .
+ @ 10:8ad1294c1660 added bar
+ | wat (b1)
+ ~
+ $ hg branch -r . secret
+ changed branch on 1 changesets
+ $ hg phase -r .
+ 11: secret
+
+ $ hg branches
+ secret 11:38a9b2d53f98
+ foo 7:8a4729a5e2b8
+ wat 9:de1404b45a69 (inactive)
+ default 2:28ad74487de9 (inactive)
+ $ hg branch
+ secret
+
+Changing branch of another head, different from the one we are on
+
+ $ hg glog
+ @ 11:38a9b2d53f98 added bar
+ | secret (b1)
+ * 9:de1404b45a69 Added e
+ | wat ()
+ * 8:343660ccab74 Added d
+ | wat ()
+ | o 7:8a4729a5e2b8 Added c
+ | | foo ()
+ x | 2:28ad74487de9 Added c
+ |/ default ()
+ o 1:29becc82797a Added b
+ | default ()
+ o 0:18d04c59bb5d Added a
+ default ()
+
+ $ hg branch
+ secret
+
+ $ hg branch -r 7 foobar
+ changed branch on 1 changesets
+
+The current branch must be preserved
+ $ hg branch
+ secret
+
+Changing branch on multiple heads at once
+
+ $ hg rebase -s 8 -d 12 --keepbranches -q
+
+ $ hg rebase -s 14 -d 1 --keepbranches -q
+
+ $ hg branch -r 0: stable
+ changed branch on 6 changesets
+ $ hg glog
+ @ 23:6a5ddbcfb870 added bar
+ | stable (b1)
+ o 22:baedc6e98a67 Added e
+ | stable ()
+ | o 21:99ac7bf8aad1 Added d
+ | | stable ()
+ | o 20:0ecb4d39c4bd Added c
+ |/ stable ()
+ o 19:fd45b986b109 Added b
+ | stable ()
+ o 18:204d2769eca2 Added a
+ stable ()
+
+ $ hg branches
+ stable 23:6a5ddbcfb870
+
+ $ hg branch
+ stable
+
+Changing to the same branch is a no-op
+
+ $ hg branch -r 19::21 stable
+ changed branch on 0 changesets
+
+Changing branch name to an existing branch name works when the branch of the
+parent of the root of the revs is the same as the new branch name
+
+ $ hg branch -r 20::21 bugfix
+ changed branch on 2 changesets
+ $ hg glog
+ o 25:714defe1cf34 Added d
+ | bugfix ()
+ o 24:98394def28fc Added c
+ | bugfix ()
+ | @ 23:6a5ddbcfb870 added bar
+ | | stable (b1)
+ | o 22:baedc6e98a67 Added e
+ |/ stable ()
+ o 19:fd45b986b109 Added b
+ | stable ()
+ o 18:204d2769eca2 Added a
+ stable ()
+
+ $ hg branch -r 24:25 stable
+ changed branch on 2 changesets
+ $ hg glog
+ o 27:4ec342341562 Added d
+ | stable ()
+ o 26:83f48859c2de Added c
+ | stable ()
+ | @ 23:6a5ddbcfb870 added bar
+ | | stable (b1)
+ | o 22:baedc6e98a67 Added e
+ |/ stable ()
+ o 19:fd45b986b109 Added b
+ | stable ()
+ o 18:204d2769eca2 Added a
+ stable ()
+
+Testing on merge
+
+ $ hg merge -r 26
+ 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ (branch merge, don't forget to commit)
+
+ $ hg branch -r . abcd
+ abort: outstanding uncommitted merge
+ [255]
+ $ hg ci -m "Merge commit"
+ $ hg branch -r '(.^)::' def
+ abort: cannot change branch of a merge commit
+ [255]
+
+Changing branch on public changeset
+
+ $ hg phase -r 27 -p
+ $ hg branch -r 27 def
+ abort: cannot change branch of public changesets
+ (see 'hg help phases' for details)
+ [255]
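
Editor's note: the new test-branch-change.t above exercises the 4.5 ability to rewrite the branch recorded on existing changesets with 'hg branch -r REV NAME', including its interaction with obsmarkers, bookmarks, phases and merges. What gets rewritten is the per-changeset branch field; a small read-only sketch of inspecting it from Python (the repository path and revision are placeholders standing in for the repo the test builds):

    # Read-only sketch: inspect the branch recorded on a changeset, which is
    # the field 'hg branch -r' rewrites; 'repo' and 'tip' are placeholders.
    from mercurial import hg, ui as uimod

    ui = uimod.ui.load()
    repo = hg.repository(ui, b'repo')
    ctx = repo[b'tip']
    print(ctx.rev(), ctx.branch())  # branch() is what {branch} shows in glog
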
--- a/tests/test-bugzilla.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-bugzilla.t Mon Jan 22 17:53:02 2018 -0500
@@ -61,7 +61,7 @@
$ cat bzmock.log && rm bzmock.log
update bugid=123, newstate={}, committer='test'
----
- changeset 7875a8342c6f in repo $TESTTMP/mockremote refers to bug 123. (glob)
+ changeset 7875a8342c6f in repo $TESTTMP/mockremote refers to bug 123.
details:
Fixes bug 123
----
--- a/tests/test-bundle.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-bundle.t Mon Jan 22 17:53:02 2018 -0500
@@ -856,7 +856,7 @@
$ hg bundle --base 1 -r 3 ../update2bundled.hg
1 changesets found
$ hg strip -r 3
- saved backup bundle to $TESTTMP/update2bundled/.hg/strip-backup/8bd3e1f196af-017e56d8-backup.hg (glob)
+ saved backup bundle to $TESTTMP/update2bundled/.hg/strip-backup/8bd3e1f196af-017e56d8-backup.hg
$ hg merge -R ../update2bundled.hg -r 3
setting parent to node 8bd3e1f196af289b2b121be08031e76d7ae92098 that only exists in the bundle
1 files updated, 0 files merged, 0 files removed, 0 files unresolved
--- a/tests/test-bundle2-exchange.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-bundle2-exchange.t Mon Jan 22 17:53:02 2018 -0500
@@ -106,7 +106,7 @@
postclose-tip:02de42196ebe draft
txnclose hook: HG_HOOKNAME=txnclose.env HG_HOOKTYPE=txnclose HG_PHASES_MOVED=1 HG_TXNID=TXN:$ID$ HG_TXNNAME=phase
$ hg -R other pull -r 24b6387c8c8c
- pulling from $TESTTMP/main (glob)
+ pulling from $TESTTMP/main
searching for changes
adding changesets
adding manifests
@@ -137,7 +137,7 @@
postclose-tip:02de42196ebe draft
txnclose hook: HG_HOOKNAME=txnclose.env HG_HOOKTYPE=txnclose HG_PHASES_MOVED=1 HG_TXNID=TXN:$ID$ HG_TXNNAME=phase
$ hg -R other pull -r 24b6387c8c8c
- pulling from $TESTTMP/main (glob)
+ pulling from $TESTTMP/main
no changes found
pre-close-tip:24b6387c8c8c public
postclose-tip:24b6387c8c8c public
@@ -157,7 +157,7 @@
pull empty
$ hg -R other pull -r 24b6387c8c8c
- pulling from $TESTTMP/main (glob)
+ pulling from $TESTTMP/main
no changes found
pre-close-tip:24b6387c8c8c public
postclose-tip:24b6387c8c8c public
@@ -253,7 +253,7 @@
remote: added 1 changesets with 0 changes to 0 files (-1 heads)
remote: 1 new obsolescence markers
remote: pre-close-tip:eea13746799a public book_eea1
- remote: pushkey: lock state after "bookmarks"
+ remote: pushkey: lock state after "bookmark"
remote: lock: free
remote: wlock: free
remote: postclose-tip:eea13746799a public book_eea1
@@ -339,7 +339,7 @@
remote: added 1 changesets with 1 changes to 1 files
remote: 1 new obsolescence markers
remote: pre-close-tip:5fddd98957c8 draft book_5fdd
- remote: pushkey: lock state after "bookmarks"
+ remote: pushkey: lock state after "bookmark"
remote: lock: free
remote: wlock: free
remote: postclose-tip:5fddd98957c8 draft book_5fdd
@@ -390,7 +390,7 @@
remote: added 1 changesets with 1 changes to 1 files
remote: 1 new obsolescence markers
remote: pre-close-tip:32af7686d403 public book_32af
- remote: pushkey: lock state after "bookmarks"
+ remote: pushkey: lock state after "bookmark"
remote: lock: free
remote: wlock: free
remote: postclose-tip:32af7686d403 public book_32af
--- a/tests/test-bundle2-multiple-changegroups.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-bundle2-multiple-changegroups.t Mon Jan 22 17:53:02 2018 -0500
@@ -13,24 +13,24 @@
> # in 'heads' as intermediate heads for the first changegroup.
> intermediates = [repo[r].p1().node() for r in heads]
> outgoing = discovery.outgoing(repo, common, intermediates)
- > cg = changegroup.makechangegroup(repo, outgoing, '01',
+ > cg = changegroup.makechangegroup(repo, outgoing, b'01',
> source, bundlecaps=bundlecaps)
- > bundler.newpart('output', data='changegroup1')
- > bundler.newpart('changegroup', data=cg.getchunks())
+ > bundler.newpart(b'output', data=b'changegroup1')
+ > bundler.newpart(b'changegroup', data=cg.getchunks())
> outgoing = discovery.outgoing(repo, common + intermediates, heads)
- > cg = changegroup.makechangegroup(repo, outgoing, '01',
+ > cg = changegroup.makechangegroup(repo, outgoing, b'01',
> source, bundlecaps=bundlecaps)
- > bundler.newpart('output', data='changegroup2')
- > bundler.newpart('changegroup', data=cg.getchunks())
+ > bundler.newpart(b'output', data=b'changegroup2')
+ > bundler.newpart(b'changegroup', data=cg.getchunks())
>
> def _pull(repo, *args, **kwargs):
> pullop = _orig_pull(repo, *args, **kwargs)
- > repo.ui.write('pullop.cgresult is %d\n' % pullop.cgresult)
+ > repo.ui.write(b'pullop.cgresult is %d\n' % pullop.cgresult)
> return pullop
>
> _orig_pull = exchange.pull
> exchange.pull = _pull
- > exchange.getbundle2partsmapping['changegroup'] = _getbundlechangegrouppart
+ > exchange.getbundle2partsmapping[b'changegroup'] = _getbundlechangegrouppart
> EOF
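
Editor's note: the hunk above only converts the inline test extension to byte-string literals (part names, payloads, and the ui.write format string) so it keeps working when the suite runs under Python 3; its logic is unchanged. The override trick it relies on, replacing the 'changegroup' entry in exchange.getbundle2partsmapping, can also be written as a thin wrapper; a hedged sketch reusing the names visible in the hunk (anything beyond the leading arguments is assumed and simply forwarded):

    # Sketch of wrapping the 'changegroup' getbundle part generator, reusing
    # exchange.getbundle2partsmapping and bundler.newpart() from the hunk
    # above; extra arguments are assumed and forwarded via **kwargs.
    from mercurial import exchange

    _orig = exchange.getbundle2partsmapping[b'changegroup']

    def _changegroup(bundler, repo, source, **kwargs):
        bundler.newpart(b'output', data=b'before the real changegroup part')
        return _orig(bundler, repo, source, **kwargs)

    exchange.getbundle2partsmapping[b'changegroup'] = _changegroup
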
$ cat >> $HGRCPATH << EOF
@@ -74,7 +74,7 @@
Pull the new commits in the clone
$ hg pull
- pulling from $TESTTMP/repo (glob)
+ pulling from $TESTTMP/repo
searching for changes
remote: changegroup1
adding changesets
@@ -145,7 +145,7 @@
$ cd ../clone
$ hg pull
- pulling from $TESTTMP/repo (glob)
+ pulling from $TESTTMP/repo
searching for changes
remote: changegroup1
adding changesets
@@ -219,7 +219,7 @@
$ cd ../clone
$ hg pull
- pulling from $TESTTMP/repo (glob)
+ pulling from $TESTTMP/repo
searching for changes
remote: changegroup1
adding changesets
--- a/tests/test-cache-abuse.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-cache-abuse.t Mon Jan 22 17:53:02 2018 -0500
@@ -70,6 +70,11 @@
$ damage "tags --hidden" tags2
$ damage tags tags2-visible
$ damage "tag -f t3" hgtagsfnodes1
+ 1 new orphan changesets
+ 1 new orphan changesets
+ 1 new orphan changesets
+ 1 new orphan changesets
+ 1 new orphan changesets
Beat up branch caches:
--- a/tests/test-casefolding.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-casefolding.t Mon Jan 22 17:53:02 2018 -0500
@@ -178,8 +178,8 @@
$ echo 'foo' > a/B/c/D/E
$ hg ci -m 'e content change'
$ hg revert --all -r 0
- removing a/B/c/D/E (glob)
- adding a/B/c/D/e (glob)
+ removing a/B/c/D/E
+ adding a/B/c/D/e
$ find * | sort
a
a/B
--- a/tests/test-cat.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-cat.t Mon Jan 22 17:53:02 2018 -0500
@@ -66,9 +66,9 @@
Test template output
$ hg --cwd tmp cat ../b ../c -T '== {path} ({abspath}) ==\n{data}'
- == ../b (b) == (glob)
+ == ../b (b) ==
1
- == ../c (c) == (glob)
+ == ../c (c) ==
3
$ hg cat b c -Tjson --output -
@@ -119,3 +119,13 @@
$ PATTERN='t4' hg log -r '.' -T "{envvars % '{key} -> {value}\n'}" \
> --config "experimental.exportableenviron=PATTERN"
PATTERN -> t4
+
+Test behavior of output when directory structure does not already exist
+
+ $ mkdir foo
+ $ echo a > foo/a
+ $ hg add foo/a
+ $ hg commit -qm "add foo/a"
+ $ hg cat --output "output/%p" foo/a
+ $ cat output/foo/a
+ a
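
Editor's note: the new block above pins down a behaviour change in 'hg cat --output': a '%p' template such as output/%p now creates the missing intermediate directories (output/foo/) before writing. Outside of hg the idea is simply to create the parent directory of the formatted path first; a generic sketch, not hg's implementation:

    # Generic sketch of the behaviour exercised above (not hg's own code):
    # expand the --output template and create missing parent directories
    # before writing.
    import os

    def writeoutput(template, relpath, data):
        dest = template.replace('%p', relpath)      # e.g. 'output/foo/a'
        dirname = os.path.dirname(dest)
        if dirname and not os.path.isdir(dirname):
            os.makedirs(dirname)                    # creates output/foo/
        with open(dest, 'wb') as fp:
            fp.write(data)

    writeoutput('output/%p', 'foo/a', b'a\n')
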
--- a/tests/test-censor.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-censor.t Mon Jan 22 17:53:02 2018 -0500
@@ -353,7 +353,7 @@
checking files
2 files, 1 changesets, 2 total revisions
$ hg pull -r $H1 -r $H2
- pulling from $TESTTMP/r (glob)
+ pulling from $TESTTMP/r
searching for changes
adding changesets
adding manifests
@@ -398,7 +398,7 @@
$ hg cat -r $CLEANREV target
Re-sanitized; nothing to see here
$ hg push -f -r $H2
- pushing to $TESTTMP/r (glob)
+ pushing to $TESTTMP/r
searching for changes
adding changesets
adding manifests
--- a/tests/test-check-clang-format.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-check-clang-format.t Mon Jan 22 17:53:02 2018 -0500
@@ -3,7 +3,7 @@
$ . "$TESTDIR/helpers-testrepo.sh"
$ cd "$TESTDIR"/..
- $ for f in `testrepohg files 'set:(**.c or **.h) and not "listfile:contrib/clang-format-blacklist"'` ; do
+ $ for f in `testrepohg files 'set:(**.c or **.cc or **.h) and not "listfile:contrib/clang-format-blacklist"'` ; do
> clang-format --style file $f > $f.formatted
> cmp $f $f.formatted || diff -u $f $f.formatted
> rm $f.formatted
--- a/tests/test-check-code.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-check-code.t Mon Jan 22 17:53:02 2018 -0500
@@ -15,7 +15,6 @@
Skipping i18n/polib.py it has no-che?k-code (glob)
Skipping mercurial/httpclient/__init__.py it has no-che?k-code (glob)
Skipping mercurial/httpclient/_readers.py it has no-che?k-code (glob)
- Skipping mercurial/selectors2.py it has no-che?k-code (glob)
Skipping mercurial/statprof.py it has no-che?k-code (glob)
Skipping tests/badserverext.py it has no-che?k-code (glob)
@@ -44,6 +43,7 @@
.hgignore
.hgsigs
.hgtags
+ .jshintrc
CONTRIBUTING
CONTRIBUTORS
COPYING
--- a/tests/test-check-config.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-check-config.t Mon Jan 22 17:53:02 2018 -0500
@@ -33,7 +33,7 @@
$ $PYTHON contrib/check-config.py < $TESTTMP/files
foo = ui.configint('ui', 'intdefault', default=42)
conflict on ui.intdefault: ('int', '42') != ('int', '1')
- at $TESTTMP/testfile.py:12: (glob)
+ at $TESTTMP/testfile.py:12:
undocumented: ui.doesnotexist (str)
undocumented: ui.intdefault (int) [42]
undocumented: ui.intdefault2 (int) [42]
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-check-jshint.t Mon Jan 22 17:53:02 2018 -0500
@@ -0,0 +1,11 @@
+#require test-repo jshint hg10
+
+ $ . "$TESTDIR/helpers-testrepo.sh"
+
+run jshint on all tracked files ending in .js
+
+ $ cd "`dirname "$TESTDIR"`"
+
+ $ testrepohg locate 'set:**.js' \
+ > 2>/dev/null \
+ > | xargs jshint
--- a/tests/test-check-pylint.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-check-pylint.t Mon Jan 22 17:53:02 2018 -0500
@@ -11,7 +11,8 @@
$ touch $TESTTMP/fakerc
$ pylint --rcfile=$TESTTMP/fakerc --disable=all \
- > --enable=W0102 --reports=no \
+ > --enable=W0102,C0321 \
+ > --reports=no \
> --ignore=thirdparty \
> mercurial hgdemandimport hgext hgext3rd
(?)
--- a/tests/test-clone-uncompressed.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-clone-uncompressed.t Mon Jan 22 17:53:02 2018 -0500
@@ -1,5 +1,14 @@
#require serve
+#testcases stream-legacy stream-bundle2
+
+#if stream-bundle2
+ $ cat << EOF >> $HGRCPATH
+ > [experimental]
+ > bundle2.stream = yes
+ > EOF
+#endif
+
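
Editor's note: test-clone-uncompressed.t now runs twice via the #testcases directive: once against the legacy stream_out protocol and once with the bundle2 'stream2' part enabled through experimental.bundle2.stream. The debugcapabilities dumps further down spell out what each server advertises; the same data is reachable from Python through a peer object. A small sketch (the URL stands in for the test's $HGPORT server, and the raw capability strings are printed rather than guessed at):

    # Small sketch: list the capabilities a server advertises, the same data
    # 'hg debugcapabilities' formats below; the URL is illustrative.
    from mercurial import hg, ui as uimod

    ui = uimod.ui.load()
    peer = hg.peer(ui, {}, b'http://localhost:8000')
    for cap in sorted(peer.capabilities()):
        print(cap)
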
Initialize repository
the status call is to check for issue5130
@@ -12,30 +21,222 @@
... fh.write(str(i))
$ hg -q commit -A -m 'add a lot of files'
$ hg st
+ $ hg --config server.uncompressed=false serve -p $HGPORT -d --pid-file=hg.pid
+ $ cat hg.pid > $DAEMON_PIDS
+ $ cd ..
+
+Cannot stream clone when server.uncompressed is set to false
+
+ $ get-with-headers.py $LOCALIP:$HGPORT '?cmd=stream_out'
+ 200 Script output follows
+
+ 1
+
+#if stream-legacy
+ $ hg debugcapabilities http://localhost:$HGPORT
+ Main capabilities:
+ batch
+ branchmap
+ $USUAL_BUNDLE2_CAPS_SERVER$
+ changegroupsubset
+ compression=zstd,zlib
+ getbundle
+ httpheader=1024
+ httpmediatype=0.1rx,0.1tx,0.2tx
+ known
+ lookup
+ pushkey
+ unbundle=HG10GZ,HG10BZ,HG10UN
+ unbundlehash
+ Bundle2 capabilities:
+ HG20
+ bookmarks
+ changegroup
+ 01
+ 02
+ digests
+ md5
+ sha1
+ sha512
+ error
+ abort
+ unsupportedcontent
+ pushraced
+ pushkey
+ hgtagsfnodes
+ listkeys
+ phases
+ heads
+ pushkey
+ remote-changegroup
+ http
+ https
+
+ $ hg clone --stream -U http://localhost:$HGPORT server-disabled
+ warning: stream clone requested but server has them disabled
+ requesting all changes
+ adding changesets
+ adding manifests
+ adding file changes
+ added 2 changesets with 1025 changes to 1025 files
+ new changesets 96ee1d7354c4:c17445101a72
+
+ $ get-with-headers.py $LOCALIP:$HGPORT '?cmd=getbundle' content-type --bodyfile body --hgproto 0.2 --requestheader "x-hgarg-1=bundlecaps=HG20%2Cbundle2%3DHG20%250Abookmarks%250Achangegroup%253D01%252C02%250Adigests%253Dmd5%252Csha1%252Csha512%250Aerror%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250Ahgtagsfnodes%250Alistkeys%250Aphases%253Dheads%250Apushkey%250Aremote-changegroup%253Dhttp%252Chttps&cg=0&common=0000000000000000000000000000000000000000&heads=c17445101a72edac06facd130d14808dfbd5c7c2&stream=1"
+ 200 Script output follows
+ content-type: application/mercurial-0.2
+
+
+ $ f --size body --hexdump --bytes 100
+ body: size=232
+ 0000: 04 6e 6f 6e 65 48 47 32 30 00 00 00 00 00 00 00 |.noneHG20.......|
+ 0010: cf 0b 45 52 52 4f 52 3a 41 42 4f 52 54 00 00 00 |..ERROR:ABORT...|
+ 0020: 00 01 01 07 3c 04 72 6d 65 73 73 61 67 65 73 74 |....<.rmessagest|
+ 0030: 72 65 61 6d 20 64 61 74 61 20 72 65 71 75 65 73 |ream data reques|
+ 0040: 74 65 64 20 62 75 74 20 73 65 72 76 65 72 20 64 |ted but server d|
+ 0050: 6f 65 73 20 6e 6f 74 20 61 6c 6c 6f 77 20 74 68 |oes not allow th|
+ 0060: 69 73 20 66 |is f|
+
+#endif
+#if stream-bundle2
+ $ hg debugcapabilities http://localhost:$HGPORT
+ Main capabilities:
+ batch
+ branchmap
+ $USUAL_BUNDLE2_CAPS_SERVER$
+ changegroupsubset
+ compression=zstd,zlib
+ getbundle
+ httpheader=1024
+ httpmediatype=0.1rx,0.1tx,0.2tx
+ known
+ lookup
+ pushkey
+ unbundle=HG10GZ,HG10BZ,HG10UN
+ unbundlehash
+ Bundle2 capabilities:
+ HG20
+ bookmarks
+ changegroup
+ 01
+ 02
+ digests
+ md5
+ sha1
+ sha512
+ error
+ abort
+ unsupportedcontent
+ pushraced
+ pushkey
+ hgtagsfnodes
+ listkeys
+ phases
+ heads
+ pushkey
+ remote-changegroup
+ http
+ https
+
+ $ hg clone --stream -U http://localhost:$HGPORT server-disabled
+ warning: stream clone requested but server has them disabled
+ requesting all changes
+ adding changesets
+ adding manifests
+ adding file changes
+ added 2 changesets with 1025 changes to 1025 files
+ new changesets 96ee1d7354c4:c17445101a72
+
+ $ get-with-headers.py $LOCALIP:$HGPORT '?cmd=getbundle' content-type --bodyfile body --hgproto 0.2 --requestheader "x-hgarg-1=bundlecaps=HG20%2Cbundle2%3DHG20%250Abookmarks%250Achangegroup%253D01%252C02%250Adigests%253Dmd5%252Csha1%252Csha512%250Aerror%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250Ahgtagsfnodes%250Alistkeys%250Aphases%253Dheads%250Apushkey%250Aremote-changegroup%253Dhttp%252Chttps&cg=0&common=0000000000000000000000000000000000000000&heads=c17445101a72edac06facd130d14808dfbd5c7c2&stream=1"
+ 200 Script output follows
+ content-type: application/mercurial-0.2
+
+
+ $ f --size body --hexdump --bytes 100
+ body: size=232
+ 0000: 04 6e 6f 6e 65 48 47 32 30 00 00 00 00 00 00 00 |.noneHG20.......|
+ 0010: cf 0b 45 52 52 4f 52 3a 41 42 4f 52 54 00 00 00 |..ERROR:ABORT...|
+ 0020: 00 01 01 07 3c 04 72 6d 65 73 73 61 67 65 73 74 |....<.rmessagest|
+ 0030: 72 65 61 6d 20 64 61 74 61 20 72 65 71 75 65 73 |ream data reques|
+ 0040: 74 65 64 20 62 75 74 20 73 65 72 76 65 72 20 64 |ted but server d|
+ 0050: 6f 65 73 20 6e 6f 74 20 61 6c 6c 6f 77 20 74 68 |oes not allow th|
+ 0060: 69 73 20 66 |is f|
+
+#endif
+
+ $ killdaemons.py
+ $ cd server
$ hg serve -p $HGPORT -d --pid-file=hg.pid
- $ cat hg.pid >> $DAEMON_PIDS
+ $ cat hg.pid > $DAEMON_PIDS
$ cd ..
Basic clone
+#if stream-legacy
$ hg clone --stream -U http://localhost:$HGPORT clone1
streaming all changes
1027 files to transfer, 96.3 KB of data
transferred 96.3 KB in * seconds (*/sec) (glob)
searching for changes
no changes found
+#endif
+#if stream-bundle2
+ $ hg clone --stream -U http://localhost:$HGPORT clone1
+ streaming all changes
+ 1030 files to transfer, 96.4 KB of data
+ transferred 96.4 KB in * seconds (* */sec) (glob)
+
+ $ ls -1 clone1/.hg/cache
+ branch2-served
+ rbc-names-v1
+ rbc-revs-v1
+#endif
+
+getbundle requests with stream=1 are uncompressed
+
+ $ get-with-headers.py $LOCALIP:$HGPORT '?cmd=getbundle' content-type --bodyfile body --hgproto '0.1 0.2 comp=zlib,none' --requestheader "x-hgarg-1=bundlecaps=HG20%2Cbundle2%3DHG20%250Abookmarks%250Achangegroup%253D01%252C02%250Adigests%253Dmd5%252Csha1%252Csha512%250Aerror%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250Ahgtagsfnodes%250Alistkeys%250Aphases%253Dheads%250Apushkey%250Aremote-changegroup%253Dhttp%252Chttps&cg=0&common=0000000000000000000000000000000000000000&heads=c17445101a72edac06facd130d14808dfbd5c7c2&stream=1"
+ 200 Script output follows
+ content-type: application/mercurial-0.2
+
+
+ $ f --size --hex --bytes 256 body
+ body: size=112222
+ 0000: 04 6e 6f 6e 65 48 47 32 30 00 00 00 00 00 00 00 |.noneHG20.......|
+ 0010: 68 07 53 54 52 45 41 4d 32 00 00 00 00 03 00 09 |h.STREAM2.......|
+ 0020: 05 09 04 0c 2d 62 79 74 65 63 6f 75 6e 74 39 38 |....-bytecount98|
+ 0030: 37 35 38 66 69 6c 65 63 6f 75 6e 74 31 30 33 30 |758filecount1030|
+ 0040: 72 65 71 75 69 72 65 6d 65 6e 74 73 64 6f 74 65 |requirementsdote|
+ 0050: 6e 63 6f 64 65 20 66 6e 63 61 63 68 65 20 67 65 |ncode fncache ge|
+ 0060: 6e 65 72 61 6c 64 65 6c 74 61 20 72 65 76 6c 6f |neraldelta revlo|
+ 0070: 67 76 31 20 73 74 6f 72 65 00 00 80 00 73 08 42 |gv1 store....s.B|
+ 0080: 64 61 74 61 2f 30 2e 69 00 03 00 01 00 00 00 00 |data/0.i........|
+ 0090: 00 00 00 02 00 00 00 01 00 00 00 00 00 00 00 01 |................|
+ 00a0: ff ff ff ff ff ff ff ff 80 29 63 a0 49 d3 23 87 |.........)c.I.#.|
+ 00b0: bf ce fe 56 67 92 67 2c 69 d1 ec 39 00 00 00 00 |...Vg.g,i..9....|
+ 00c0: 00 00 00 00 00 00 00 00 75 30 73 08 42 64 61 74 |........u0s.Bdat|
+ 00d0: 61 2f 31 2e 69 00 03 00 01 00 00 00 00 00 00 00 |a/1.i...........|
+ 00e0: 02 00 00 00 01 00 00 00 00 00 00 00 01 ff ff ff |................|
+ 00f0: ff ff ff ff ff f9 76 da 1d 0d f2 25 6c de 08 db |......v....%l...|
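
Editor's note: the hexdump above already shows the shape of an uncompressed 0.2 getbundle body: a length-prefixed compression engine name ('none') followed by the HG20 bundle2 magic and then the stream2 part. A tiny decoding sketch based only on the bytes displayed (Python 3; an illustration, not Mercurial's wire-protocol parser):

    # Decode just the first row of the body shown above: 0x04, the name
    # 'none', then the 'HG20' magic; the trailing zero bytes are left alone.
    body = bytes.fromhex('046e6f6e654847323000000000000000')
    namelen = body[0]                          # 4
    compression = body[1:1 + namelen]          # b'none'
    magic = body[1 + namelen:1 + namelen + 4]  # b'HG20'
    print(compression, magic)
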
--uncompressed is an alias to --stream
+#if stream-legacy
$ hg clone --uncompressed -U http://localhost:$HGPORT clone1-uncompressed
streaming all changes
1027 files to transfer, 96.3 KB of data
transferred 96.3 KB in * seconds (*/sec) (glob)
searching for changes
no changes found
+#endif
+#if stream-bundle2
+ $ hg clone --uncompressed -U http://localhost:$HGPORT clone1-uncompressed
+ streaming all changes
+ 1030 files to transfer, 96.4 KB of data
+ transferred 96.4 KB in * seconds (* */sec) (glob)
+#endif
Clone with background file closing enabled
+#if stream-legacy
$ hg --debug --config worker.backgroundclose=true --config worker.backgroundcloseminfilecount=1 clone --stream -U http://localhost:$HGPORT clone-background | grep -v adding
using http://localhost:$HGPORT/
sending capabilities command
@@ -57,6 +258,27 @@
bundle2-input-part: total payload size 24
bundle2-input-bundle: 1 parts total
checking for updated bookmarks
+#endif
+#if stream-bundle2
+ $ hg --debug --config worker.backgroundclose=true --config worker.backgroundcloseminfilecount=1 clone --stream -U http://localhost:$HGPORT clone-background | grep -v adding
+ using http://localhost:$HGPORT/
+ sending capabilities command
+ query 1; heads
+ sending batch command
+ streaming all changes
+ sending getbundle command
+ bundle2-input-bundle: with-transaction
+ bundle2-input-part: "stream2" (params: 3 mandatory) supported
+ applying stream bundle
+ 1030 files to transfer, 96.4 KB of data
+ starting 4 threads for background file closing
+ starting 4 threads for background file closing
+ transferred 96.4 KB in * seconds (* */sec) (glob)
+ bundle2-input-part: total payload size 112077
+ bundle2-input-part: "listkeys" (params: 1 mandatory) supported
+ bundle2-input-bundle: 1 parts total
+ checking for updated bookmarks
+#endif
Cannot stream clone when there are secret changesets
@@ -79,12 +301,20 @@
$ cat hg.pid > $DAEMON_PIDS
$ cd ..
+#if stream-legacy
$ hg clone --stream -U http://localhost:$HGPORT secret-allowed
streaming all changes
1027 files to transfer, 96.3 KB of data
transferred 96.3 KB in * seconds (*/sec) (glob)
searching for changes
no changes found
+#endif
+#if stream-bundle2
+ $ hg clone --stream -U http://localhost:$HGPORT secret-allowed
+ streaming all changes
+ 1030 files to transfer, 96.4 KB of data
+ transferred 96.4 KB in * seconds (* */sec) (glob)
+#endif
$ killdaemons.py
@@ -171,3 +401,106 @@
$ wait
$ hg -R clone id
000000000000
+ $ cd ..
+
+Stream repository with bookmarks
+--------------------------------
+
+(revert introduction of secret changeset)
+
+ $ hg -R server phase --draft 'secret()'
+
+add a bookmark
+
+ $ hg -R server bookmark -r tip some-bookmark
+
+clone it
+
+#if stream-legacy
+ $ hg clone --stream http://localhost:$HGPORT with-bookmarks
+ streaming all changes
+ 1027 files to transfer, 96.3 KB of data
+ transferred 96.3 KB in * seconds (*) (glob)
+ searching for changes
+ no changes found
+ updating to branch default
+ 1025 files updated, 0 files merged, 0 files removed, 0 files unresolved
+#endif
+#if stream-bundle2
+ $ hg clone --stream http://localhost:$HGPORT with-bookmarks
+ streaming all changes
+ 1033 files to transfer, 96.6 KB of data
+ transferred 96.6 KB in * seconds (* */sec) (glob)
+ updating to branch default
+ 1025 files updated, 0 files merged, 0 files removed, 0 files unresolved
+#endif
+ $ hg -R with-bookmarks bookmarks
+ some-bookmark 1:c17445101a72
+
+Stream repository with phases
+-----------------------------
+
+Clone as publishing
+
+ $ hg -R server phase -r 'all()'
+ 0: draft
+ 1: draft
+
+#if stream-legacy
+ $ hg clone --stream http://localhost:$HGPORT phase-publish
+ streaming all changes
+ 1027 files to transfer, 96.3 KB of data
+ transferred 96.3 KB in * seconds (*) (glob)
+ searching for changes
+ no changes found
+ updating to branch default
+ 1025 files updated, 0 files merged, 0 files removed, 0 files unresolved
+#endif
+#if stream-bundle2
+ $ hg clone --stream http://localhost:$HGPORT phase-publish
+ streaming all changes
+ 1033 files to transfer, 96.6 KB of data
+ transferred 96.6 KB in * seconds (* */sec) (glob)
+ updating to branch default
+ 1025 files updated, 0 files merged, 0 files removed, 0 files unresolved
+#endif
+ $ hg -R phase-publish phase -r 'all()'
+ 0: public
+ 1: public
+
+Clone as non publishing
+
+ $ cat << EOF >> server/.hg/hgrc
+ > [phases]
+ > publish = False
+ > EOF
+ $ killdaemons.py
+ $ hg -R server serve -p $HGPORT -d --pid-file=hg.pid
+ $ cat hg.pid > $DAEMON_PIDS
+
+#if stream-legacy
+ $ hg clone --stream http://localhost:$HGPORT phase-no-publish
+ streaming all changes
+ 1027 files to transfer, 96.3 KB of data
+ transferred 96.3 KB in * seconds (*) (glob)
+ searching for changes
+ no changes found
+ updating to branch default
+ 1025 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ $ hg -R phase-no-publish phase -r 'all()'
+ 0: public
+ 1: public
+#endif
+#if stream-bundle2
+ $ hg clone --stream http://localhost:$HGPORT phase-no-publish
+ streaming all changes
+ 1034 files to transfer, 96.7 KB of data
+ transferred 96.7 KB in * seconds (* */sec) (glob)
+ updating to branch default
+ 1025 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ $ hg -R phase-no-publish phase -r 'all()'
+ 0: draft
+ 1: draft
+#endif
+
+ $ killdaemons.py
--- a/tests/test-clone.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-clone.t Mon Jan 22 17:53:02 2018 -0500
@@ -138,7 +138,7 @@
$ hg clone -q -U --config 'paths.foobar=a#0' foobar f
$ hg -R f showconfig paths.default
- $TESTTMP/a#0 (glob)
+ $TESTTMP/a#0
Use --pull:
@@ -808,7 +808,7 @@
The destination should point to it
$ cat share-dest1a/.hg/sharedpath; echo
- $TESTTMP/share/b5f04eac9d8f7a6a9fcb070243cccea7dc5ea0c1/.hg (glob)
+ $TESTTMP/share/b5f04eac9d8f7a6a9fcb070243cccea7dc5ea0c1/.hg
The destination should have bookmarks
@@ -818,7 +818,7 @@
The default path should be the remote, not the share
$ hg -R share-dest1a config paths.default
- $TESTTMP/source1a (glob)
+ $TESTTMP/source1a
Clone with existing share dir should result in pull + share
@@ -839,7 +839,7 @@
b5f04eac9d8f7a6a9fcb070243cccea7dc5ea0c1
$ cat share-dest1b/.hg/sharedpath; echo
- $TESTTMP/share/b5f04eac9d8f7a6a9fcb070243cccea7dc5ea0c1/.hg (glob)
+ $TESTTMP/share/b5f04eac9d8f7a6a9fcb070243cccea7dc5ea0c1/.hg
We only get bookmarks from the remote, not everything in the share
@@ -850,7 +850,7 @@
Default path should be source, not share.
$ hg -R share-dest1b config paths.default
- $TESTTMP/source1b (glob)
+ $TESTTMP/source1b
Checked out revision should be head of default branch
--- a/tests/test-clonebundles.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-clonebundles.t Mon Jan 22 17:53:02 2018 -0500
@@ -32,8 +32,8 @@
$ cat server/access.log
* - - [*] "GET /?cmd=capabilities HTTP/1.1" 200 - (glob)
- * - - [*] "GET /?cmd=batch HTTP/1.1" 200 - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D x-hgproto-1:0.1 0.2 comp=*zlib,none,bzip2 (glob)
- * - - [*] "GET /?cmd=getbundle HTTP/1.1" 200 - x-hgarg-1:bundlecaps=HG20%2Cbundle2%3DHG20%250Achangegroup%253D01%252C02%250Adigests%253Dmd5%252Csha1%252Csha512%250Aerror%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250Ahgtagsfnodes%250Alistkeys%250Aphases%253Dheads%250Apushkey%250Aremote-changegroup%253Dhttp%252Chttps&cg=1&common=0000000000000000000000000000000000000000&heads=aaff8d2ffbbf07a46dd1f05d8ae7877e3f56e2a2&listkeys=bookmarks&phases=1 x-hgproto-1:0.1 0.2 comp=*zlib,none,bzip2 (glob)
+ $LOCALIP - - [$LOGDATE$] "GET /?cmd=batch HTTP/1.1" 200 - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ (glob)
+ $LOCALIP - - [$LOGDATE$] "GET /?cmd=getbundle HTTP/1.1" 200 - x-hgarg-1:bookmarks=1&$USUAL_BUNDLE_CAPS$&cg=1&common=0000000000000000000000000000000000000000&heads=aaff8d2ffbbf07a46dd1f05d8ae7877e3f56e2a2&listkeys=bookmarks&phases=1 x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ (glob)
Empty manifest file results in retrieval
(the extension only checks if the manifest file exists)
@@ -517,3 +517,30 @@
transferred 613 bytes in * seconds (*) (glob)
searching for changes
no changes found
+
+Test clone bundle retrieved through bundle2
+
+ $ cat << EOF >> $HGRCPATH
+ > [extensions]
+ > largefiles=
+ > EOF
+ $ killdaemons.py
+ $ hg -R server serve -d -p $HGPORT --pid-file hg.pid --accesslog access.log
+ $ cat hg.pid >> $DAEMON_PIDS
+
+ $ hg -R server debuglfput gz-a.hg
+ f6eca29e25359f6a92f1ea64527cdcf1b5abe62a
+
+ $ cat > server/.hg/clonebundles.manifest << EOF
+ > largefile://f6eca29e25359f6a92f1ea64527cdcf1b5abe62a BUNDLESPEC=gzip-v2
+ > EOF
+
+ $ hg clone -U http://localhost:$HGPORT largefile-provided --traceback
+ applying clone bundle from largefile://f6eca29e25359f6a92f1ea64527cdcf1b5abe62a
+ adding changesets
+ adding manifests
+ adding file changes
+ added 2 changesets with 2 changes to 2 files
+ finished applying clone bundle
+ searching for changes
+ no changes found
--- a/tests/test-command-template.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-command-template.t Mon Jan 22 17:53:02 2018 -0500
@@ -204,6 +204,16 @@
$ hg log -r 'wdir()' -T '{manifest}\n'
+Internal resources shouldn't be exposed (issue5699):
+
+ $ hg log -r. -T '{cache}{ctx}{repo}{revcache}{templ}{ui}'
+
+Never crash on internal resource not available:
+
+ $ hg --cwd .. debugtemplate '{"c0bebeef"|shortest}\n'
+ abort: template resource not available: ctx
+ [255]
+
Quoting for ui.logtemplate
$ hg tip --config "ui.logtemplate={rev}\n"
@@ -2751,6 +2761,25 @@
$ hg log -T '{date'
hg: parse error at 1: unterminated template expansion
[255]
+ $ hg log -T '{date(}'
+ hg: parse error at 7: not a prefix: end
+ [255]
+ $ hg log -T '{date)}'
+ hg: parse error at 5: invalid token
+ [255]
+ $ hg log -T '{date date}'
+ hg: parse error at 6: invalid token
+ [255]
+
+ $ hg log -T '{}'
+ hg: parse error at 2: not a prefix: end
+ [255]
+ $ hg debugtemplate -v '{()}'
+ (template
+ (group
+ None))
+ hg: parse error: missing argument
+ [255]
Behind the scenes, this will throw TypeError
@@ -2881,6 +2910,17 @@
@@ -0,0 +1,1 @@
+second
+ui verbosity:
+
+ $ hg log -l1 -T '{verbosity}\n'
+
+ $ hg log -l1 -T '{verbosity}\n' --debug
+ debug
+ $ hg log -l1 -T '{verbosity}\n' --quiet
+ quiet
+ $ hg log -l1 -T '{verbosity}\n' --verbose
+ verbose
+
$ cd ..
@@ -4064,6 +4104,48 @@
5:13207e5a10d9fd28ec424934298e176197f2c67f,
4:bbe44766e73d5f11ed2177f1838de10c53ef3e74
+Invalid arguments passed to revset()
+
+ $ hg log -T '{revset("%whatever", 0)}\n'
+ hg: parse error: unexpected revspec format character w
+ [255]
+ $ hg log -T '{revset("%lwhatever", files)}\n'
+ hg: parse error: unexpected revspec format character w
+ [255]
+ $ hg log -T '{revset("%s %s", 0)}\n'
+ hg: parse error: missing argument for revspec
+ [255]
+ $ hg log -T '{revset("", 0)}\n'
+ hg: parse error: too many revspec arguments specified
+ [255]
+ $ hg log -T '{revset("%s", 0, 1)}\n'
+ hg: parse error: too many revspec arguments specified
+ [255]
+ $ hg log -T '{revset("%", 0)}\n'
+ hg: parse error: incomplete revspec format character
+ [255]
+ $ hg log -T '{revset("%l", 0)}\n'
+ hg: parse error: incomplete revspec format character
+ [255]
+ $ hg log -T '{revset("%d", 'foo')}\n'
+ hg: parse error: invalid argument for revspec
+ [255]
+ $ hg log -T '{revset("%ld", files)}\n'
+ hg: parse error: invalid argument for revspec
+ [255]
+ $ hg log -T '{revset("%ls", 0)}\n'
+ hg: parse error: invalid argument for revspec
+ [255]
+ $ hg log -T '{revset("%b", 'foo')}\n'
+ hg: parse error: invalid argument for revspec
+ [255]
+ $ hg log -T '{revset("%lb", files)}\n'
+ hg: parse error: invalid argument for revspec
+ [255]
+ $ hg log -T '{revset("%r", 0)}\n'
+ hg: parse error: invalid argument for revspec
+ [255]
+
Test files function
$ hg log -T "{rev}\n{join(files('*'), '\n')}\n"
--- a/tests/test-commandserver.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-commandserver.t Mon Jan 22 17:53:02 2018 -0500
@@ -207,6 +207,7 @@
devel.default-date=0 0
extensions.fsmonitor= (fsmonitor !)
largefiles.usercache=$TESTTMP/.cache/largefiles
+ lfs.usercache=$TESTTMP/.cache/lfs
ui.slash=True
ui.interactive=False
ui.mergemarkers=detailed
--- a/tests/test-commit-amend.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-commit-amend.t Mon Jan 22 17:53:02 2018 -0500
@@ -16,6 +16,7 @@
$ hg phase -r . -p
$ hg ci --amend
abort: cannot amend public changesets
+ (see 'hg help phases' for details)
[255]
$ hg phase -r . -f -d
@@ -40,7 +41,7 @@
$ HGEDITOR="\"sh\" \"`pwd`/editor.sh\"" hg commit --amend -m 'amend base1'
pretxncommit 43f1ba15f28a50abf0aae529cf8a16bfced7b149
43f1ba15f28a tip
- saved backup bundle to $TESTTMP/.hg/strip-backup/489edb5b847d-5ab4f721-amend.hg (glob)
+ saved backup bundle to $TESTTMP/.hg/strip-backup/489edb5b847d-5ab4f721-amend.hg
$ echo 'pretxncommit.foo = ' >> $HGRCPATH
$ hg diff -c .
diff -r ad120869acf0 -r 43f1ba15f28a a
@@ -98,7 +99,7 @@
Add new file along with modified existing file:
$ hg ci --amend -m 'amend base1 new file'
- saved backup bundle to $TESTTMP/.hg/strip-backup/43f1ba15f28a-007467c2-amend.hg (glob)
+ saved backup bundle to $TESTTMP/.hg/strip-backup/43f1ba15f28a-007467c2-amend.hg
Remove file that was added in amended commit:
(and test logfile option)
@@ -107,7 +108,7 @@
$ hg rm b
$ echo 'amend base1 remove new file' > ../logfile
$ HGEDITOR="\"sh\" \"`pwd`/editor.sh\"" hg ci --amend --logfile ../logfile
- saved backup bundle to $TESTTMP/.hg/strip-backup/c16295aaf401-1ada9901-amend.hg (glob)
+ saved backup bundle to $TESTTMP/.hg/strip-backup/c16295aaf401-1ada9901-amend.hg
$ hg cat b
b: no such file in rev 47343646fa3d
@@ -127,7 +128,7 @@
254 (changelog)
163 (manifests)
131 a
- saved backup bundle to $TESTTMP/.hg/strip-backup/47343646fa3d-c2758885-amend.hg (glob)
+ saved backup bundle to $TESTTMP/.hg/strip-backup/47343646fa3d-c2758885-amend.hg
1 changesets found
uncompressed size of bundle content:
250 (changelog)
@@ -174,10 +175,10 @@
> EOF
$ HGEDITOR="sh .hg/checkeditform.sh" hg ci --amend -u foo -d '1 0'
HGEDITFORM=commit.amend.normal
- saved backup bundle to $TESTTMP/.hg/strip-backup/401431e913a1-5e8e532c-amend.hg (glob)
+ saved backup bundle to $TESTTMP/.hg/strip-backup/401431e913a1-5e8e532c-amend.hg
$ echo a >> a
$ hg ci --amend -u foo -d '1 0'
- saved backup bundle to $TESTTMP/.hg/strip-backup/d96b1d28ae33-677e0afb-amend.hg (glob)
+ saved backup bundle to $TESTTMP/.hg/strip-backup/d96b1d28ae33-677e0afb-amend.hg
$ hg log -r .
changeset: 1:a9a13940fc03
tag: tip
@@ -271,7 +272,7 @@
249 (changelog)
163 (manifests)
133 a
- saved backup bundle to $TESTTMP/.hg/strip-backup/a9a13940fc03-7c2e8674-amend.hg (glob)
+ saved backup bundle to $TESTTMP/.hg/strip-backup/a9a13940fc03-7c2e8674-amend.hg
1 changesets found
uncompressed size of bundle content:
257 (changelog)
@@ -307,7 +308,7 @@
257 (changelog)
163 (manifests)
133 a
- saved backup bundle to $TESTTMP/.hg/strip-backup/64a124ba1b44-10374b8f-amend.hg (glob)
+ saved backup bundle to $TESTTMP/.hg/strip-backup/64a124ba1b44-10374b8f-amend.hg
1 changesets found
uncompressed size of bundle content:
257 (changelog)
@@ -334,13 +335,13 @@
$ hg book book1
$ hg book book2
$ hg ci --amend -m 'move bookmarks'
- saved backup bundle to $TESTTMP/.hg/strip-backup/7892795b8e38-3fb46217-amend.hg (glob)
+ saved backup bundle to $TESTTMP/.hg/strip-backup/7892795b8e38-3fb46217-amend.hg
$ hg book
book1 1:8311f17e2616
* book2 1:8311f17e2616
$ echo a >> a
$ hg ci --amend -m 'move bookmarks'
- saved backup bundle to $TESTTMP/.hg/strip-backup/8311f17e2616-f0504fe3-amend.hg (glob)
+ saved backup bundle to $TESTTMP/.hg/strip-backup/8311f17e2616-f0504fe3-amend.hg
$ hg book
book1 1:a3b65065808c
* book2 1:a3b65065808c
@@ -374,7 +375,7 @@
$ hg branch default -f
marked working directory as branch default
$ hg ci --amend -m 'back to default'
- saved backup bundle to $TESTTMP/.hg/strip-backup/f8339a38efe1-c18453c9-amend.hg (glob)
+ saved backup bundle to $TESTTMP/.hg/strip-backup/f8339a38efe1-c18453c9-amend.hg
$ hg branches
default 2:9c07515f2650
@@ -390,7 +391,7 @@
$ echo b >> b
$ hg ci -mb
$ hg ci --amend --close-branch -m 'closing branch foo'
- saved backup bundle to $TESTTMP/.hg/strip-backup/c962248fa264-54245dc7-amend.hg (glob)
+ saved backup bundle to $TESTTMP/.hg/strip-backup/c962248fa264-54245dc7-amend.hg
Same thing, different code path:
@@ -399,7 +400,7 @@
reopening closed branch head 4
$ echo b >> b
$ hg ci --amend --close-branch
- saved backup bundle to $TESTTMP/.hg/strip-backup/027371728205-b900d9fa-amend.hg (glob)
+ saved backup bundle to $TESTTMP/.hg/strip-backup/027371728205-b900d9fa-amend.hg
$ hg branches
default 2:9c07515f2650
@@ -420,7 +421,7 @@
$ hg ci -m 'b -> c'
$ hg mv c d
$ hg ci --amend -m 'b -> d'
- saved backup bundle to $TESTTMP/.hg/strip-backup/42f3f27a067d-f23cc9f7-amend.hg (glob)
+ saved backup bundle to $TESTTMP/.hg/strip-backup/42f3f27a067d-f23cc9f7-amend.hg
$ hg st --rev '.^' --copies d
A d
b
@@ -428,7 +429,7 @@
$ hg ci -m 'e = d'
$ hg cp e f
$ hg ci --amend -m 'f = d'
- saved backup bundle to $TESTTMP/.hg/strip-backup/9198f73182d5-251d584a-amend.hg (glob)
+ saved backup bundle to $TESTTMP/.hg/strip-backup/9198f73182d5-251d584a-amend.hg
$ hg st --rev '.^' --copies f
A f
d
@@ -439,7 +440,7 @@
$ hg cp a f
$ mv f.orig f
$ hg ci --amend -m replacef
- saved backup bundle to $TESTTMP/.hg/strip-backup/f0993ab6b482-eda301bf-amend.hg (glob)
+ saved backup bundle to $TESTTMP/.hg/strip-backup/f0993ab6b482-eda301bf-amend.hg
$ hg st --change . --copies
$ hg log -r . --template "{file_copies}\n"
@@ -451,7 +452,7 @@
adding g
$ hg mv g h
$ hg ci --amend
- saved backup bundle to $TESTTMP/.hg/strip-backup/58585e3f095c-0f5ebcda-amend.hg (glob)
+ saved backup bundle to $TESTTMP/.hg/strip-backup/58585e3f095c-0f5ebcda-amend.hg
$ hg st --change . --copies h
A h
$ hg log -r . --template "{file_copies}\n"
@@ -471,11 +472,11 @@
$ echo a >> a
$ hg ci -ma
$ hg ci --amend -m "a'"
- saved backup bundle to $TESTTMP/.hg/strip-backup/39a162f1d65e-9dfe13d8-amend.hg (glob)
+ saved backup bundle to $TESTTMP/.hg/strip-backup/39a162f1d65e-9dfe13d8-amend.hg
$ hg log -r . --template "{branch}\n"
a
$ hg ci --amend -m "a''"
- saved backup bundle to $TESTTMP/.hg/strip-backup/d5ca7b1ac72b-0b4c1a34-amend.hg (glob)
+ saved backup bundle to $TESTTMP/.hg/strip-backup/d5ca7b1ac72b-0b4c1a34-amend.hg
$ hg log -r . --template "{branch}\n"
a
@@ -492,7 +493,7 @@
$ hg graft 12
grafting 12:2647734878ef "fork" (tip)
$ hg ci --amend -m 'graft amend'
- saved backup bundle to $TESTTMP/.hg/strip-backup/fe8c6f7957ca-25638666-amend.hg (glob)
+ saved backup bundle to $TESTTMP/.hg/strip-backup/fe8c6f7957ca-25638666-amend.hg
$ hg log -r . --debug | grep extra
extra: amend_source=fe8c6f7957ca1665ed77496ed7a07657d469ac60
extra: branch=a
@@ -604,6 +605,7 @@
babar
$ hg commit --amend
+ 1 new orphan changesets
$ hg log -r 'orphan()'
changeset: 16:37973c7e0b61
branch: a
@@ -1111,7 +1113,7 @@
marked working directory as branch newdirname
(branches are permanent and global, did you want a bookmark?)
$ hg mv olddirname newdirname
- moving olddirname/commonfile.py to newdirname/commonfile.py (glob)
+ moving olddirname/commonfile.py to newdirname/commonfile.py
$ hg ci -m rename
$ hg update default
@@ -1129,7 +1131,7 @@
$ hg ci -m add
$
$ hg debugrename newdirname/newfile.py
- newdirname/newfile.py renamed from olddirname/newfile.py:690b295714aed510803d3020da9c70fca8336def (glob)
+ newdirname/newfile.py renamed from olddirname/newfile.py:690b295714aed510803d3020da9c70fca8336def
$ hg status -C --change .
A newdirname/newfile.py
$ hg status -C --rev 1
@@ -1148,7 +1150,7 @@
$ echo a >> newdirname/commonfile.py
$ hg ci --amend -m bug
$ hg debugrename newdirname/newfile.py
- newdirname/newfile.py renamed from olddirname/newfile.py:690b295714aed510803d3020da9c70fca8336def (glob)
+ newdirname/newfile.py renamed from olddirname/newfile.py:690b295714aed510803d3020da9c70fca8336def
$ hg debugindex newdirname/newfile.py
rev offset length delta linkrev nodeid p1 p2
0 0 89 -1 3 34a4d536c0c0 000000000000 000000000000
--- a/tests/test-commit-interactive-curses.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-commit-interactive-curses.t Mon Jan 22 17:53:02 2018 -0500
@@ -206,7 +206,7 @@
> X
> EOF
$ hg commit -i -m "newly added file" -d "0 0"
- saved backup bundle to $TESTTMP/a/.hg/strip-backup/2b0e9be4d336-3cf0bc8c-amend.hg (glob)
+ saved backup bundle to $TESTTMP/a/.hg/strip-backup/2b0e9be4d336-3cf0bc8c-amend.hg
$ hg diff -c .
diff -r a6735021574d -r c1d239d165ae x
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
--- a/tests/test-commit-unresolved.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-commit-unresolved.t Mon Jan 22 17:53:02 2018 -0500
@@ -21,13 +21,63 @@
$ commit "D" 3
created new head
+State before the merge
+
+ $ hg status
+ $ hg id
+ e45016d2b3d3 tip
+ $ hg summary
+ parent: 3:e45016d2b3d3 tip
+ D
+ branch: default
+ commit: (clean)
+ update: 2 new changesets, 2 branch heads (merge)
+ phases: 4 draft
+
+Testing the abort functionality first in case of conflicts
+
+ $ hg merge --abort
+ abort: no merge in progress
+ [255]
+ $ hg merge
+ merging A
+ warning: conflicts while merging A! (edit, then use 'hg resolve --mark')
+ 1 files updated, 0 files merged, 0 files removed, 1 files unresolved
+ use 'hg resolve' to retry unresolved file merges or 'hg merge --abort' to abandon
+ [1]
+
+ $ hg merge --abort e4501
+ abort: cannot specify a node with --abort
+ [255]
+ $ hg merge --abort --rev e4501
+ abort: cannot specify both --rev and --abort
+ [255]
+
+ $ hg merge --abort
+ aborting the merge, updating back to e45016d2b3d3
+ 1 files updated, 0 files merged, 1 files removed, 0 files unresolved
+
+Checking that we got back in the same state
+
+ $ hg status
+ ? A.orig
+ $ hg id
+ e45016d2b3d3 tip
+ $ hg summary
+ parent: 3:e45016d2b3d3 tip
+ D
+ branch: default
+ commit: 1 unknown (clean)
+ update: 2 new changesets, 2 branch heads (merge)
+ phases: 4 draft
+
Merging a conflict araises
$ hg merge
merging A
warning: conflicts while merging A! (edit, then use 'hg resolve --mark')
1 files updated, 0 files merged, 0 files removed, 1 files unresolved
- use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon
+ use 'hg resolve' to retry unresolved file merges or 'hg merge --abort' to abandon
[1]
Correct the conflict without marking the file as resolved
@@ -52,7 +102,7 @@
merging A
warning: conflicts while merging A! (edit, then use 'hg resolve --mark')
1 files updated, 0 files merged, 0 files removed, 1 files unresolved
- use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon
+ use 'hg resolve' to retry unresolved file merges or 'hg merge --abort' to abandon
[1]
$ hg rm --force A
$ hg commit -m merged
@@ -64,4 +114,28 @@
$ hg commit -m merged
created new head
+Testing the abort functionality in case of no conflicts
+
+ $ hg update -C 0
+ 1 files updated, 0 files merged, 1 files removed, 0 files unresolved
+ $ addcommit "E" 4
+ created new head
+ $ hg id
+ 68352a18a7c4 tip
+
+ $ hg merge -r 4
+ 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ (branch merge, don't forget to commit)
+
+ $ hg merge --preview --abort
+ abort: cannot specify --preview with --abort
+ [255]
+
+ $ hg merge --abort
+ aborting the merge, updating back to 68352a18a7c4
+ 1 files updated, 0 files merged, 1 files removed, 0 files unresolved
+
+ $ hg id
+ 68352a18a7c4 tip
+
$ cd ..
--- a/tests/test-commit.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-commit.t Mon Jan 22 17:53:02 2018 -0500
@@ -61,7 +61,7 @@
$ mkdir dir
$ echo boo > dir/file
$ hg add
- adding dir/file (glob)
+ adding dir/file
$ hg -v commit -m commit-9 dir
committing files:
dir/file
@@ -180,8 +180,8 @@
$ mkdir bar
$ echo bar > bar/bar
$ hg add
- adding bar/bar (glob)
- adding foo/foo (glob)
+ adding bar/bar
+ adding foo/foo
$ HGEDITOR=cat hg ci -e -m commit-subdir-1 foo
commit-subdir-1
@@ -648,7 +648,8 @@
> u = uimod.ui.load()
> r = hg.repository(u, '.')
> def filectxfn(repo, memctx, path):
- > return context.memfilectx(repo, path, '[hooks]\nupdate = echo owned')
+ > return context.memfilectx(repo, memctx, path,
+ > '[hooks]\nupdate = echo owned')
> c = context.memctx(r, [r['tip'].node(), node.nullid],
> 'evil', [notrc], filectxfn, 0)
> r.commitctx(c)
@@ -673,14 +674,15 @@
> u = uimod.ui.load()
> r = hg.repository(u, '.')
> def filectxfn(repo, memctx, path):
- > return context.memfilectx(repo, path, '[hooks]\nupdate = echo owned')
+ > return context.memfilectx(repo, memctx, path,
+ > '[hooks]\nupdate = echo owned')
> c = context.memctx(r, [r['tip'].node(), node.nullid],
> 'evil', [notrc], filectxfn, 0)
> r.commitctx(c)
> EOF
$ $PYTHON evil-commit.py
$ hg co --clean tip
- abort: path contains illegal component: HG~1/hgrc (glob)
+ abort: path contains illegal component: HG~1/hgrc
[255]
$ hg rollback -f
@@ -692,14 +694,15 @@
> u = uimod.ui.load()
> r = hg.repository(u, '.')
> def filectxfn(repo, memctx, path):
- > return context.memfilectx(repo, path, '[hooks]\nupdate = echo owned')
+ > return context.memfilectx(repo, memctx, path,
+ > '[hooks]\nupdate = echo owned')
> c = context.memctx(r, [r['tip'].node(), node.nullid],
> 'evil', [notrc], filectxfn, 0)
> r.commitctx(c)
> EOF
$ $PYTHON evil-commit.py
$ hg co --clean tip
- abort: path contains illegal component: HG8B6C~2/hgrc (glob)
+ abort: path contains illegal component: HG8B6C~2/hgrc
[255]
# test that an unmodified commit template message aborts
--- a/tests/test-completion.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-completion.t Mon Jan 22 17:53:02 2018 -0500
@@ -72,6 +72,7 @@
debugapplystreamclonebundle
debugbuilddag
debugbundle
+ debugcapabilities
debugcheckstate
debugcolor
debugcommands
@@ -84,8 +85,10 @@
debugdeltachain
debugdirstate
debugdiscovery
+ debugdownload
debugextensions
debugfileset
+ debugformat
debugfsinfo
debuggetbundle
debugignore
@@ -226,7 +229,7 @@
forget: include, exclude
init: ssh, remotecmd, insecure
log: follow, follow-first, date, copies, keyword, rev, line-range, removed, only-merges, user, only-branch, branch, prune, patch, git, limit, no-merges, stat, graph, style, template, include, exclude
- merge: force, rev, preview, tool
+ merge: force, rev, preview, abort, tool
pull: update, force, rev, bookmark, branch, ssh, remotecmd, insecure
push: force, rev, bookmark, branch, new-branch, pushvars, ssh, remotecmd, insecure
remove: after, force, subrepos, include, exclude
@@ -239,7 +242,7 @@
backout: merge, commit, no-commit, parent, rev, edit, tool, include, exclude, message, logfile, date, user
bisect: reset, good, bad, skip, extend, command, noupdate
bookmarks: force, rev, delete, rename, inactive, template
- branch: force, clean
+ branch: force, clean, rev
branches: active, closed, template
bundle: force, rev, branch, base, all, type, ssh, remotecmd, insecure
cat: output, rev, decode, include, exclude, template
@@ -249,6 +252,7 @@
debugapplystreamclonebundle:
debugbuilddag: mergeable-file, overwritten-file, new-file
debugbundle: all, part-type, spec
+ debugcapabilities:
debugcheckstate:
debugcolor: style
debugcommands:
@@ -259,9 +263,11 @@
debugdate: extended
debugdeltachain: changelog, manifest, dir, template
debugdirstate: nodates, datesort
- debugdiscovery: old, nonheads, ssh, remotecmd, insecure
+ debugdiscovery: old, nonheads, rev, ssh, remotecmd, insecure
+ debugdownload: output
debugextensions: template
debugfileset: rev
+ debugformat: template
debugfsinfo:
debuggetbundle: head, common, type
debugignore:
@@ -270,7 +276,7 @@
debuginstall: template
debugknown:
debuglabelcomplete:
- debuglocks: force-lock, force-wlock
+ debuglocks: force-lock, force-wlock, set-lock, set-wlock
debugmergestate:
debugnamecomplete:
debugobsolete: flags, record-parents, rev, exclusive, index, delete, date, user, template
--- a/tests/test-config.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-config.t Mon Jan 22 17:53:02 2018 -0500
@@ -7,7 +7,7 @@
> novaluekey
> EOF
$ hg showconfig
- hg: parse error at $TESTTMP/.hg/hgrc:1: novaluekey (glob)
+ hg: parse error at $TESTTMP/.hg/hgrc:1: novaluekey
[255]
Invalid syntax: no key
@@ -16,7 +16,7 @@
> =nokeyvalue
> EOF
$ hg showconfig
- hg: parse error at $TESTTMP/.hg/hgrc:1: =nokeyvalue (glob)
+ hg: parse error at $TESTTMP/.hg/hgrc:1: =nokeyvalue
[255]
Test hint about invalid syntax from leading white space
@@ -25,7 +25,7 @@
> key=value
> EOF
$ hg showconfig
- hg: parse error at $TESTTMP/.hg/hgrc:1: key=value (glob)
+ hg: parse error at $TESTTMP/.hg/hgrc:1: key=value
unexpected leading whitespace
[255]
@@ -34,7 +34,7 @@
> key=value
> EOF
$ hg showconfig
- hg: parse error at $TESTTMP/.hg/hgrc:1: [section] (glob)
+ hg: parse error at $TESTTMP/.hg/hgrc:1: [section]
unexpected leading whitespace
[255]
--- a/tests/test-conflict.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-conflict.t Mon Jan 22 17:53:02 2018 -0500
@@ -38,7 +38,7 @@
merging a
warning: conflicts while merging a! (edit, then use 'hg resolve --mark')
0 files updated, 0 files merged, 0 files removed, 1 files unresolved
- use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon
+ use 'hg resolve' to retry unresolved file merges or 'hg merge --abort' to abandon
[1]
$ hg id
@@ -91,7 +91,7 @@
merging a
warning: conflicts while merging a! (edit, then use 'hg resolve --mark')
0 files updated, 0 files merged, 0 files removed, 1 files unresolved
- use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon
+ use 'hg resolve' to retry unresolved file merges or 'hg merge --abort' to abandon
[1]
$ cat a
@@ -182,7 +182,7 @@
merging a
warning: conflicts while merging a! (edit, then use 'hg resolve --mark')
0 files updated, 0 files merged, 0 files removed, 1 files unresolved
- use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon
+ use 'hg resolve' to retry unresolved file merges or 'hg merge --abort' to abandon
[1]
$ cat a
@@ -207,7 +207,7 @@
merging a
warning: conflicts while merging a! (edit, then use 'hg resolve --mark')
0 files updated, 0 files merged, 0 files removed, 1 files unresolved
- use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon
+ use 'hg resolve' to retry unresolved file merges or 'hg merge --abort' to abandon
[1]
$ cat a
Small Mathematical Series.
@@ -254,7 +254,7 @@
merging a
warning: conflicts while merging a! (edit, then use 'hg resolve --mark')
1 files updated, 0 files merged, 0 files removed, 1 files unresolved
- use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon
+ use 'hg resolve' to retry unresolved file merges or 'hg merge --abort' to abandon
[1]
$ hg resolve --tool :merge-other a
merging a
--- a/tests/test-context.py Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-context.py Mon Jan 22 17:53:02 2018 -0500
@@ -32,7 +32,7 @@
# test memctx with non-ASCII commit message
def filectxfn(repo, memctx, path):
- return context.memfilectx(repo, "foo", "")
+ return context.memfilectx(repo, memctx, "foo", "")
ctx = context.memctx(repo, ['tip', None],
encoding.tolocal("Gr\xc3\xbcezi!"),
@@ -49,7 +49,7 @@
data, flags = fctx.data(), fctx.flags()
if f == 'foo':
data += 'bar\n'
- return context.memfilectx(repo, f, data, 'l' in flags, 'x' in flags)
+ return context.memfilectx(repo, memctx, f, data, 'l' in flags, 'x' in flags)
ctxa = repo.changectx(0)
ctxb = context.memctx(repo, [ctxa.node(), None], "test diff", ["foo"],
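
The tests/test-commit.t and tests/test-context.py hunks above track an internal
API change: context.memfilectx() now takes the owning memctx as its second
positional argument, so a filectxfn now passes it through to memfilectx(). A
minimal sketch of the updated calling convention, modeled on the test code
above (the path, commit message and user are placeholder values, not taken
from the patch):

    # illustrative only; mirrors the memfilectx/memctx calls in the hunks above
    from mercurial import context, hg, node
    from mercurial import ui as uimod

    u = uimod.ui.load()
    repo = hg.repository(u, '.')

    def filectxfn(repo, memctx, path):
        # memfilectx() now receives the memctx as its second argument
        return context.memfilectx(repo, memctx, path, 'contents of %s\n' % path)

    ctx = context.memctx(repo, [repo['tip'].node(), node.nullid],
                         'example commit', ['example-file'], filectxfn, 'test')
    repo.commitctx(ctx)
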
--- a/tests/test-contrib-check-code.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-contrib-check-code.t Mon Jan 22 17:53:02 2018 -0500
@@ -173,6 +173,17 @@
don't use old-style two-argument raise, use Exception(message)
[1]
+ $ cat <<EOF > tab.t
+ > indent
+ > > heredoc
+ > EOF
+ $ "$check_code" tab.t
+ tab.t:1:
+ > indent
+ don't use tabs to indent
+ [1]
+ $ rm tab.t
+
$ cat > rst.py <<EOF
> """problematic rst text
>
--- a/tests/test-contrib-perf.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-contrib-perf.t Mon Jan 22 17:53:02 2018 -0500
@@ -55,6 +55,8 @@
benchmark parsing bookmarks from disk to memory
perfbranchmap
benchmark the update of a branchmap
+ perfbundleread
+ Benchmark reading of bundle files.
perfcca (no help text available)
perfchangegroupchangelog
Benchmark producing a changelog group for a changegroup.
@@ -173,3 +175,7 @@
$ (testrepohg files -r 1.2 glob:mercurial/*.c glob:mercurial/*.py;
> testrepohg files -r tip glob:mercurial/*.c glob:mercurial/*.py) |
> "$TESTDIR"/check-perf-code.py contrib/perf.py
+ contrib/perf.py:\d+: (re)
+ > from mercurial import (
+ import newer module separately in try clause for early Mercurial
+ [1]
--- a/tests/test-convert-authormap.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-convert-authormap.t Mon Jan 22 17:53:02 2018 -0500
@@ -27,7 +27,7 @@
sorting...
converting...
0 foo
- writing author map file $TESTTMP/new/.hg/authormap (glob)
+ writing author map file $TESTTMP/new/.hg/authormap
$ cat new/.hg/authormap
user name=Long User Name
$ hg -Rnew log
@@ -44,7 +44,7 @@
$ hg init new
$ mv authormap.txt new/.hg/authormap
$ hg convert orig new
- ignoring bad line in author map file $TESTTMP/new/.hg/authormap: this line is ignored (glob)
+ ignoring bad line in author map file $TESTTMP/new/.hg/authormap: this line is ignored
scanning source...
sorting...
converting...
--- a/tests/test-convert-cvs.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-convert-cvs.t Mon Jan 22 17:53:02 2018 -0500
@@ -80,7 +80,12 @@
since it does not use DST (unlike other U.S. time zones) and is always
a fixed difference from UTC.
- $ TZ=US/Hawaii hg convert --config convert.localtimezone=True src src-hg
+This choice only works reliably on Linux environments; at least on
+FreeBSD 11 this timezone is not known. A better choice is
+TZ=Pacific/Johnston: on Linux "US/Hawaii" is just a symlink to that
+name, and it is also known on FreeBSD and on Solaris.
+
+ $ TZ=Pacific/Johnston hg convert --config convert.localtimezone=True src src-hg
initializing destination src-hg repository
connecting to $TESTTMP/cvsrepo
scanning source...
@@ -170,7 +175,7 @@
convert again
- $ TZ=US/Hawaii hg convert --config convert.localtimezone=True src src-hg
+ $ TZ=Pacific/Johnston hg convert --config convert.localtimezone=True src src-hg
connecting to $TESTTMP/cvsrepo
scanning source...
collecting CVS rlog
@@ -231,7 +236,7 @@
convert again
- $ TZ=US/Hawaii hg convert --config convert.localtimezone=True src src-hg
+ $ TZ=Pacific/Johnston hg convert --config convert.localtimezone=True src src-hg
connecting to $TESTTMP/cvsrepo
scanning source...
collecting CVS rlog
@@ -249,7 +254,7 @@
convert again with --filemap
- $ TZ=US/Hawaii hg convert --config convert.localtimezone=True --filemap filemap src src-filemap
+ $ TZ=Pacific/Johnston hg convert --config convert.localtimezone=True --filemap filemap src src-filemap
connecting to $TESTTMP/cvsrepo
scanning source...
collecting CVS rlog
@@ -296,7 +301,7 @@
convert again
- $ TZ=US/Hawaii hg convert --config convert.cvsps.fuzz=2 --config convert.localtimezone=True src src-hg
+ $ TZ=Pacific/Johnston hg convert --config convert.cvsps.fuzz=2 --config convert.localtimezone=True src src-hg
connecting to $TESTTMP/cvsrepo
scanning source...
collecting CVS rlog
--- a/tests/test-convert-filemap.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-convert-filemap.t Mon Jan 22 17:53:02 2018 -0500
@@ -637,7 +637,7 @@
$ cd namedbranch
$ hg --config extensions.mq= strip tip
1 files updated, 0 files merged, 0 files removed, 0 files unresolved
- saved backup bundle to $TESTTMP/namedbranch/.hg/strip-backup/73899bcbe45c-92adf160-backup.hg (glob)
+ saved backup bundle to $TESTTMP/namedbranch/.hg/strip-backup/73899bcbe45c-92adf160-backup.hg
$ hg up foo
2 files updated, 0 files merged, 0 files removed, 0 files unresolved
$ hg merge default
--- a/tests/test-convert-git.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-convert-git.t Mon Jan 22 17:53:02 2018 -0500
@@ -936,7 +936,7 @@
$ COMMIT_OBJ=1c/0ce3c5886f83a1d78a7b517cdff5cf9ca17bdd
$ mv git-repo4/.git/objects/$COMMIT_OBJ git-repo4/.git/objects/$COMMIT_OBJ.tmp
$ hg convert git-repo4 git-repo4-broken-hg 2>&1 | grep 'abort:'
- abort: cannot retrieve number of commits in $TESTTMP/git-repo4/.git (glob)
+ abort: cannot retrieve number of commits in $TESTTMP/git-repo4/.git
$ mv git-repo4/.git/objects/$COMMIT_OBJ.tmp git-repo4/.git/objects/$COMMIT_OBJ
damage git repository by renaming a blob object
--- a/tests/test-convert-svn-encoding.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-convert-svn-encoding.t Mon Jan 22 17:53:02 2018 -0500
@@ -12,7 +12,7 @@
$ hg --debug convert svn-repo A-hg --config progress.debug=1
initializing destination A-hg repository
- reparent to file://*/svn-repo (glob)
+ reparent to file:/*/$TESTTMP/svn-repo (glob)
run hg sink pre-conversion action
scanning source...
found trunk at 'trunk'
@@ -21,7 +21,7 @@
found branch branch\xc3\xa9 at 5 (esc)
found branch branch\xc3\xa9e at 6 (esc)
scanning: 1/4 revisions (25.00%)
- reparent to file://*/svn-repo/trunk (glob)
+ reparent to file:/*/$TESTTMP/svn-repo/trunk (glob)
fetching revision log for "/trunk" from 4 to 0
parsing revision 4 (2 changes)
parsing revision 3 (4 changes)
@@ -31,18 +31,18 @@
'/branches' is not under '/trunk', ignoring
'/tags' is not under '/trunk', ignoring
scanning: 2/4 revisions (50.00%)
- reparent to file://*/svn-repo/branches/branch%C3%A9 (glob)
+ reparent to file:/*/$TESTTMP/svn-repo/branches/branch%C3%A9 (glob)
fetching revision log for "/branches/branch\xc3\xa9" from 5 to 0 (esc)
parsing revision 5 (1 changes)
- reparent to file://*/svn-repo (glob)
- reparent to file://*/svn-repo/branches/branch%C3%A9 (glob)
+ reparent to file:/*/$TESTTMP/svn-repo (glob)
+ reparent to file:/*/$TESTTMP/svn-repo/branches/branch%C3%A9 (glob)
found parent of branch /branches/branch\xc3\xa9 at 4: /trunk (esc)
scanning: 3/4 revisions (75.00%)
- reparent to file://*/svn-repo/branches/branch%C3%A9e (glob)
+ reparent to file:/*/$TESTTMP/svn-repo/branches/branch%C3%A9e (glob)
fetching revision log for "/branches/branch\xc3\xa9e" from 6 to 0 (esc)
parsing revision 6 (1 changes)
- reparent to file://*/svn-repo (glob)
- reparent to file://*/svn-repo/branches/branch%C3%A9e (glob)
+ reparent to file:/*/$TESTTMP/svn-repo (glob)
+ reparent to file:/*/$TESTTMP/svn-repo/branches/branch%C3%A9e (glob)
found parent of branch /branches/branch\xc3\xa9e at 5: /branches/branch\xc3\xa9 (esc)
scanning: 4/4 revisions (100.00%)
scanning: 5/4 revisions (125.00%)
@@ -57,7 +57,7 @@
4 hello
source: svn:afeb9c47-92ff-4c0c-9f72-e1f6eb8ac9af/trunk@2
converting: 1/6 revisions (16.67%)
- reparent to file://*/svn-repo/trunk (glob)
+ reparent to file:/*/$TESTTMP/svn-repo/trunk (glob)
scanning paths: /trunk/\xc3\xa0 0/3 paths (0.00%) (esc)
scanning paths: /trunk/\xc3\xa0/e\xcc\x81 1/3 paths (33.33%) (esc)
scanning paths: /trunk/\xc3\xa9 2/3 paths (66.67%) (esc)
@@ -74,14 +74,14 @@
converting: 2/6 revisions (33.33%)
scanning paths: /trunk/\xc3\xa0 0/4 paths (0.00%) (esc)
gone from -1
- reparent to file://*/svn-repo (glob)
- reparent to file://*/svn-repo/trunk (glob)
+ reparent to file:/*/$TESTTMP/svn-repo (glob)
+ reparent to file:/*/$TESTTMP/svn-repo/trunk (glob)
scanning paths: /trunk/\xc3\xa8 1/4 paths (25.00%) (esc)
copied to \xc3\xa8 from \xc3\xa9@2 (esc)
scanning paths: /trunk/\xc3\xa9 2/4 paths (50.00%) (esc)
gone from -1
- reparent to file://*/svn-repo (glob)
- reparent to file://*/svn-repo/trunk (glob)
+ reparent to file:/*/$TESTTMP/svn-repo (glob)
+ reparent to file:/*/$TESTTMP/svn-repo/trunk (glob)
scanning paths: /trunk/\xc3\xb9 3/4 paths (75.00%) (esc)
mark /trunk/\xc3\xb9 came from \xc3\xa0:2 (esc)
getting files: \xc3\xa0/e\xcc\x81 1/4 files (25.00%) (esc)
@@ -101,12 +101,12 @@
converting: 3/6 revisions (50.00%)
scanning paths: /trunk/\xc3\xa8 0/2 paths (0.00%) (esc)
gone from -1
- reparent to file://*/svn-repo (glob)
- reparent to file://*/svn-repo/trunk (glob)
+ reparent to file:/*/$TESTTMP/svn-repo (glob)
+ reparent to file:/*/$TESTTMP/svn-repo/trunk (glob)
scanning paths: /trunk/\xc3\xb9 1/2 paths (50.00%) (esc)
gone from -1
- reparent to file://*/svn-repo (glob)
- reparent to file://*/svn-repo/trunk (glob)
+ reparent to file:/*/$TESTTMP/svn-repo (glob)
+ reparent to file:/*/$TESTTMP/svn-repo/trunk (glob)
getting files: \xc3\xa8 1/2 files (50.00%) (esc)
getting files: \xc3\xb9/e\xcc\x81 2/2 files (100.00%) (esc)
committing files:
@@ -116,21 +116,21 @@
1 branch to branch?
source: svn:afeb9c47-92ff-4c0c-9f72-e1f6eb8ac9af/branches/branch?@5
converting: 4/6 revisions (66.67%)
- reparent to file://*/svn-repo/branches/branch%C3%A9 (glob)
+ reparent to file:/*/$TESTTMP/svn-repo/branches/branch%C3%A9 (glob)
scanning paths: /branches/branch\xc3\xa9 0/1 paths (0.00%) (esc)
committing changelog
updating the branch cache
0 branch to branch?e
source: svn:afeb9c47-92ff-4c0c-9f72-e1f6eb8ac9af/branches/branch?e@6
converting: 5/6 revisions (83.33%)
- reparent to file://*/svn-repo/branches/branch%C3%A9e (glob)
+ reparent to file:/*/$TESTTMP/svn-repo/branches/branch%C3%A9e (glob)
scanning paths: /branches/branch\xc3\xa9e 0/1 paths (0.00%) (esc)
committing changelog
updating the branch cache
- reparent to file://*/svn-repo (glob)
- reparent to file://*/svn-repo/branches/branch%C3%A9e (glob)
- reparent to file://*/svn-repo (glob)
- reparent to file://*/svn-repo/branches/branch%C3%A9e (glob)
+ reparent to file:/*/$TESTTMP/svn-repo (glob)
+ reparent to file:/*/$TESTTMP/svn-repo/branches/branch%C3%A9e (glob)
+ reparent to file:/*/$TESTTMP/svn-repo (glob)
+ reparent to file:/*/$TESTTMP/svn-repo/branches/branch%C3%A9e (glob)
updating tags
committing files:
.hgtags
--- a/tests/test-convert-svn-sink.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-convert-svn-sink.t Mon Jan 22 17:53:02 2018 -0500
@@ -48,8 +48,8 @@
0 modify a file
$ svnupanddisplay a-hg-wc 2
2 1 test d1
- 2 1 test d1/d2 (glob)
- 2 1 test d1/d2/b (glob)
+ 2 1 test d1/d2
+ 2 1 test d1/d2/b
2 2 test .
2 2 test a
revision: 2
@@ -89,8 +89,8 @@
0 rename a file
$ svnupanddisplay a-hg-wc 1
3 1 test d1
- 3 1 test d1/d2 (glob)
- 3 1 test d1/d2/b (glob)
+ 3 1 test d1/d2
+ 3 1 test d1/d2/b
3 3 test .
3 3 test b
revision: 3
@@ -124,8 +124,8 @@
0 copy a file
$ svnupanddisplay a-hg-wc 1
4 1 test d1
- 4 1 test d1/d2 (glob)
- 4 1 test d1/d2/b (glob)
+ 4 1 test d1/d2
+ 4 1 test d1/d2/b
4 3 test b
4 4 test .
4 4 test c
@@ -161,8 +161,8 @@
0 remove a file
$ svnupanddisplay a-hg-wc 1
5 1 test d1
- 5 1 test d1/d2 (glob)
- 5 1 test d1/d2/b (glob)
+ 5 1 test d1/d2
+ 5 1 test d1/d2/b
5 4 test c
5 5 test .
revision: 5
@@ -203,8 +203,8 @@
0 make a file executable
$ svnupanddisplay a-hg-wc 1
6 1 test d1
- 6 1 test d1/d2 (glob)
- 6 1 test d1/d2/b (glob)
+ 6 1 test d1/d2
+ 6 1 test d1/d2/b
6 6 test .
6 6 test c
revision: 6
@@ -256,7 +256,7 @@
$ hg --cwd a up 5
0 files updated, 0 files merged, 1 files removed, 0 files unresolved
$ hg --cwd a --config extensions.strip= strip -r 6
- saved backup bundle to $TESTTMP/a/.hg/strip-backup/bd4f7b7a7067-ed505e42-backup.hg (glob)
+ saved backup bundle to $TESTTMP/a/.hg/strip-backup/bd4f7b7a7067-ed505e42-backup.hg
#endif
@@ -312,7 +312,7 @@
$ svnupanddisplay a-hg-wc 1
1 1 test .
1 1 test d1
- 1 1 test d1/a (glob)
+ 1 1 test d1/a
revision: 1
author: test
msg: add executable file in new directory
@@ -337,10 +337,10 @@
0 copy file to new directory
$ svnupanddisplay a-hg-wc 1
2 1 test d1
- 2 1 test d1/a (glob)
+ 2 1 test d1/a
2 2 test .
2 2 test d2
- 2 2 test d2/a (glob)
+ 2 2 test d2/a
revision: 2
author: test
msg: copy file to new directory
@@ -384,7 +384,7 @@
merging b
warning: conflicts while merging b! (edit, then use 'hg resolve --mark')
2 files updated, 0 files merged, 0 files removed, 1 files unresolved
- use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon
+ use 'hg resolve' to retry unresolved file merges or 'hg merge --abort' to abandon
[1]
$ hg --cwd b revert -r 2 b
$ hg --cwd b resolve -m b
--- a/tests/test-convert-svn-source.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-convert-svn-source.t Mon Jan 22 17:53:02 2018 -0500
@@ -32,8 +32,8 @@
$ cd ..
$ svn import -m "init projB" projB "$SVNREPOURL/proj%20B" | filter_svn_output | sort
- Adding projB/mytrunk (glob)
- Adding projB/tags (glob)
+ Adding projB/mytrunk
+ Adding projB/tags
Committed revision 1.
Update svn repository
@@ -253,3 +253,72 @@
abort: svn-empty: missing or unsupported repository
[255]
$ mv format svn-empty/format
+
+enable svn subrepos
+
+ $ cat >> $HGRCPATH <<EOF
+ > [subrepos]
+ > svn:allowed = true
+ > EOF
+
+try converting when we have an svn subrepo and a merge in hg superrepo (issue5657)
+
+ $ cd "$TESTTMP"
+ $ hg init withmerge
+ $ cd withmerge
+ $ echo "subrepo = [svn]$SVNREPOURL" >.hgsub
+ $ hg add .hgsub
+ $ svn checkout "$SVNREPOURL" subrepo | sort
+ A subrepo/proj B
+ A subrepo/proj B/mytrunk
+ A subrepo/proj B/mytrunk/letter .txt
+ A subrepo/proj B/mytrunk/letter2.txt
+ A subrepo/proj B/tags
+ A subrepo/proj B/tags/v0.1
+ A subrepo/proj B/tags/v0.1/letter .txt
+ A subrepo/proj B/tags/v0.2
+ A subrepo/proj B/tags/v0.2/letter .txt
+ A subrepo/proj B/tags/v0.2/letter2.txt
+ Checked out revision 9.
+ $ hg ci -m "Adding svn subrepo"
+ $ touch file1.txt
+ $ hg add file1.txt
+ $ hg ci -m "Adding file1"
+ $ hg up 0
+ 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
+ $ touch file2.txt
+ $ hg add file2.txt
+ $ hg ci -m "Adding file2"
+ created new head
+ $ hg merge 1
+ 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ (branch merge, don't forget to commit)
+ $ hg ci -m "merged"
+ $ cd ..
+ $ hg --config extensions.convert= convert withmerge withmerge-converted
+ initializing destination withmerge-converted repository
+ scanning source...
+ sorting...
+ converting...
+ 3 Adding svn subrepo
+ 2 Adding file1
+ 1 Adding file2
+ 0 merged
+ $ cd withmerge-converted
+ $ hg up | sort
+ 4 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ A subrepo/proj B
+ A subrepo/proj B/mytrunk
+ A subrepo/proj B/mytrunk/letter .txt
+ A subrepo/proj B/mytrunk/letter2.txt
+ A subrepo/proj B/tags
+ A subrepo/proj B/tags/v0.1
+ A subrepo/proj B/tags/v0.1/letter .txt
+ A subrepo/proj B/tags/v0.2
+ A subrepo/proj B/tags/v0.2/letter .txt
+ A subrepo/proj B/tags/v0.2/letter2.txt
+ Checked out revision 9.
+ $ ls
+ file1.txt
+ file2.txt
+ subrepo
--- a/tests/test-convert.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-convert.t Mon Jan 22 17:53:02 2018 -0500
@@ -476,7 +476,7 @@
assuming destination emptydir-hg
initializing destination emptydir-hg repository
emptydir does not look like a CVS checkout
- $TESTTMP/emptydir does not look like a Git repository (glob)
+ $TESTTMP/emptydir does not look like a Git repository
emptydir does not look like a Subversion repository
emptydir is not a local Mercurial repository
emptydir does not look like a darcs repository
--- a/tests/test-copy-move-merge.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-copy-move-merge.t Mon Jan 22 17:53:02 2018 -0500
@@ -82,7 +82,7 @@
$ hg strip -r . --config extensions.strip=
2 files updated, 0 files merged, 0 files removed, 0 files unresolved
- saved backup bundle to $TESTTMP/t/.hg/strip-backup/550bd84c0cd3-fc575957-backup.hg (glob)
+ saved backup bundle to $TESTTMP/t/.hg/strip-backup/550bd84c0cd3-fc575957-backup.hg
$ hg up -qC 2
$ hg rebase --keep -d 1 -b 2 --config extensions.rebase= --config experimental.copytrace=off --config ui.interactive=True << EOF
> c
@@ -122,7 +122,7 @@
$ hg rebase -d . -b 2 --config extensions.rebase= --config experimental.copytrace=off
rebasing 2:6adcf8c12e7d "copy b->x"
- saved backup bundle to $TESTTMP/copydisable/.hg/strip-backup/6adcf8c12e7d-ce4b3e75-rebase.hg (glob)
+ saved backup bundle to $TESTTMP/copydisable/.hg/strip-backup/6adcf8c12e7d-ce4b3e75-rebase.hg
$ hg up -q 3
$ hg log -f x -T '{rev} {desc}\n'
3 copy b->x
@@ -155,7 +155,7 @@
$ hg rebase -d 2 -s 3 --config extensions.rebase= --config experimental.copytrace=off
rebasing 3:47e1a9e6273b "copy a->b (2)" (tip)
- saved backup bundle to $TESTTMP/copydisable3/.hg/strip-backup/47e1a9e6273b-2d099c59-rebase.hg (glob)
+ saved backup bundle to $TESTTMP/copydisable3/.hg/strip-backup/47e1a9e6273b-2d099c59-rebase.hg
$ hg log -G -f b
@ changeset: 3:76024fb4b05b
--- a/tests/test-copytrace-heuristics.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-copytrace-heuristics.t Mon Jan 22 17:53:02 2018 -0500
@@ -55,7 +55,7 @@
rebasing 2:557f403c0afd "mod a, mod dir/file.txt" (tip)
merging b and a to b
merging dir2/file.txt and dir/file.txt to dir2/file.txt
- saved backup bundle to $TESTTMP/repo/.hg/strip-backup/557f403c0afd-9926eeff-rebase.hg (glob)
+ saved backup bundle to $TESTTMP/repo/.hg/strip-backup/557f403c0afd-9926eeff-rebase.hg
$ cd ..
$ rm -rf repo
@@ -125,7 +125,7 @@
$ hg rebase -s . -d 2
rebasing 3:9d5cf99c3d9f "mod a" (tip)
merging b and a to b
- saved backup bundle to $TESTTMP/repo/.hg/strip-backup/9d5cf99c3d9f-f02358cc-rebase.hg (glob)
+ saved backup bundle to $TESTTMP/repo/.hg/strip-backup/9d5cf99c3d9f-f02358cc-rebase.hg
$ cd ..
$ rm -rf repo
@@ -160,7 +160,7 @@
$ hg rebase -s . -d 0
rebasing 3:fbe97126b396 "mod b" (tip)
merging a and b to a
- saved backup bundle to $TESTTMP/repo/.hg/strip-backup/fbe97126b396-cf5452a1-rebase.hg (glob)
+ saved backup bundle to $TESTTMP/repo/.hg/strip-backup/fbe97126b396-cf5452a1-rebase.hg
$ cd ..
$ rm -rf repo
@@ -197,7 +197,7 @@
$ hg rebase -s . -d 2
rebasing 3:6b2f4cece40f "mod dir/a" (tip)
merging dir/b and dir/a to dir/b
- saved backup bundle to $TESTTMP/repo/.hg/strip-backup/6b2f4cece40f-503efe60-rebase.hg (glob)
+ saved backup bundle to $TESTTMP/repo/.hg/strip-backup/6b2f4cece40f-503efe60-rebase.hg
$ cd ..
$ rm -rf repo
@@ -255,7 +255,7 @@
$ hg rebase -s 2 -d 1
rebasing 2:ef716627c70b "mod a" (tip)
merging foo and a to foo
- saved backup bundle to $TESTTMP/repo/.hg/strip-backup/ef716627c70b-24681561-rebase.hg (glob)
+ saved backup bundle to $TESTTMP/repo/.hg/strip-backup/ef716627c70b-24681561-rebase.hg
$ cd ..
$ rm -rf repo
@@ -286,7 +286,7 @@
$ hg rebase -s 1 -d 2
rebasing 1:472e38d57782 "mv a b"
- saved backup bundle to $TESTTMP/repo/.hg/strip-backup/472e38d57782-17d50e29-rebase.hg (glob)
+ saved backup bundle to $TESTTMP/repo/.hg/strip-backup/472e38d57782-17d50e29-rebase.hg
$ hg up -q c492ed3c7e35dcd1dc938053b8adf56e2cfbd062
$ ls
b
@@ -320,7 +320,7 @@
$ hg rebase -s . -d 1
rebasing 2:a33d80b6e352 "mv dir/ dir2/" (tip)
merging dir/a and dir2/a to dir2/a
- saved backup bundle to $TESTTMP/repo/.hg/strip-backup/a33d80b6e352-fecb9ada-rebase.hg (glob)
+ saved backup bundle to $TESTTMP/repo/.hg/strip-backup/a33d80b6e352-fecb9ada-rebase.hg
$ cd ..
$ rm -rf server
$ rm -rf repo
@@ -355,7 +355,7 @@
$ hg rebase -s . -d 2
rebasing 3:d41316942216 "mod a" (tip)
merging c and a to c
- saved backup bundle to $TESTTMP/repo/.hg/strip-backup/d41316942216-2b5949bc-rebase.hg (glob)
+ saved backup bundle to $TESTTMP/repo/.hg/strip-backup/d41316942216-2b5949bc-rebase.hg
$ cd ..
$ rm -rf repo
@@ -391,7 +391,7 @@
merging a and b to b
rebasing 2:d3efd280421d "mv b c"
merging b and c to c
- saved backup bundle to $TESTTMP/repo/.hg/strip-backup/472e38d57782-ab8d3c58-rebase.hg (glob)
+ saved backup bundle to $TESTTMP/repo/.hg/strip-backup/472e38d57782-ab8d3c58-rebase.hg
$ cd ..
$ rm -rf repo
@@ -428,7 +428,7 @@
$ hg rebase -s . -d 2
rebasing 3:ef716627c70b "mod a" (tip)
merging b and a to b
- saved backup bundle to $TESTTMP/repo/.hg/strip-backup/ef716627c70b-24681561-rebase.hg (glob)
+ saved backup bundle to $TESTTMP/repo/.hg/strip-backup/ef716627c70b-24681561-rebase.hg
$ ls
b
c
@@ -500,7 +500,7 @@
rebasing 2:ef716627c70b "mod a" (tip)
merging b and a to b
merging c and a to c
- saved backup bundle to $TESTTMP/repo/repo/.hg/strip-backup/ef716627c70b-24681561-rebase.hg (glob)
+ saved backup bundle to $TESTTMP/repo/repo/.hg/strip-backup/ef716627c70b-24681561-rebase.hg
$ ls
b
c
@@ -624,7 +624,7 @@
$ hg rebase -s . -d 1 --config experimental.copytrace.sourcecommitlimit=100
rebasing 2:6207d2d318e7 "mod a" (tip)
merging dir2/b and dir1/a to dir2/b
- saved backup bundle to $TESTTMP/repo/repo/.hg/strip-backup/6207d2d318e7-1c9779ad-rebase.hg (glob)
+ saved backup bundle to $TESTTMP/repo/repo/.hg/strip-backup/6207d2d318e7-1c9779ad-rebase.hg
$ cat dir2/b
a
b
@@ -661,7 +661,7 @@
$ hg rebase -s . -d 1 --config experimental.copytrace.sourcecommitlimit=100
rebasing 2:e8919e7df8d0 "mv dir1 dir2" (tip)
- saved backup bundle to $TESTTMP/repo/repo/.hg/strip-backup/e8919e7df8d0-f62fab62-rebase.hg (glob)
+ saved backup bundle to $TESTTMP/repo/repo/.hg/strip-backup/e8919e7df8d0-f62fab62-rebase.hg
$ ls dir2
a
dummy
@@ -711,6 +711,6 @@
$ hg rebase -s 8b6e13696 -d . --config experimental.copytrace.sourcecommitlimit=100
rebasing 1:8b6e13696c38 "added more things to a"
merging foo/bar and a to foo/bar
- saved backup bundle to $TESTTMP/repo/repo/repo/.hg/strip-backup/8b6e13696c38-fc14ac83-rebase.hg (glob)
+ saved backup bundle to $TESTTMP/repo/repo/repo/.hg/strip-backup/8b6e13696c38-fc14ac83-rebase.hg
$ cd ..
$ rm -rf repo
--- a/tests/test-debugcommands.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-debugcommands.t Mon Jan 22 17:53:02 2018 -0500
@@ -1,4 +1,6 @@
$ cat << EOF >> $HGRCPATH
+ > [ui]
+ > interactive=yes
> [format]
> usegeneraldelta=yes
> EOF
@@ -77,6 +79,72 @@
}
]
+debugdelta chain with sparse read enabled
+
+ $ cat >> $HGRCPATH <<EOF
+ > [experimental]
+ > sparse-read = True
+ > EOF
+ $ hg debugdeltachain -m
+ rev chain# chainlen prev delta size rawsize chainsize ratio lindist extradist extraratio readsize largestblk rddensity srchunks
+ 0 1 1 -1 base 44 43 44 1.02326 44 0 0.00000 44 44 1.00000 1
+
+ $ hg debugdeltachain -m -T '{rev} {chainid} {chainlen} {readsize} {largestblock} {readdensity}\n'
+ 0 1 1 44 44 1.0
+
+ $ hg debugdeltachain -m -Tjson
+ [
+ {
+ "chainid": 1,
+ "chainlen": 1,
+ "chainratio": 1.02325581395,
+ "chainsize": 44,
+ "compsize": 44,
+ "deltatype": "base",
+ "extradist": 0,
+ "extraratio": 0.0,
+ "largestblock": 44,
+ "lindist": 44,
+ "prevrev": -1,
+ "readdensity": 1.0,
+ "readsize": 44,
+ "rev": 0,
+ "srchunks": 1,
+ "uncompsize": 43
+ }
+ ]
+
+ $ printf "This test checks things.\n" >> a
+ $ hg ci -m a
+ $ hg branch other
+ marked working directory as branch other
+ (branches are permanent and global, did you want a bookmark?)
+ $ for i in `$TESTDIR/seq.py 5`; do
+ > printf "shorter ${i}" >> a
+ > hg ci -m "a other:$i"
+ > hg up -q default
+ > printf "for the branch default we want longer chains: ${i}" >> a
+ > hg ci -m "a default:$i"
+ > hg up -q other
+ > done
+ $ hg debugdeltachain a -T '{rev} {srchunks}\n' \
+ > --config experimental.sparse-read.density-threshold=0.50 \
+ > --config experimental.sparse-read.min-gap-size=0
+ 0 1
+ 1 1
+ 2 1
+ 3 1
+ 4 1
+ 5 1
+ 6 1
+ 7 1
+ 8 1
+ 9 1
+ 10 2
+ 11 1
+ $ hg --config extensions.strip= strip --no-backup -r 1
+ 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+
Test max chain len
$ cat >> $HGRCPATH << EOF
> [format]
@@ -111,6 +179,126 @@
7 6 -1 ??? ??? ??? ??? ??? 0 ??? ???? ? 1 2 (glob)
8 7 -1 ??? ??? ??? ??? ??? 0 ??? ???? ? 1 3 (glob)
+Test debuglocks command:
+
+ $ hg debuglocks
+ lock: free
+ wlock: free
+
+* Test setting the lock
+
+waitlock <file> waits for the given file to be created. If it isn't created in a
+reasonable amount of time, it displays an error message and returns 1.
+ $ waitlock() {
+ > start=`date +%s`
+ > timeout=5
+ > while [ \( ! -f $1 \) -a \( ! -L $1 \) ]; do
+ > now=`date +%s`
+ > if [ "`expr $now - $start`" -gt $timeout ]; then
+ > echo "timeout: $1 was not created in $timeout seconds"
+ > return 1
+ > fi
+ > sleep 0.1
+ > done
+ > }
+ $ dolock() {
+ > {
+ > waitlock .hg/unlock
+ > rm -f .hg/unlock
+ > echo y
+ > } | hg debuglocks "$@" > /dev/null
+ > }
+ $ dolock -s &
+ $ waitlock .hg/store/lock
+
+ $ hg debuglocks
+ lock: user *, process * (*s) (glob)
+ wlock: free
+ [1]
+ $ touch .hg/unlock
+ $ wait
+ $ [ -f .hg/store/lock ] || echo "There is no lock"
+ There is no lock
+
+* Test setting the wlock
+
+ $ dolock -S &
+ $ waitlock .hg/wlock
+
+ $ hg debuglocks
+ lock: free
+ wlock: user *, process * (*s) (glob)
+ [1]
+ $ touch .hg/unlock
+ $ wait
+ $ [ -f .hg/wlock ] || echo "There is no wlock"
+ There is no wlock
+
+* Test setting both locks
+
+ $ dolock -Ss &
+ $ waitlock .hg/wlock && waitlock .hg/store/lock
+
+ $ hg debuglocks
+ lock: user *, process * (*s) (glob)
+ wlock: user *, process * (*s) (glob)
+ [2]
+
+* Test failing to set a lock
+
+ $ hg debuglocks -s
+ abort: lock is already held
+ [255]
+
+ $ hg debuglocks -S
+ abort: wlock is already held
+ [255]
+
+ $ touch .hg/unlock
+ $ wait
+
+ $ hg debuglocks
+ lock: free
+ wlock: free
+
+* Test forcing the lock
+
+ $ dolock -s &
+ $ waitlock .hg/store/lock
+
+ $ hg debuglocks
+ lock: user *, process * (*s) (glob)
+ wlock: free
+ [1]
+
+ $ hg debuglocks -L
+
+ $ hg debuglocks
+ lock: free
+ wlock: free
+
+ $ touch .hg/unlock
+ $ wait
+
+* Test forcing the wlock
+
+ $ dolock -S &
+ $ waitlock .hg/wlock
+
+ $ hg debuglocks
+ lock: free
+ wlock: user *, process * (*s) (glob)
+ [1]
+
+ $ hg debuglocks -W
+
+ $ hg debuglocks
+ lock: free
+ wlock: free
+
+ $ touch .hg/unlock
+ $ wait
+
Test WdirUnsupported exception
$ hg debugdata -c ffffffffffffffffffffffffffffffffffffffff
@@ -156,3 +344,40 @@
from h hidden in g at:
debugstacktrace.py:6 in f
debugstacktrace.py:9 in g
+
+Test debugcapabilities command:
+
+ $ hg debugcapabilities ./debugrevlog/
+ Main capabilities:
+ branchmap
+ $USUAL_BUNDLE2_CAPS$
+ getbundle
+ known
+ lookup
+ pushkey
+ unbundle
+ Bundle2 capabilities:
+ HG20
+ bookmarks
+ changegroup
+ 01
+ 02
+ digests
+ md5
+ sha1
+ sha512
+ error
+ abort
+ unsupportedcontent
+ pushraced
+ pushkey
+ hgtagsfnodes
+ listkeys
+ phases
+ heads
+ pushkey
+ remote-changegroup
+ http
+ https
+ stream
+ v2
--- a/tests/test-default-push.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-default-push.t Mon Jan 22 17:53:02 2018 -0500
@@ -27,7 +27,7 @@
Push should push to 'default' when 'default-push' not set:
$ hg --cwd b push
- pushing to $TESTTMP/a (glob)
+ pushing to $TESTTMP/a
searching for changes
adding changesets
adding manifests
@@ -39,7 +39,7 @@
$ echo '[paths]' >> b/.hg/hgrc
$ echo 'default-push = ../c' >> b/.hg/hgrc
$ hg --cwd b push
- pushing to $TESTTMP/c (glob)
+ pushing to $TESTTMP/c
searching for changes
adding changesets
adding manifests
@@ -49,7 +49,7 @@
But push should push to 'default' if explicitly specified (issue5000):
$ hg --cwd b push default
- pushing to $TESTTMP/a (glob)
+ pushing to $TESTTMP/a
searching for changes
no changes found
[1]
@@ -63,7 +63,7 @@
$ touch foo
$ hg -q commit -A -m 'add foo'
$ hg --config paths.default-push=../a push
- pushing to $TESTTMP/a (glob)
+ pushing to $TESTTMP/a
searching for changes
adding changesets
adding manifests
--- a/tests/test-devel-warnings.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-devel-warnings.t Mon Jan 22 17:53:02 2018 -0500
@@ -99,7 +99,7 @@
#if no-chg
$ hg buggylocking --traceback
devel-warn: "wlock" acquired after "lock" at:
- */hg:* in <module> (glob)
+ */hg:* in <module> (glob) (?)
*/mercurial/dispatch.py:* in run (glob)
*/mercurial/dispatch.py:* in dispatch (glob)
*/mercurial/dispatch.py:* in _runcatch (glob)
@@ -115,7 +115,7 @@
#else
$ hg buggylocking --traceback
devel-warn: "wlock" acquired after "lock" at:
- */hg:* in <module> (glob)
+ */hg:* in <module> (glob) (?)
*/mercurial/dispatch.py:* in run (glob)
*/mercurial/dispatch.py:* in dispatch (glob)
*/mercurial/dispatch.py:* in _runcatch (glob)
@@ -177,7 +177,7 @@
$ hg oldanddeprecated --traceback
devel-warn: foorbar is deprecated, go shopping
(compatibility will be dropped after Mercurial-42.1337, update your code.) at:
- */hg:* in <module> (glob)
+ */hg:* in <module> (glob) (?)
*/mercurial/dispatch.py:* in run (glob)
*/mercurial/dispatch.py:* in dispatch (glob)
*/mercurial/dispatch.py:* in _runcatch (glob)
@@ -238,7 +238,7 @@
1970/01/01 00:00:00 bob @cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b (5000)> oldanddeprecated --traceback
1970/01/01 00:00:00 bob @cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b (5000)> devel-warn: foorbar is deprecated, go shopping
(compatibility will be dropped after Mercurial-42.1337, update your code.) at:
- */hg:* in <module> (glob)
+ */hg:* in <module> (glob) (?)
*/mercurial/dispatch.py:* in run (glob)
*/mercurial/dispatch.py:* in dispatch (glob)
*/mercurial/dispatch.py:* in _runcatch (glob)
--- a/tests/test-diff-color.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-diff-color.t Mon Jan 22 17:53:02 2018 -0500
@@ -259,3 +259,134 @@
\x1b[0;32m+\x1b[0m\x1b[0;1;35m \x1b[0m\x1b[0;32mall\x1b[0m\x1b[0;1;35m \x1b[0m\x1b[0;32mtabs\x1b[0m\x1b[0;1;41m \x1b[0m (esc)
$ cd ..
+
+test inline color diff
+
+ $ hg init inline
+ $ cd inline
+ $ cat > file1 << EOF
+ > this is the first line
+ > this is the second line
+ > third line starts with space
+ > + starts with a plus sign
+ > this one with one tab
+ > now with full two tabs
+ > now tabs everywhere, much fun
+ >
+ > this line won't change
+ >
+ > two lines are going to
+ > be changed into three!
+ >
+ > three of those lines will
+ > collapse onto one
+ > (to see if it works)
+ > EOF
+ $ hg add file1
+ $ hg ci -m 'commit'
+
+ $ cat > file1 << EOF
+ > that is the first paragraph
+ > this is the second line
+ > third line starts with space
+ > - starts with a minus sign
+ > this one with two tab
+ > now with full three tabs
+ > now there are tabs everywhere, much fun
+ >
+ > this line won't change
+ >
+ > two lines are going to
+ > (entirely magically,
+ > assuming this works)
+ > be changed into four!
+ >
+ > three of those lines have
+ > collapsed onto one
+ > EOF
+ $ hg diff --config experimental.worddiff=False --color=debug
+ [diff.diffline|diff --git a/file1 b/file1]
+ [diff.file_a|--- a/file1]
+ [diff.file_b|+++ b/file1]
+ [diff.hunk|@@ -1,16 +1,17 @@]
+ [diff.deleted|-this is the first line]
+ [diff.deleted|-this is the second line]
+ [diff.deleted|- third line starts with space]
+ [diff.deleted|-+ starts with a plus sign]
+ [diff.deleted|-][diff.tab| ][diff.deleted|this one with one tab]
+ [diff.deleted|-][diff.tab| ][diff.deleted|now with full two tabs]
+ [diff.deleted|-][diff.tab| ][diff.deleted|now tabs][diff.tab| ][diff.deleted|everywhere, much fun]
+ [diff.inserted|+that is the first paragraph]
+ [diff.inserted|+ this is the second line]
+ [diff.inserted|+third line starts with space]
+ [diff.inserted|+- starts with a minus sign]
+ [diff.inserted|+][diff.tab| ][diff.inserted|this one with two tab]
+ [diff.inserted|+][diff.tab| ][diff.inserted|now with full three tabs]
+ [diff.inserted|+][diff.tab| ][diff.inserted|now there are tabs][diff.tab| ][diff.inserted|everywhere, much fun]
+
+ this line won't change
+
+ two lines are going to
+ [diff.deleted|-be changed into three!]
+ [diff.inserted|+(entirely magically,]
+ [diff.inserted|+ assuming this works)]
+ [diff.inserted|+be changed into four!]
+
+ [diff.deleted|-three of those lines will]
+ [diff.deleted|-collapse onto one]
+ [diff.deleted|-(to see if it works)]
+ [diff.inserted|+three of those lines have]
+ [diff.inserted|+collapsed onto one]
+ $ hg diff --config experimental.worddiff=True --color=debug
+ [diff.diffline|diff --git a/file1 b/file1]
+ [diff.file_a|--- a/file1]
+ [diff.file_b|+++ b/file1]
+ [diff.hunk|@@ -1,16 +1,17 @@]
+ [diff.deleted|-this is the ][diff.deleted.highlight|first][diff.deleted| line]
+ [diff.deleted|-this is the second line]
+ [diff.deleted|-][diff.deleted.highlight| ][diff.deleted|third line starts with space]
+ [diff.deleted|-][diff.deleted.highlight|+][diff.deleted| starts with a ][diff.deleted.highlight|plus][diff.deleted| sign]
+ [diff.deleted|-][diff.tab| ][diff.deleted|this one with ][diff.deleted.highlight|one][diff.deleted| tab]
+ [diff.deleted|-][diff.tab| ][diff.deleted|now with full ][diff.deleted.highlight|two][diff.deleted| tabs]
+ [diff.deleted|-][diff.tab| ][diff.deleted|now tabs][diff.tab| ][diff.deleted|everywhere, much fun]
+ [diff.inserted|+that is the first paragraph]
+ [diff.inserted|+][diff.inserted.highlight| ][diff.inserted|this is the ][diff.inserted.highlight|second][diff.inserted| line]
+ [diff.inserted|+third line starts with space]
+ [diff.inserted|+][diff.inserted.highlight|-][diff.inserted| starts with a ][diff.inserted.highlight|minus][diff.inserted| sign]
+ [diff.inserted|+][diff.tab| ][diff.inserted|this one with ][diff.inserted.highlight|two][diff.inserted| tab]
+ [diff.inserted|+][diff.tab| ][diff.inserted|now with full ][diff.inserted.highlight|three][diff.inserted| tabs]
+ [diff.inserted|+][diff.tab| ][diff.inserted|now][diff.inserted.highlight| there are][diff.inserted| tabs][diff.tab| ][diff.inserted|everywhere, much fun]
+
+ this line won't change
+
+ two lines are going to
+ [diff.deleted|-be changed into ][diff.deleted.highlight|three][diff.deleted|!]
+ [diff.inserted|+(entirely magically,]
+ [diff.inserted|+ assuming this works)]
+ [diff.inserted|+be changed into ][diff.inserted.highlight|four][diff.inserted|!]
+
+ [diff.deleted|-three of those lines ][diff.deleted.highlight|will]
+ [diff.deleted|-][diff.deleted.highlight|collapse][diff.deleted| onto one]
+ [diff.deleted|-(to see if it works)]
+ [diff.inserted|+three of those lines ][diff.inserted.highlight|have]
+ [diff.inserted|+][diff.inserted.highlight|collapsed][diff.inserted| onto one]
+
+multibyte character shouldn't be broken up in word diff:
+
+ $ $PYTHON <<'EOF'
+ > with open("utf8", "wb") as f:
+ > f.write(b"blah \xe3\x82\xa2 blah\n")
+ > EOF
+ $ hg ci -Am 'add utf8 char' utf8
+ $ $PYTHON <<'EOF'
+ > with open("utf8", "wb") as f:
+ > f.write(b"blah \xe3\x82\xa4 blah\n")
+ > EOF
+ $ hg ci -m 'slightly change utf8 char' utf8
+ $ hg diff --config experimental.worddiff=True --color=debug -c.
+ [diff.diffline|diff --git a/utf8 b/utf8]
+ [diff.file_a|--- a/utf8]
+ [diff.file_b|+++ b/utf8]
+ [diff.hunk|@@ -1,1 +1,1 @@]
+ [diff.deleted|-blah ][diff.deleted.highlight|\xe3\x82\xa2][diff.deleted| blah] (esc)
+ [diff.inserted|+blah ][diff.inserted.highlight|\xe3\x82\xa4][diff.inserted| blah] (esc)
--- a/tests/test-diff-upgrade.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-diff-upgrade.t Mon Jan 22 17:53:02 2018 -0500
@@ -16,7 +16,7 @@
$ echo regular > regular
$ echo rmregular > rmregular
- $ $PYTHON -c "file('bintoregular', 'wb').write('\0')"
+ $ $PYTHON -c "open('bintoregular', 'wb').write(b'\0')"
$ touch rmempty
$ echo exec > exec
$ chmod +x exec
@@ -26,7 +26,7 @@
$ echo unsetexec > unsetexec
$ chmod +x unsetexec
$ echo binary > binary
- $ $PYTHON -c "file('rmbinary', 'wb').write('\0')"
+ $ $PYTHON -c "open('rmbinary', 'wb').write(b'\0')"
$ hg ci -Am addfiles
adding binary
adding bintoregular
@@ -50,8 +50,8 @@
$ rm rmexec
$ chmod +x setexec
$ chmod -x unsetexec
- $ $PYTHON -c "file('binary', 'wb').write('\0\0')"
- $ $PYTHON -c "file('newbinary', 'wb').write('\0')"
+ $ $PYTHON -c "open('binary', 'wb').write(b'\0\0')"
+ $ $PYTHON -c "open('newbinary', 'wb').write(b'\0')"
$ rm rmbinary
$ hg addremove -s 0
adding newbinary
--- a/tests/test-diffstat.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-diffstat.t Mon Jan 22 17:53:02 2018 -0500
@@ -35,7 +35,7 @@
$ hg ci -m appenda
- >>> open("c", "wb").write("\0")
+ >>> open("c", "wb").write(b"\0")
$ touch d
$ hg add c d
@@ -54,7 +54,7 @@
$ hg ci -m createb
- >>> open("file with spaces", "wb").write("\0")
+ >>> open("file with spaces", "wb").write(b"\0")
$ hg add "file with spaces"
Filename with spaces diffstat:
@@ -151,7 +151,7 @@
1 files changed, 1 insertions(+), 0 deletions(-)
$ hg diff --stat --root ../dir1 ../dir2
- warning: ../dir2 not inside relative root . (glob)
+ warning: ../dir2 not inside relative root .
$ hg diff --stat --root . -I old
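
The tests/test-diff-upgrade.t and tests/test-diffstat.t hunks above replace the
Python 2-only file() builtin with open() and write binary payloads as bytes
literals, which keeps these inline helpers working under Python 3 as well. The
pattern in isolation (the filename is a placeholder):

    # open() exists on both Python 2 and 3; file() was removed in Python 3,
    # and b'\0' is a bytes literal on both.
    with open('binary-fixture', 'wb') as f:
        f.write(b'\0')
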
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-directaccess.t Mon Jan 22 17:53:02 2018 -0500
@@ -0,0 +1,201 @@
+Tests for the access level various commands have on hidden commits, based on
+their type.
+
+Setting the required config to start this
+
+ $ cat >> $HGRCPATH <<EOF
+ > [experimental]
+ > evolution=createmarkers, allowunstable
+ > directaccess=True
+ > directaccess.revnums=True
+ > [extensions]
+ > amend =
+ > EOF
+
+ $ hg init repo
+ $ cd repo
+ $ for ch in a b c; do touch $ch; echo "foo" >> $ch; hg ci -Aqm "Added "$ch; done
+
+ $ hg log -G -T '{rev}:{node} {desc}' --hidden
+ @ 2:28ad74487de9599d00d81085be739c61fc340652 Added c
+ |
+ o 1:29becc82797a4bc11ec8880b58eaecd2ab3e7760 Added b
+ |
+ o 0:18d04c59bb5d2d4090ad9a5b59bd6274adb63add Added a
+
+ $ echo "bar" >> c
+ $ hg amend
+
+ $ hg log -G -T '{rev}:{node} {desc}' --hidden
+ @ 3:2443a0e664694756d8b435d06b6ad84f941b6fc0 Added c
+ |
+ | x 2:28ad74487de9599d00d81085be739c61fc340652 Added c
+ |/
+ o 1:29becc82797a4bc11ec8880b58eaecd2ab3e7760 Added b
+ |
+ o 0:18d04c59bb5d2d4090ad9a5b59bd6274adb63add Added a
+
+Testing read-only commands on the hidden revision
+
+Testing with rev number
+
+ $ hg exp 2 --config experimental.directaccess.revnums=False
+ abort: hidden revision '2' was rewritten as: 2443a0e66469!
+ (use --hidden to access hidden revisions)
+ [255]
+
+ $ hg exp 2
+ # HG changeset patch
+ # User test
+ # Date 0 0
+ # Thu Jan 01 00:00:00 1970 +0000
+ # Node ID 28ad74487de9599d00d81085be739c61fc340652
+ # Parent 29becc82797a4bc11ec8880b58eaecd2ab3e7760
+ Added c
+
+ diff -r 29becc82797a -r 28ad74487de9 c
+ --- /dev/null Thu Jan 01 00:00:00 1970 +0000
+ +++ b/c Thu Jan 01 00:00:00 1970 +0000
+ @@ -0,0 +1,1 @@
+ +foo
+
+ $ hg log -r 2
+ changeset: 2:28ad74487de9
+ user: test
+ date: Thu Jan 01 00:00:00 1970 +0000
+ obsolete: rewritten using amend as 3:2443a0e66469
+ summary: Added c
+
+ $ hg identify -r 2
+ 28ad74487de9
+
+ $ hg status --change 2
+ A c
+
+ $ hg status --change 2 --config experimental.directaccess.revnums=False
+ abort: hidden revision '2' was rewritten as: 2443a0e66469!
+ (use --hidden to access hidden revisions)
+ [255]
+
+ $ hg diff -c 2
+ diff -r 29becc82797a -r 28ad74487de9 c
+ --- /dev/null Thu Jan 01 00:00:00 1970 +0000
+ +++ b/c Thu Jan 01 00:00:00 1970 +0000
+ @@ -0,0 +1,1 @@
+ +foo
+
+Testing with hash
+
+`hg export`
+
+ $ hg exp 28ad74
+ # HG changeset patch
+ # User test
+ # Date 0 0
+ # Thu Jan 01 00:00:00 1970 +0000
+ # Node ID 28ad74487de9599d00d81085be739c61fc340652
+ # Parent 29becc82797a4bc11ec8880b58eaecd2ab3e7760
+ Added c
+
+ diff -r 29becc82797a -r 28ad74487de9 c
+ --- /dev/null Thu Jan 01 00:00:00 1970 +0000
+ +++ b/c Thu Jan 01 00:00:00 1970 +0000
+ @@ -0,0 +1,1 @@
+ +foo
+
+`hg log`
+
+ $ hg log -r 28ad74
+ changeset: 2:28ad74487de9
+ user: test
+ date: Thu Jan 01 00:00:00 1970 +0000
+ obsolete: rewritten using amend as 3:2443a0e66469
+ summary: Added c
+
+`hg cat`
+
+ $ hg cat -r 28ad74 c
+ foo
+
+`hg diff`
+
+ $ hg diff -c 28ad74
+ diff -r 29becc82797a -r 28ad74487de9 c
+ --- /dev/null Thu Jan 01 00:00:00 1970 +0000
+ +++ b/c Thu Jan 01 00:00:00 1970 +0000
+ @@ -0,0 +1,1 @@
+ +foo
+
+`hg files`
+
+ $ hg files -r 28ad74
+ a
+ b
+ c
+
+`hg identify`
+
+ $ hg identify -r 28ad74
+ 28ad74487de9
+
+`hg annotate`
+
+ $ hg annotate -r 28ad74 a
+ 0: foo
+
+`hg status`
+
+ $ hg status --change 28ad74
+ A c
+
+`hg archive`
+
+This should not throw an error
+ $ hg archive -r 28ad74 foo
+
+`hg update`
+
+ $ hg up 28ad74
+ updating to a hidden changeset 28ad74487de9
+ (hidden revision '28ad74487de9' was rewritten as: 2443a0e66469)
+ 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+
+ $ hg up 3
+ 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+
+ $ hg up
+ 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
+
+`hg revert`
+
+ $ hg revert -r 28ad74 --all
+ reverting c
+
+ $ hg diff
+ diff -r 2443a0e66469 c
+ --- a/c Thu Jan 01 00:00:00 1970 +0000
+ +++ b/c Thu Jan 01 00:00:00 1970 +0000
+ @@ -1,2 +1,1 @@
+ foo
+ -bar
+
+Commands with undefined cmdtype should not work right now
+
+ $ hg phase -r 28ad74
+ abort: hidden revision '28ad74' was rewritten as: 2443a0e66469!
+ (use --hidden to access hidden revisions)
+ [255]
+
+ $ hg phase -r 2
+ abort: hidden revision '2' was rewritten as: 2443a0e66469!
+ (use --hidden to access hidden revisions)
+ [255]
+
+Setting a bookmark will make that changeset unhidden, so this should come at the end
+
+ $ hg bookmarks -r 28ad74 book
+ bookmarking hidden changeset 28ad74487de9
+ (hidden revision '28ad74487de9' was rewritten as: 2443a0e66469)
+
+ $ hg bookmarks
+ book 2:28ad74487de9
--- a/tests/test-dirstate-race.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-dirstate-race.t Mon Jan 22 17:53:02 2018 -0500
@@ -45,7 +45,7 @@
#endif
$ hg add b dir1 d e
- adding dir1/c (glob)
+ adding dir1/c
$ hg commit -m test2
$ cat >> $TESTTMP/dirstaterace.py << EOF
--- a/tests/test-dirstate.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-dirstate.t Mon Jan 22 17:53:02 2018 -0500
@@ -11,9 +11,9 @@
adding a/b/c/d/y
adding a/b/c/d/z
$ hg mv a z
- moving a/b/c/d/x to z/b/c/d/x (glob)
- moving a/b/c/d/y to z/b/c/d/y (glob)
- moving a/b/c/d/z to z/b/c/d/z (glob)
+ moving a/b/c/d/x to z/b/c/d/x
+ moving a/b/c/d/y to z/b/c/d/y
+ moving a/b/c/d/z to z/b/c/d/z
Test name collisions
--- a/tests/test-dispatch.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-dispatch.t Mon Jan 22 17:53:02 2018 -0500
@@ -37,10 +37,17 @@
hg log [OPTION]... [FILE]
(use 'hg log -h' to show more help)
- $ hg log -R -- 2>&1 | grep 'hg log'
- hg log: option -R requires argument
- hg log [OPTION]... [FILE]
- (use 'hg log -h' to show more help)
+"--" may be an option value:
+
+ $ hg -R -- log
+ abort: repository -- not found!
+ [255]
+ $ hg log -R --
+ abort: repository -- not found!
+ [255]
+ $ hg log -T --
+ -- (no-eol)
+ $ hg log -T -- -k nomatch
Parsing of early options should stop at "--":
@@ -87,7 +94,7 @@
[255]
$ hg log -b --cwd=inexistent default
- abort: No such file or directory: 'inexistent'
+ abort: $ENOENT$: 'inexistent'
[255]
$ hg log -b '--config=ui.traceback=yes' 2>&1 | grep '^Traceback'
@@ -149,6 +156,10 @@
[255]
$ HGPLAIN=+strictflags hg --cwd .. -q -Ra log -b default
0:cb9a9f314b8b
+ $ HGPLAIN=+strictflags hg --cwd .. -q --repository a log -b default
+ 0:cb9a9f314b8b
+ $ HGPLAIN=+strictflags hg --cwd .. -q --repo a log -b default
+ 0:cb9a9f314b8b
For compatibility reasons, HGPLAIN=+strictflags is not enabled by plain HGPLAIN:
@@ -200,7 +211,7 @@
The output could be one of the following and something else:
chg: abort: failed to getcwd (errno = *) (glob)
abort: error getting current working directory: * (glob)
- sh: 0: getcwd() failed: No such file or directory
+ sh: 0: getcwd() failed: $ENOENT$
Since the exact behavior depends on the shell, only check it returns non-zero.
$ HGDEMANDIMPORT=disable hg version -q 2>/dev/null || false
[1]
--- a/tests/test-drawdag.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-drawdag.t Mon Jan 22 17:53:02 2018 -0500
@@ -206,9 +206,10 @@
> \|/
> A
> EOS
+ 1 new orphan changesets
$ hg log -r 'sort(all(), topo)' -G --hidden -T '{desc} {node}'
- o G 711f53bbef0bebd12eb6f0511d5e2e998b984846
+ * G 711f53bbef0bebd12eb6f0511d5e2e998b984846
|
x F 64a8289d249234b9886244d379f15e6b650b28e3
|
@@ -227,11 +228,11 @@
o A 426bada5c67598ca65036d57d9e4b64b0c1ce7a0
$ hg debugobsolete
- 112478962961147124edd43549aedd1a335e44bf 7fb047a69f220c21711122dfd94305a9efb60cba 64a8289d249234b9886244d379f15e6b650b28e3 711f53bbef0bebd12eb6f0511d5e2e998b984846 0 (Thu Jan 01 00:00:00 1970 +0000) {'operation': 'split', 'user': 'test'}
- 26805aba1e600a82e93661149f2313866a221a7b be0ef73c17ade3fc89dc41701eb9fc3a91b58282 0 (Thu Jan 01 00:00:00 1970 +0000) {'operation': 'replace', 'user': 'test'}
- be0ef73c17ade3fc89dc41701eb9fc3a91b58282 575c4b5ec114d64b681d33f8792853568bfb2b2c 0 (Thu Jan 01 00:00:00 1970 +0000) {'operation': 'replace', 'user': 'test'}
- 64a8289d249234b9886244d379f15e6b650b28e3 0 {7fb047a69f220c21711122dfd94305a9efb60cba} (Thu Jan 01 00:00:00 1970 +0000) {'operation': 'prune', 'user': 'test'}
- 58e6b987bf7045fcd9c54f496396ca1d1fc81047 0 {575c4b5ec114d64b681d33f8792853568bfb2b2c} (Thu Jan 01 00:00:00 1970 +0000) {'operation': 'prune', 'user': 'test'}
+ 112478962961147124edd43549aedd1a335e44bf 7fb047a69f220c21711122dfd94305a9efb60cba 64a8289d249234b9886244d379f15e6b650b28e3 711f53bbef0bebd12eb6f0511d5e2e998b984846 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '0', 'operation': 'split', 'user': 'test'}
+ 26805aba1e600a82e93661149f2313866a221a7b be0ef73c17ade3fc89dc41701eb9fc3a91b58282 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '9', 'operation': 'replace', 'user': 'test'}
+ be0ef73c17ade3fc89dc41701eb9fc3a91b58282 575c4b5ec114d64b681d33f8792853568bfb2b2c 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '13', 'operation': 'replace', 'user': 'test'}
+ 64a8289d249234b9886244d379f15e6b650b28e3 0 {7fb047a69f220c21711122dfd94305a9efb60cba} (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '0', 'operation': 'prune', 'user': 'test'}
+ 58e6b987bf7045fcd9c54f496396ca1d1fc81047 0 {575c4b5ec114d64b681d33f8792853568bfb2b2c} (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '0', 'operation': 'prune', 'user': 'test'}
Change file contents via comments
@@ -261,12 +262,12 @@
a
FILE B
b
- FILE dir1/a (glob)
+ FILE dir1/a
1
2
- FILE dir1/c (glob)
+ FILE dir1/c
5
- FILE dir2/b (glob)
+ FILE dir2/b
34
- FILE dir2/c (glob)
+ FILE dir2/c
6
--- a/tests/test-exchange-obsmarkers-case-A3.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-exchange-obsmarkers-case-A3.t Mon Jan 22 17:53:02 2018 -0500
@@ -74,6 +74,7 @@
created new head
$ hg debugobsolete `getid 'desc(A0)'` `getid 'desc(A1)'`
obsoleted 1 changesets
+ 1 new orphan changesets
$ hg debugobsolete `getid 'desc(B0)'` `getid 'desc(B1)'`
obsoleted 1 changesets
$ hg log -G --hidden
@@ -163,6 +164,7 @@
created new head
$ hg debugobsolete `getid 'desc(A0)'` `getid 'desc(A1)'`
obsoleted 1 changesets
+ 1 new orphan changesets
$ hg debugobsolete `getid 'desc(B0)'` `getid 'desc(B1)'`
obsoleted 1 changesets
$ hg log -G --hidden
@@ -218,6 +220,7 @@
remote: added 1 changesets with 1 changes to 1 files (+1 heads)
remote: 1 new obsolescence markers
remote: obsoleted 1 changesets
+ remote: 1 new orphan changesets
## post push state
# obstore: main
28b51eb45704506b5c603decd6bf7ac5e0f6a52f e5ea8f9c73143125d36658e90ef70c6d2027a5b7 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
@@ -234,6 +237,7 @@
added 1 changesets with 1 changes to 1 files (+1 heads)
1 new obsolescence markers
obsoleted 1 changesets
+ 1 new orphan changesets
new changesets e5ea8f9c7314
(run 'hg heads' to see heads, 'hg merge' to merge)
## post pull state
--- a/tests/test-exchange-obsmarkers-case-A4.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-exchange-obsmarkers-case-A4.t Mon Jan 22 17:53:02 2018 -0500
@@ -65,10 +65,11 @@
$ hg debugobsolete aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa `getid 'desc(A0)'`
$ hg debugobsolete `getid 'desc(A0)'` `getid 'desc(A1)'`
obsoleted 1 changesets
+ 1 new orphan changesets
$ hg log -G --hidden
@ e5ea8f9c7314 (draft): A1
|
- | o 06055a7959d4 (draft): B
+ | * 06055a7959d4 (draft): B
| |
| x 28b51eb45704 (draft): A0
|/
--- a/tests/test-exchange-obsmarkers-case-B5.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-exchange-obsmarkers-case-B5.t Mon Jan 22 17:53:02 2018 -0500
@@ -71,6 +71,7 @@
$ mkcommit B1
$ hg debugobsolete --hidden `getid 'desc(A0)'` `getid 'desc(A1)'`
obsoleted 1 changesets
+ 2 new orphan changesets
$ hg debugobsolete --hidden aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa `getid 'desc(B0)'`
$ hg debugobsolete --hidden `getid 'desc(B0)'` `getid 'desc(B1)'`
obsoleted 1 changesets
@@ -80,7 +81,7 @@
|
@ e5ea8f9c7314 (draft): A1
|
- | o 1d0f3cd25300 (draft): C
+ | * 1d0f3cd25300 (draft): C
| |
| x 6e72f0a95b5e (draft): B0
| |
--- a/tests/test-exchange-obsmarkers-case-C1.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-exchange-obsmarkers-case-C1.t Mon Jan 22 17:53:02 2018 -0500
@@ -58,6 +58,7 @@
$ mkcommit A
$ mkcommit B
$ hg prune -qd '0 0' '.~1'
+ 1 new orphan changesets
$ hg prune -qd '0 0' .
$ hg log -G --hidden
x f6fbb35d8ac9 (draft): B
--- a/tests/test-exchange-obsmarkers-case-C4.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-exchange-obsmarkers-case-C4.t Mon Jan 22 17:53:02 2018 -0500
@@ -67,6 +67,7 @@
$ hg debugobsolete --hidden `getid 'desc(A)'` `getid 'desc(B)'`
obsoleted 1 changesets
$ hg debugobsolete --hidden `getid 'desc(A)'` `getid 'desc(C)'`
+ 2 new content-divergent changesets
$ hg prune -qd '0 0' .
$ hg log -G --hidden
x 7f7f229b13a6 (draft): C
--- a/tests/test-exchange-obsmarkers-case-D1.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-exchange-obsmarkers-case-D1.t Mon Jan 22 17:53:02 2018 -0500
@@ -62,6 +62,7 @@
created new head
$ hg debugobsolete `getid 'desc(A0)'` `getid 'desc(A1)'`
obsoleted 1 changesets
+ 1 new orphan changesets
$ hg prune -d '0 0' 'desc(B)'
obsoleted 1 changesets
$ hg strip --hidden -q 'desc(A0)'
--- a/tests/test-exchange-obsmarkers-case-D4.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-exchange-obsmarkers-case-D4.t Mon Jan 22 17:53:02 2018 -0500
@@ -60,6 +60,7 @@
$ mkcommit B1
$ hg debugobsolete `getid 'desc(A0)'` aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
obsoleted 1 changesets
+ 1 new orphan changesets
$ hg debugobsolete aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa `getid 'desc(A1)'`
$ hg debugobsolete `getid 'desc(B0)'` bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb
obsoleted 1 changesets
--- a/tests/test-extdata.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-extdata.t Mon Jan 22 17:53:02 2018 -0500
@@ -46,8 +46,8 @@
test non-zero exit of shell command
$ hg log -qr "extdata(emptygrep)"
- $ hg log -qr "extdata(emptygrep)" --debug
- extdata command 'cat extdata.txt | grep empty' exited with status * (glob)
+ abort: extdata command 'cat extdata.txt | grep empty' failed: exited with status 1
+ [255]
test bad extdata() revset source
@@ -88,8 +88,7 @@
$ mkdir sub
$ cd sub
$ hg log -qr "extdata(filedata)"
- abort: error: The system cannot find the file specified (windows !)
- abort: error: No such file or directory (no-windows !)
+ abort: error: $ENOENT$
[255]
$ hg log -qr "extdata(shelldata)"
2:f6ed99a58333
--- a/tests/test-extension.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-extension.t Mon Jan 22 17:53:02 2018 -0500
@@ -180,7 +180,7 @@
> EOF
$ (PYTHONPATH=${PYTHONPATH}${PATHSEP}${TESTTMP}/libroot; hg --config extensions.loadabs=loadabs.py root)
ambigabs.s=libroot/ambig.py
- $TESTTMP/a (glob)
+ $TESTTMP/a
#if no-py3k
$ cat > $TESTTMP/libroot/mod/ambigrel.py <<EOF
@@ -194,7 +194,7 @@
> EOF
$ (PYTHONPATH=${PYTHONPATH}${PATHSEP}${TESTTMP}/libroot; hg --config extensions.loadrel=loadrel.py root)
ambigrel.s=libroot/mod/ambig.py
- $TESTTMP/a (glob)
+ $TESTTMP/a
#endif
Check absolute/relative import of extension specific modules
@@ -245,7 +245,7 @@
(extroot) import extroot: this is extroot.__init__
(extroot) from extroot.bar import s: this is extroot.bar
(extroot) import extroot.bar in func(): this is extroot.bar
- $TESTTMP/a (glob)
+ $TESTTMP/a
#if no-py3k
$ rm "$TESTTMP"/extroot/foo.*
@@ -277,7 +277,7 @@
(extroot) import sub1: this is extroot.sub1.__init__
(extroot) from bar import s: this is extroot.bar
(extroot) import bar in func(): this is extroot.bar
- $TESTTMP/a (glob)
+ $TESTTMP/a
#endif
#if demandimport
@@ -534,7 +534,7 @@
Mercurial Distributed SCM (version *) (glob)
(see https://mercurial-scm.org for more information)
- Copyright (C) 2005-2017 Matt Mackall and others
+ Copyright (C) 2005-* Matt Mackall and others (glob)
This is free software; see the source for copying conditions. There is NO
warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
@@ -1225,7 +1225,7 @@
> cmdtable = None
> EOF
$ hg --config extensions.path=./path.py help foo > /dev/null
- warning: error finding commands in $TESTTMP/hgext/forest.py (glob)
+ warning: error finding commands in $TESTTMP/hgext/forest.py
abort: no such help topic: foo
(try 'hg help --keyword foo')
[255]
@@ -1503,17 +1503,17 @@
$ echo '# enable extension locally' >> src/.hg/hgrc
$ echo "reposetuptest = $TESTTMP/reposetuptest.py" >> src/.hg/hgrc
$ hg -R src status
- reposetup() for $TESTTMP/reposetup-test/src (glob)
- reposetup() for $TESTTMP/reposetup-test/src (glob) (chg !)
+ reposetup() for $TESTTMP/reposetup-test/src
+ reposetup() for $TESTTMP/reposetup-test/src (chg !)
$ hg clone -U src clone-dst1
- reposetup() for $TESTTMP/reposetup-test/src (glob)
+ reposetup() for $TESTTMP/reposetup-test/src
$ hg init push-dst1
$ hg -q -R src push push-dst1
- reposetup() for $TESTTMP/reposetup-test/src (glob)
+ reposetup() for $TESTTMP/reposetup-test/src
$ hg init pull-src1
$ hg -q -R pull-src1 pull src
- reposetup() for $TESTTMP/reposetup-test/src (glob)
+ reposetup() for $TESTTMP/reposetup-test/src
$ cat <<EOF >> $HGRCPATH
> [extensions]
@@ -1521,13 +1521,13 @@
> reposetuptest = !
> EOF
$ hg clone -U src clone-dst2
- reposetup() for $TESTTMP/reposetup-test/src (glob)
+ reposetup() for $TESTTMP/reposetup-test/src
$ hg init push-dst2
$ hg -q -R src push push-dst2
- reposetup() for $TESTTMP/reposetup-test/src (glob)
+ reposetup() for $TESTTMP/reposetup-test/src
$ hg init pull-src2
$ hg -q -R pull-src2 pull src
- reposetup() for $TESTTMP/reposetup-test/src (glob)
+ reposetup() for $TESTTMP/reposetup-test/src
$ cat <<EOF >> $HGRCPATH
> [extensions]
@@ -1535,32 +1535,32 @@
> reposetuptest = $TESTTMP/reposetuptest.py
> EOF
$ hg clone -U src clone-dst3
- reposetup() for $TESTTMP/reposetup-test/src (glob)
- reposetup() for $TESTTMP/reposetup-test/clone-dst3 (glob)
+ reposetup() for $TESTTMP/reposetup-test/src
+ reposetup() for $TESTTMP/reposetup-test/clone-dst3
$ hg init push-dst3
- reposetup() for $TESTTMP/reposetup-test/push-dst3 (glob)
+ reposetup() for $TESTTMP/reposetup-test/push-dst3
$ hg -q -R src push push-dst3
- reposetup() for $TESTTMP/reposetup-test/src (glob)
- reposetup() for $TESTTMP/reposetup-test/push-dst3 (glob)
+ reposetup() for $TESTTMP/reposetup-test/src
+ reposetup() for $TESTTMP/reposetup-test/push-dst3
$ hg init pull-src3
- reposetup() for $TESTTMP/reposetup-test/pull-src3 (glob)
+ reposetup() for $TESTTMP/reposetup-test/pull-src3
$ hg -q -R pull-src3 pull src
- reposetup() for $TESTTMP/reposetup-test/pull-src3 (glob)
- reposetup() for $TESTTMP/reposetup-test/src (glob)
+ reposetup() for $TESTTMP/reposetup-test/pull-src3
+ reposetup() for $TESTTMP/reposetup-test/src
$ echo '[extensions]' >> src/.hg/hgrc
$ echo '# disable extension locally' >> src/.hg/hgrc
$ echo 'reposetuptest = !' >> src/.hg/hgrc
$ hg clone -U src clone-dst4
- reposetup() for $TESTTMP/reposetup-test/clone-dst4 (glob)
+ reposetup() for $TESTTMP/reposetup-test/clone-dst4
$ hg init push-dst4
- reposetup() for $TESTTMP/reposetup-test/push-dst4 (glob)
+ reposetup() for $TESTTMP/reposetup-test/push-dst4
$ hg -q -R src push push-dst4
- reposetup() for $TESTTMP/reposetup-test/push-dst4 (glob)
+ reposetup() for $TESTTMP/reposetup-test/push-dst4
$ hg init pull-src4
- reposetup() for $TESTTMP/reposetup-test/pull-src4 (glob)
+ reposetup() for $TESTTMP/reposetup-test/pull-src4
$ hg -q -R pull-src4 pull src
- reposetup() for $TESTTMP/reposetup-test/pull-src4 (glob)
+ reposetup() for $TESTTMP/reposetup-test/pull-src4
disabling in command line overlays with all configuration
$ hg --config extensions.reposetuptest=! clone -U src clone-dst5
@@ -1605,8 +1605,8 @@
$ echo "reposetuptest = $TESTTMP/reposetuptest.py" >> parent/.hg/hgrc
$ cp parent/.hg/hgrc parent/sub2/.hg/hgrc
$ hg -R parent status -S -A
- reposetup() for $TESTTMP/reposetup-test/parent (glob)
- reposetup() for $TESTTMP/reposetup-test/parent/sub2 (glob)
+ reposetup() for $TESTTMP/reposetup-test/parent
+ reposetup() for $TESTTMP/reposetup-test/parent/sub2
C .hgsub
C .hgsubstate
C sub1/1
--- a/tests/test-fileset.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-fileset.t Mon Jan 22 17:53:02 2018 -0500
@@ -27,6 +27,24 @@
(string 're:a\\d')
a1
a2
+ $ fileset -v '!re:"a\d"'
+ (not
+ (kindpat
+ (symbol 're')
+ (string 'a\\d')))
+ b1
+ b2
+ $ fileset -v 'path:a1 or glob:b?'
+ (or
+ (kindpat
+ (symbol 'path')
+ (symbol 'a1'))
+ (kindpat
+ (symbol 'glob')
+ (symbol 'b?')))
+ a1
+ b1
+ b2
$ fileset -v 'a1 or a2'
(or
(symbol 'a1')
@@ -53,6 +71,49 @@
hg: parse error: invalid \x escape
[255]
+Test invalid syntax
+
+ $ fileset -v '"added"()'
+ (func
+ (string 'added')
+ None)
+ hg: parse error: not a symbol
+ [255]
+ $ fileset -v '()()'
+ (func
+ (group
+ None)
+ None)
+ hg: parse error: not a symbol
+ [255]
+ $ fileset -v -- '-x'
+ (negate
+ (symbol 'x'))
+ hg: parse error: can't use negate operator in this context
+ [255]
+ $ fileset -v -- '-()'
+ (negate
+ (group
+ None))
+ hg: parse error: can't use negate operator in this context
+ [255]
+
+ $ fileset '"path":.'
+ hg: parse error: not a symbol
+ [255]
+ $ fileset 'path:foo bar'
+ hg: parse error at 9: invalid token
+ [255]
+ $ fileset 'foo:bar:baz'
+ hg: parse error: not a symbol
+ [255]
+ $ fileset 'foo:bar()'
+ hg: parse error: pattern must be a string
+ [255]
+ $ fileset 'foo:bar'
+ hg: parse error: invalid pattern kind: foo
+ [255]
+
Test files status
$ rm a1
@@ -199,7 +260,7 @@
merging b2
warning: conflicts while merging b2! (edit, then use 'hg resolve --mark')
* files updated, 0 files merged, 1 files removed, 1 files unresolved (glob)
- use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon
+ use 'hg resolve' to retry unresolved file merges or 'hg merge --abort' to abandon
[1]
$ fileset 'resolved()'
$ fileset 'unresolved()'
@@ -319,6 +380,9 @@
$ fileset -r4 'subrepo("re:su.*")'
sub
sub2
+ $ fileset -r4 'subrepo(re:su.*)'
+ sub
+ sub2
$ fileset -r4 'subrepo("sub")'
sub
$ fileset -r4 'b2 or c1'
--- a/tests/test-flagprocessor.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-flagprocessor.t Mon Jan 22 17:53:02 2018 -0500
@@ -81,7 +81,7 @@
# Push to the server
$ hg push
- pushing to $TESTTMP/server (glob)
+ pushing to $TESTTMP/server
searching for changes
adding changesets
adding manifests
@@ -101,7 +101,7 @@
# Pull from server and update to latest revision
$ hg pull default
- pulling from $TESTTMP/server (glob)
+ pulling from $TESTTMP/server
requesting all changes
adding changesets
adding manifests
--- a/tests/test-fncache.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-fncache.t Mon Jan 22 17:53:02 2018 -0500
@@ -14,7 +14,7 @@
$ mkdir a.i
$ echo "some other text" > a.i/b
$ hg add
- adding a.i/b (glob)
+ adding a.i/b
$ hg ci -m second
$ cat .hg/store/fncache | sort
data/a.i
@@ -25,7 +25,7 @@
$ mkdir a.i.hg
$ echo "yet another text" > a.i.hg/c
$ hg add
- adding a.i.hg/c (glob)
+ adding a.i.hg/c
$ hg ci -m third
$ cat .hg/store/fncache | sort
data/a.i
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-fuzz-targets.t Mon Jan 22 17:53:02 2018 -0500
@@ -0,0 +1,5 @@
+#require clang-libfuzzer test-repo
+ $ cd $TESTDIR/../contrib/fuzz
+ $ make
+Just run the fuzzer for five seconds to verify it works at all.
+ $ ./bdiff -max_total_time 5
--- a/tests/test-generaldelta.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-generaldelta.t Mon Jan 22 17:53:02 2018 -0500
@@ -154,7 +154,7 @@
Test that strip bundle use bundle2
$ hg --config extensions.strip= strip .
0 files updated, 0 files merged, 5 files removed, 0 files unresolved
- saved backup bundle to $TESTTMP/aggressive/.hg/strip-backup/1c5d4dc9a8b8-6c68e60c-backup.hg (glob)
+ saved backup bundle to $TESTTMP/aggressive/.hg/strip-backup/1c5d4dc9a8b8-6c68e60c-backup.hg
$ hg debugbundle .hg/strip-backup/*
Stream params: {Compression: BZ}
changegroup -- {nbchanges: 1, version: 02}
--- a/tests/test-getbundle.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-getbundle.t Mon Jan 22 17:53:02 2018 -0500
@@ -264,9 +264,9 @@
$ cat access.log
* - - [*] "GET /?cmd=capabilities HTTP/1.1" 200 - (glob)
- * - - [*] "GET /?cmd=getbundle HTTP/1.1" 200 - x-hgproto-1:0.1 0.2 comp=*zlib,none,bzip2 (glob)
+ $LOCALIP - - [$LOGDATE$] "GET /?cmd=getbundle HTTP/1.1" 200 - x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ (glob)
* - - [*] "GET /?cmd=capabilities HTTP/1.1" 200 - (glob)
- * - - [*] "GET /?cmd=getbundle HTTP/1.1" 200 - x-hgarg-1:common=700b7e19db54103633c4bf4a6a6b6d55f4d50c03+d5f6e1ea452285324836a49d7d3c2a63cfed1d31&heads=13c0170174366b441dc68e8e33757232fa744458+bac16991d12ff45f9dc43c52da1946dfadb83e80 x-hgproto-1:0.1 0.2 comp=*zlib,none,bzip2 (glob)
+ $LOCALIP - - [$LOGDATE$] "GET /?cmd=getbundle HTTP/1.1" 200 - x-hgarg-1:common=700b7e19db54103633c4bf4a6a6b6d55f4d50c03+d5f6e1ea452285324836a49d7d3c2a63cfed1d31&heads=13c0170174366b441dc68e8e33757232fa744458+bac16991d12ff45f9dc43c52da1946dfadb83e80 x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ (glob)
$ cat error.log
--- a/tests/test-git-export.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-git-export.t Mon Jan 22 17:53:02 2018 -0500
@@ -99,7 +99,7 @@
warning: dir2 not inside relative root dir1
$ hg diff --git --root dir1 -r 1:tip 'dir2/{copy}'
- warning: dir2/{copy} not inside relative root dir1 (glob)
+ warning: dir2/{copy} not inside relative root dir1
$ cd dir1
$ hg diff --git --root .. -r 1:tip
@@ -161,7 +161,7 @@
new
+copy1
$ hg diff --git --root . -r 1:tip ../dir2
- warning: ../dir2 not inside relative root . (glob)
+ warning: ../dir2 not inside relative root .
$ hg diff --git --root . -r 1:tip '../dir2/*'
warning: ../dir2/* not inside relative root . (glob)
$ cd ..
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-githelp.t Mon Jan 22 17:53:02 2018 -0500
@@ -0,0 +1,296 @@
+ $ cat >> $HGRCPATH << EOF
+ > [extensions]
+ > githelp =
+ > EOF
+
+ $ hg init repo
+ $ cd repo
+ $ echo foo > test_file
+ $ mkdir dir
+ $ echo foo > dir/file
+ $ echo foo > removed_file
+ $ echo foo > deleted_file
+ $ hg add -q .
+ $ hg commit -m 'bar'
+ $ hg bookmark both
+ $ touch both
+ $ touch untracked_file
+ $ hg remove removed_file
+ $ rm deleted_file
+
+githelp on a single command should succeed
+ $ hg githelp -- commit
+ hg commit
+ $ hg githelp -- git commit
+ hg commit
+
+githelp should fail nicely if we don't give it arguments
+ $ hg githelp
+ abort: missing git command - usage: hg githelp -- <git command>
+ [255]
+ $ hg githelp -- git
+ abort: missing git command - usage: hg githelp -- <git command>
+ [255]
+
+githelp on a command with options should succeed
+ $ hg githelp -- commit -pm "abc"
+ hg commit --interactive -m 'abc'
+
+githelp on a command with a standalone unrecognized option should succeed with a warning
+ $ hg githelp -- commit -p -v
+ ignoring unknown option -v
+ hg commit --interactive
+
+githelp on a command with an unrecognized option packed with other options should fail with an error
+ $ hg githelp -- commit -pv
+ abort: unknown option v packed with other options
+ Please try passing the option as it's own flag: -v
+ [255]
+
+githelp for git rebase --skip
+ $ hg githelp -- git rebase --skip
+ hg revert --all -r .
+ hg rebase --continue
+
+githelp for git commit --amend (hg commit --amend pulls up an editor)
+ $ hg githelp -- commit --amend
+ hg commit --amend
+
+githelp for git commit --amend --no-edit (hg amend does not pull up an editor)
+ $ hg githelp -- commit --amend --no-edit
+ hg amend
+
+githelp for git checkout -- . (checking out a directory)
+ $ hg githelp -- checkout -- .
+ note: use --no-backup to avoid creating .orig files
+
+ hg revert .
+
+githelp for git checkout "HEAD^" (should still work to pass a rev)
+ $ hg githelp -- checkout "HEAD^"
+ hg update .^
+
+githelp checkout: args after -- should be treated as paths no matter what
+ $ hg githelp -- checkout -- HEAD
+ note: use --no-backup to avoid creating .orig files
+
+ hg revert HEAD
+
+githelp for git checkout with rev and path
+ $ hg githelp -- checkout "HEAD^" -- file.txt
+ note: use --no-backup to avoid creating .orig files
+
+ hg revert -r .^ file.txt
+
+githelp for git checkout with rev and path, without a separator
+ $ hg githelp -- checkout "HEAD^" file.txt
+ note: use --no-backup to avoid creating .orig files
+
+ hg revert -r .^ file.txt
+
+githelp for checkout with a file as first argument
+ $ hg githelp -- checkout test_file
+ note: use --no-backup to avoid creating .orig files
+
+ hg revert test_file
+
+githelp for checkout with a removed file as first argument
+ $ hg githelp -- checkout removed_file
+ note: use --no-backup to avoid creating .orig files
+
+ hg revert removed_file
+
+githelp for checkout with a deleted file as first argument
+ $ hg githelp -- checkout deleted_file
+ note: use --no-backup to avoid creating .orig files
+
+ hg revert deleted_file
+
+githelp for checkout with an untracked file as first argument
+ $ hg githelp -- checkout untracked_file
+ note: use --no-backup to avoid creating .orig files
+
+ hg revert untracked_file
+
+githelp for checkout with a directory as first argument
+ $ hg githelp -- checkout dir
+ note: use --no-backup to avoid creating .orig files
+
+ hg revert dir
+
+githelp for checkout when not in repo root
+ $ cd dir
+ $ hg githelp -- checkout file
+ note: use --no-backup to avoid creating .orig files
+
+ hg revert file
+
+ $ cd ..
+
+githelp for checkout with an argument that is both a file and a revision
+ $ hg githelp -- checkout both
+ hg update both
+
+githelp for checkout with the -p option
+ $ hg githelp -- git checkout -p xyz
+ hg revert -i -r xyz
+
+ $ hg githelp -- git checkout -p xyz -- abc
+ note: use --no-backup to avoid creating .orig files
+
+ hg revert -i -r xyz abc
+
+githelp for checkout with the -f option and a rev
+ $ hg githelp -- git checkout -f xyz
+ hg update -C xyz
+ $ hg githelp -- git checkout --force xyz
+ hg update -C xyz
+
+githelp for checkout with the -f option without an arg
+ $ hg githelp -- git checkout -f
+ hg revert --all
+ $ hg githelp -- git checkout --force
+ hg revert --all
+
+githelp for grep with pattern and path
+ $ hg githelp -- grep shrubbery flib/intern/
+ hg grep shrubbery flib/intern/
+
+githelp for reset, checking that ~ in git becomes ~1 in Mercurial
+ $ hg githelp -- reset HEAD~
+ hg update .~1
+ $ hg githelp -- reset "HEAD^"
+ hg update .^
+ $ hg githelp -- reset HEAD~3
+ hg update .~3
+
+ $ hg githelp -- reset --mixed HEAD
+ NOTE: --mixed has no meaning since Mercurial has no staging area
+
+ hg update .
+ $ hg githelp -- reset --soft HEAD
+ NOTE: --soft has no meaning since Mercurial has no staging area
+
+ hg update .
+ $ hg githelp -- reset --hard HEAD
+ hg update --clean .
+
+githelp for git show --name-status
+ $ hg githelp -- git show --name-status
+ hg log --style status -r .
+
+githelp for git show --pretty=format: --name-status
+ $ hg githelp -- git show --pretty=format: --name-status
+ hg status --change .
+
+githelp for show with no arguments
+ $ hg githelp -- show
+ hg export
+
+githelp for show with a path
+ $ hg githelp -- show test_file
+ hg cat test_file
+
+githelp for show with an argument that is not a path
+ $ hg githelp -- show rev
+ hg export rev
+
+githelp for show with many arguments
+ $ hg githelp -- show argone argtwo
+ hg export argone argtwo
+ $ hg githelp -- show test_file argone argtwo
+ hg cat test_file argone argtwo
+
+githelp for show with --unified options
+ $ hg githelp -- show --unified=10
+ hg export --config diff.unified=10
+ $ hg githelp -- show -U100
+ hg export --config diff.unified=100
+
+githelp for show with a path and --unified
+ $ hg githelp -- show -U20 test_file
+ hg cat test_file --config diff.unified=20
+
+githelp for stash drop without name
+ $ hg githelp -- git stash drop
+ hg shelve -d <shelve name>
+
+githelp for stash drop with name
+ $ hg githelp -- git stash drop xyz
+ hg shelve -d xyz
+
+githelp for whatchanged should show a deprecation message
+ $ hg githelp -- whatchanged -p
+ This command has been deprecated in the git project, thus isn't supported by this tool.
+
+
+githelp for git branch -m renaming
+ $ hg githelp -- git branch -m old new
+ hg bookmark -m old new
+
+When the old name is omitted, git branch -m new renames the current branch.
+ $ hg githelp -- git branch -m new
+ hg bookmark -m `hg log -T"{activebookmark}" -r .` new
+
+Branch deletion in git strips commits
+ $ hg githelp -- git branch -d
+ hg strip -B
+ $ hg githelp -- git branch -d feature
+ hg strip -B feature -B
+ $ hg githelp -- git branch --delete experiment1 experiment2
+ hg strip -B experiment1 -B experiment2 -B
+
+githelp for reuse message using the shorthand
+ $ hg githelp -- git commit -C deadbeef
+ hg commit -M deadbeef
+
+githelp for reuse message using the long version
+ $ hg githelp -- git commit --reuse-message deadbeef
+ hg commit -M deadbeef
+
+githelp for apply with no options
+ $ hg githelp -- apply
+ hg import --no-commit
+
+githelp for apply with a custom directory strip
+ $ hg githelp -- apply -p 5
+ hg import --no-commit -p 5
+
+git merge-base
+ $ hg githelp -- git merge-base --is-ancestor
+ ignoring unknown option --is-ancestor
+ NOTE: ancestors() is part of the revset language.
+ Learn more about revsets with 'hg help revsets'
+
+ hg log -T '{node}\n' -r 'ancestor(A,B)'
+
+githelp for git blame
+ $ hg githelp -- git blame
+ hg annotate -udl
+
+githelp for add
+
+ $ hg githelp -- git add
+ hg add
+
+ $ hg githelp -- git add -p
+ note: Mercurial will commit when complete, as there is no staging area in Mercurial
+
+ hg commit --interactive
+
+ $ hg githelp -- git add --all
+ note: use hg addremove to remove files that have been deleted.
+
+ hg add
+
+githelp for reflog
+
+ $ hg githelp -- git reflog
+ hg journal
+
+ note: in hg commits can be deleted from repo but we always have backups.
+
+ $ hg githelp -- git reflog --all
+ hg journal --all
+
+ note: in hg commits can be deleted from repo but we always have backups.
--- a/tests/test-globalopts.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-globalopts.t Mon Jan 22 17:53:02 2018 -0500
@@ -88,7 +88,7 @@
abort: no repository found in '$TESTTMP' (.hg not found)!
[255]
$ hg -R b ann a/a
- abort: a/a not under root '$TESTTMP/b' (glob)
+ abort: a/a not under root '$TESTTMP/b'
(consider using '--cwd b')
[255]
$ hg log
@@ -355,6 +355,7 @@
environment Environment Variables
extensions Using Additional Features
filesets Specifying File Sets
+ flags Command-line flags
glossary Glossary
hgignore Syntax for Mercurial Ignore Files
hgweb Configuring hgweb
@@ -439,6 +440,7 @@
environment Environment Variables
extensions Using Additional Features
filesets Specifying File Sets
+ flags Command-line flags
glossary Glossary
hgignore Syntax for Mercurial Ignore Files
hgweb Configuring hgweb
--- a/tests/test-glog.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-glog.t Mon Jan 22 17:53:02 2018 -0500
@@ -88,21 +88,34 @@
> commands,
> extensions,
> revsetlang,
+ > smartset,
> )
>
+ > def logrevset(repo, pats, opts):
+ > revs = cmdutil._logrevs(repo, opts)
+ > if not revs:
+ > return None
+ > match, pats, slowpath = cmdutil._makelogmatcher(repo, revs, pats, opts)
+ > return cmdutil._makelogrevset(repo, match, pats, slowpath, opts)
+ >
> def uisetup(ui):
- > def printrevset(orig, ui, repo, *pats, **opts):
+ > def printrevset(orig, repo, pats, opts):
+ > revs, filematcher = orig(repo, pats, opts)
> if opts.get('print_revset'):
- > expr = cmdutil.getgraphlogrevs(repo, pats, opts)[1]
+ > expr = logrevset(repo, pats, opts)
> if expr:
> tree = revsetlang.parse(expr)
+ > tree = revsetlang.analyze(tree)
> else:
> tree = []
+ > ui = repo.ui
> ui.write('%r\n' % (opts.get('rev', []),))
> ui.write(revsetlang.prettyformat(tree) + '\n')
- > return 0
- > return orig(ui, repo, *pats, **opts)
- > entry = extensions.wrapcommand(commands.table, 'log', printrevset)
+ > ui.write(smartset.prettyformat(revs) + '\n')
+ > revs = smartset.baseset() # display no revisions
+ > return revs, filematcher
+ > extensions.wrapfunction(cmdutil, 'getlogrevs', printrevset)
+ > aliases, entry = cmdutil.findcmd('log', commands.table)
> entry[1].append(('', 'print-revset', False,
> 'print generated revset and exit (DEPRECATED)'))
> EOF
@@ -1445,6 +1458,7 @@
$ testlog -r 27 -r 25 -r 21 -r 34 -r 32 -r 31
['27', '25', '21', '34', '32', '31']
[]
+ <baseset- [21, 25, 27, 31, 32, 34]>
--- log.nodes * (glob)
+++ glog.nodes * (glob)
@@ -1,6 +1,6 @@
@@ -1459,90 +1473,126 @@
+nodetag 21
$ testlog -u test -u not-a-user
[]
- (group
- (group
- (or
- (list
- (func
- (symbol 'user')
- (string 'test'))
- (func
- (symbol 'user')
- (string 'not-a-user'))))))
+ (or
+ (list
+ (func
+ (symbol 'user')
+ (string 'test'))
+ (func
+ (symbol 'user')
+ (string 'not-a-user'))))
+ <filteredset
+ <spanset- 0:37>,
+ <addset
+ <filteredset
+ <fullreposet+ 0:37>,
+ <user 'test'>>,
+ <filteredset
+ <fullreposet+ 0:37>,
+ <user 'not-a-user'>>>>
$ testlog -b not-a-branch
abort: unknown revision 'not-a-branch'!
abort: unknown revision 'not-a-branch'!
abort: unknown revision 'not-a-branch'!
$ testlog -b 35 -b 36 --only-branch branch
[]
- (group
- (group
+ (or
+ (list
+ (func
+ (symbol 'branch')
+ (string 'default'))
(or
(list
(func
(symbol 'branch')
- (string 'default'))
- (func
- (symbol 'branch')
(string 'branch'))
(func
(symbol 'branch')
(string 'branch'))))))
+ <filteredset
+ <spanset- 0:37>,
+ <addset
+ <filteredset
+ <fullreposet+ 0:37>,
+ <branch 'default'>>,
+ <addset
+ <filteredset
+ <fullreposet+ 0:37>,
+ <branch 'branch'>>,
+ <filteredset
+ <fullreposet+ 0:37>,
+ <branch 'branch'>>>>>
$ testlog -k expand -k merge
[]
- (group
- (group
- (or
- (list
- (func
- (symbol 'keyword')
- (string 'expand'))
- (func
- (symbol 'keyword')
- (string 'merge'))))))
+ (or
+ (list
+ (func
+ (symbol 'keyword')
+ (string 'expand'))
+ (func
+ (symbol 'keyword')
+ (string 'merge'))))
+ <filteredset
+ <spanset- 0:37>,
+ <addset
+ <filteredset
+ <fullreposet+ 0:37>,
+ <keyword 'expand'>>,
+ <filteredset
+ <fullreposet+ 0:37>,
+ <keyword 'merge'>>>>
$ testlog --only-merges
[]
- (group
+ (func
+ (symbol 'merge')
+ None)
+ <filteredset
+ <spanset- 0:37>,
+ <merge>>
+ $ testlog --no-merges
+ []
+ (not
(func
(symbol 'merge')
None))
- $ testlog --no-merges
- []
- (group
- (not
- (func
- (symbol 'merge')
- None)))
+ <filteredset
+ <spanset- 0:37>,
+ <not
+ <filteredset
+ <spanset- 0:37>,
+ <merge>>>>
$ testlog --date '2 0 to 4 0'
[]
- (group
- (func
- (symbol 'date')
- (string '2 0 to 4 0')))
+ (func
+ (symbol 'date')
+ (string '2 0 to 4 0'))
+ <filteredset
+ <spanset- 0:37>,
+ <date '2 0 to 4 0'>>
$ hg log -G -d 'brace ) in a date'
hg: parse error: invalid date: 'brace ) in a date'
[255]
$ testlog --prune 31 --prune 32
[]
- (group
- (group
- (and
- (not
- (group
- (or
- (list
- (string '31')
- (func
- (symbol 'ancestors')
- (string '31'))))))
- (not
- (group
- (or
- (list
- (string '32')
- (func
- (symbol 'ancestors')
- (string '32')))))))))
+ (not
+ (or
+ (list
+ (func
+ (symbol 'ancestors')
+ (string '31'))
+ (func
+ (symbol 'ancestors')
+ (string '32')))))
+ <filteredset
+ <spanset- 0:37>,
+ <not
+ <addset
+ <filteredset
+ <spanset- 0:37>,
+ <generatorsetdesc+>>,
+ <filteredset
+ <spanset- 0:37>,
+ <generatorsetdesc+>>>>>
Dedicated repo for --follow and paths filtering. The g is crafted to
have 2 filelog topological heads in a linear changeset graph.
@@ -1553,9 +1603,11 @@
$ testlog --follow
[]
[]
+ <baseset []>
$ testlog -rnull
['null']
[]
+ <baseset [-1]>
$ echo a > a
$ echo aa > aa
$ echo f > f
@@ -1589,53 +1641,60 @@
$ testlog a
[]
- (group
- (group
- (func
- (symbol 'filelog')
- (string 'a'))))
+ (func
+ (symbol 'filelog')
+ (string 'a'))
+ <filteredset
+ <spanset- 0:5>, set([0])>
$ testlog a b
[]
- (group
- (group
- (or
- (list
- (func
- (symbol 'filelog')
- (string 'a'))
- (func
- (symbol 'filelog')
- (string 'b'))))))
+ (or
+ (list
+ (func
+ (symbol 'filelog')
+ (string 'a'))
+ (func
+ (symbol 'filelog')
+ (string 'b'))))
+ <filteredset
+ <spanset- 0:5>,
+ <addset
+ <baseset+ [0]>,
+ <baseset+ [1]>>>
Test falling back to slow path for non-existing files
$ testlog a c
[]
- (group
- (func
- (symbol '_matchfiles')
- (list
- (string 'r:')
- (string 'd:relpath')
- (string 'p:a')
- (string 'p:c'))))
+ (func
+ (symbol '_matchfiles')
+ (list
+ (string 'r:')
+ (string 'd:relpath')
+ (string 'p:a')
+ (string 'p:c')))
+ <filteredset
+ <spanset- 0:5>,
+ <matchfiles patterns=['a', 'c'], include=[] exclude=[], default='relpath', rev=None>>
Test multiple --include/--exclude/paths
$ testlog --include a --include e --exclude b --exclude e a e
[]
- (group
- (func
- (symbol '_matchfiles')
- (list
- (string 'r:')
- (string 'd:relpath')
- (string 'p:a')
- (string 'p:e')
- (string 'i:a')
- (string 'i:e')
- (string 'x:b')
- (string 'x:e'))))
+ (func
+ (symbol '_matchfiles')
+ (list
+ (string 'r:')
+ (string 'd:relpath')
+ (string 'p:a')
+ (string 'p:e')
+ (string 'i:a')
+ (string 'i:e')
+ (string 'x:b')
+ (string 'x:e')))
+ <filteredset
+ <spanset- 0:5>,
+ <matchfiles patterns=['a', 'e'], include=['a', 'e'] exclude=['b', 'e'], default='relpath', rev=None>>
Test glob expansion of pats
@@ -1647,11 +1706,11 @@
> testlog a*;
> fi;
[]
- (group
- (group
- (func
- (symbol 'filelog')
- (string 'aa'))))
+ (func
+ (symbol 'filelog')
+ (string 'aa'))
+ <filteredset
+ <spanset- 0:5>, set([0])>
Test --follow on a non-existent directory
@@ -1665,17 +1724,15 @@
$ hg up -q '.^'
$ testlog -f dir
[]
- (group
- (and
- (func
- (symbol 'ancestors')
- (symbol '.'))
- (func
- (symbol '_matchfiles')
- (list
- (string 'r:')
- (string 'd:relpath')
- (string 'p:dir')))))
+ (func
+ (symbol '_matchfiles')
+ (list
+ (string 'r:')
+ (string 'd:relpath')
+ (string 'p:dir')))
+ <filteredset
+ <generatorsetdesc->,
+ <matchfiles patterns=['dir'], include=[] exclude=[], default='relpath', rev=None>>
$ hg up -q tip
Test --follow on file not in parent revision
@@ -1689,50 +1746,39 @@
$ testlog -f 'glob:*'
[]
- (group
- (and
- (func
- (symbol 'ancestors')
- (symbol '.'))
- (func
- (symbol '_matchfiles')
- (list
- (string 'r:')
- (string 'd:relpath')
- (string 'p:glob:*')))))
+ (func
+ (symbol '_matchfiles')
+ (list
+ (string 'r:')
+ (string 'd:relpath')
+ (string 'p:glob:*')))
+ <filteredset
+ <generatorsetdesc->,
+ <matchfiles patterns=['glob:*'], include=[] exclude=[], default='relpath', rev=None>>
Test --follow on a single rename
$ hg up -q 2
$ testlog -f a
[]
- (group
- (group
- (func
- (symbol 'follow')
- (string 'a'))))
+ []
+ <generatorsetdesc->
Test --follow and multiple renames
$ hg up -q tip
$ testlog -f e
[]
- (group
- (group
- (func
- (symbol 'follow')
- (string 'e'))))
+ []
+ <generatorsetdesc->
Test --follow and multiple filelog heads
$ hg up -q 2
$ testlog -f g
[]
- (group
- (group
- (func
- (symbol 'follow')
- (string 'g'))))
+ []
+ <generatorsetdesc->
$ cat log.nodes
nodetag 2
nodetag 1
@@ -1740,11 +1786,8 @@
$ hg up -q tip
$ testlog -f g
[]
- (group
- (group
- (func
- (symbol 'follow')
- (string 'g'))))
+ []
+ <generatorsetdesc->
$ cat log.nodes
nodetag 3
nodetag 2
@@ -1754,16 +1797,8 @@
$ testlog -f g e
[]
- (group
- (group
- (or
- (list
- (func
- (symbol 'follow')
- (string 'g'))
- (func
- (symbol 'follow')
- (string 'e'))))))
+ []
+ <generatorsetdesc->
$ cat log.nodes
nodetag 4
nodetag 3
@@ -1777,6 +1812,7 @@
$ testlog -f
[]
[]
+ <baseset []>
Test --follow-first
@@ -1791,22 +1827,15 @@
$ hg ci -m "merge 5 and 4"
$ testlog --follow-first
[]
- (group
- (func
- (symbol '_firstancestors')
- (func
- (symbol 'rev')
- (symbol '6'))))
+ []
+ <generatorsetdesc->
Cannot compare with log --follow-first FILE as it never worked
$ hg log -G --print-revset --follow-first e
[]
- (group
- (group
- (func
- (symbol '_followfirst')
- (string 'e'))))
+ []
+ <generatorsetdesc->
$ hg log -G --follow-first e --template '{rev} {desc|firstline}\n'
@ 6 merge 5 and 4
|\
@@ -1838,53 +1867,59 @@
$ hg up -q 4
$ testlog "set:copied()"
[]
- (group
- (func
- (symbol '_matchfiles')
- (list
- (string 'r:')
- (string 'd:relpath')
- (string 'p:set:copied()'))))
+ (func
+ (symbol '_matchfiles')
+ (list
+ (string 'r:')
+ (string 'd:relpath')
+ (string 'p:set:copied()')))
+ <filteredset
+ <spanset- 0:7>,
+ <matchfiles patterns=['set:copied()'], include=[] exclude=[], default='relpath', rev=None>>
$ testlog --include "set:copied()"
[]
- (group
- (func
- (symbol '_matchfiles')
- (list
- (string 'r:')
- (string 'd:relpath')
- (string 'i:set:copied()'))))
+ (func
+ (symbol '_matchfiles')
+ (list
+ (string 'r:')
+ (string 'd:relpath')
+ (string 'i:set:copied()')))
+ <filteredset
+ <spanset- 0:7>,
+ <matchfiles patterns=[], include=['set:copied()'] exclude=[], default='relpath', rev=None>>
$ testlog -r "sort(file('set:copied()'), -rev)"
["sort(file('set:copied()'), -rev)"]
[]
+ <baseset []>
Test --removed
$ testlog --removed
[]
[]
+ <spanset- 0:7>
$ testlog --removed a
[]
- (group
- (func
- (symbol '_matchfiles')
- (list
- (string 'r:')
- (string 'd:relpath')
- (string 'p:a'))))
+ (func
+ (symbol '_matchfiles')
+ (list
+ (string 'r:')
+ (string 'd:relpath')
+ (string 'p:a')))
+ <filteredset
+ <spanset- 0:7>,
+ <matchfiles patterns=['a'], include=[] exclude=[], default='relpath', rev=None>>
$ testlog --removed --follow a
[]
- (group
- (and
- (func
- (symbol 'ancestors')
- (symbol '.'))
- (func
- (symbol '_matchfiles')
- (list
- (string 'r:')
- (string 'd:relpath')
- (string 'p:a')))))
+ (func
+ (symbol '_matchfiles')
+ (list
+ (string 'r:')
+ (string 'd:relpath')
+ (string 'p:a')))
+ <filteredset
+ <generatorsetdesc->,
+ <matchfiles patterns=['a'], include=[] exclude=[], default='relpath', rev=None>>
Test --patch and --stat with --follow and --follow-first
@@ -1988,6 +2023,7 @@
$ testlog -r 'foo-bar'
['foo-bar']
[]
+ <baseset [6]>
Test --follow and forward --rev
@@ -2208,52 +2244,29 @@
+g
$ testlog --follow -r6 -r8 -r5 -r7 -r4
['6', '8', '5', '7', '4']
- (group
- (func
- (symbol 'descendants')
- (func
- (symbol 'rev')
- (symbol '6'))))
+ []
+ <generatorsetdesc->
Test --follow-first and forward --rev
$ testlog --follow-first -r6 -r8 -r5 -r7 -r4
['6', '8', '5', '7', '4']
- (group
- (func
- (symbol '_firstdescendants')
- (func
- (symbol 'rev')
- (symbol '6'))))
- --- log.nodes * (glob)
- +++ glog.nodes * (glob)
- @@ -1,3 +1,3 @@
- -nodetag 6
- nodetag 8
- nodetag 7
- +nodetag 6
+ []
+ <generatorsetdesc->
Test --follow and backward --rev
$ testlog --follow -r6 -r5 -r7 -r8 -r4
['6', '5', '7', '8', '4']
- (group
- (func
- (symbol 'ancestors')
- (func
- (symbol 'rev')
- (symbol '6'))))
+ []
+ <generatorsetdesc->
Test --follow-first and backward --rev
$ testlog --follow-first -r6 -r5 -r7 -r8 -r4
['6', '5', '7', '8', '4']
- (group
- (func
- (symbol '_firstancestors')
- (func
- (symbol 'rev')
- (symbol '6'))))
+ []
+ <generatorsetdesc->
Test --follow with --rev of graphlog extension
@@ -2269,27 +2282,26 @@
$ cd dir
$ testlog .
[]
- (group
- (func
- (symbol '_matchfiles')
- (list
- (string 'r:')
- (string 'd:relpath')
- (string 'p:.'))))
+ (func
+ (symbol '_matchfiles')
+ (list
+ (string 'r:')
+ (string 'd:relpath')
+ (string 'p:.')))
+ <filteredset
+ <spanset- 0:9>,
+ <matchfiles patterns=['.'], include=[] exclude=[], default='relpath', rev=None>>
$ testlog ../b
[]
- (group
- (group
- (func
- (symbol 'filelog')
- (string '../b'))))
+ (func
+ (symbol 'filelog')
+ (string '../b'))
+ <filteredset
+ <spanset- 0:9>, set([1])>
$ testlog -f ../b
[]
- (group
- (group
- (func
- (symbol 'follow')
- (string 'b'))))
+ []
+ <generatorsetdesc->
$ cd ..
Test --hidden
@@ -2305,9 +2317,11 @@
$ testlog
[]
[]
+ <spanset- 0:9>
$ testlog --hidden
[]
[]
+ <spanset- 0:9>
$ hg log -G --template '{rev} {desc}\n'
o 7 Added tag foo-bar for changeset fc281d8ff18d
|
--- a/tests/test-graft.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-graft.t Mon Jan 22 17:53:02 2018 -0500
@@ -811,7 +811,7 @@
$ hg up -qC 7
$ hg tag -l -r 13 tmp
$ hg --config extensions.strip= strip 2
- saved backup bundle to $TESTTMP/a/.hg/strip-backup/5c095ad7e90f-d323a1e4-backup.hg (glob)
+ saved backup bundle to $TESTTMP/a/.hg/strip-backup/5c095ad7e90f-d323a1e4-backup.hg
$ hg graft tmp
skipping already grafted revision 8:7a4785234d87 (2:ef0ef43d49e7 also has unknown origin 5c095ad7e90f)
[255]
--- a/tests/test-hardlinks.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-hardlinks.t Mon Jan 22 17:53:02 2018 -0500
@@ -155,7 +155,7 @@
$ cd r3
$ hg push
- pushing to $TESTTMP/r1 (glob)
+ pushing to $TESTTMP/r1
searching for changes
adding changesets
adding manifests
--- a/tests/test-help.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-help.t Mon Jan 22 17:53:02 2018 -0500
@@ -110,6 +110,7 @@
environment Environment Variables
extensions Using Additional Features
filesets Specifying File Sets
+ flags Command-line flags
glossary Glossary
hgignore Syntax for Mercurial Ignore Files
hgweb Configuring hgweb
@@ -188,6 +189,7 @@
environment Environment Variables
extensions Using Additional Features
filesets Specifying File Sets
+ flags Command-line flags
glossary Glossary
hgignore Syntax for Mercurial Ignore Files
hgweb Configuring hgweb
@@ -259,6 +261,7 @@
eol automatically manage newlines in repository files
extdiff command to allow external programs to compare revisions
factotum http authentication with factotum
+ githelp try mapping git commands to Mercurial commands
gpg commands to sign and verify changesets
hgk browse the repository in a graphical way
highlight syntax highlighting for hgweb (requires Pygments)
@@ -865,6 +868,7 @@
environment Environment Variables
extensions Using Additional Features
filesets Specifying File Sets
+ flags Command-line flags
glossary Glossary
hgignore Syntax for Mercurial Ignore Files
hgweb Configuring hgweb
@@ -895,6 +899,8 @@
builds a repo with a given DAG from scratch in the current
empty repo
debugbundle lists the contents of a bundle
+ debugcapabilities
+ lists the capabilities of a remote peer
debugcheckstate
validate the correctness of the current dirstate
debugcolor show available color, effects or style
@@ -914,9 +920,12 @@
show the contents of the current dirstate
debugdiscovery
runs the changeset discovery protocol in isolation
+ debugdownload
+ download a resource using Mercurial logic and config
debugextensions
show information about active extensions
debugfileset parse and apply a fileset specification
+ debugformat display format information about the current repository
debugfsinfo show information detected about current filesystem
debuggetbundle
retrieves a bundle from a repo
@@ -2011,6 +2020,13 @@
Specifying File Sets
</td></tr>
<tr><td>
+ <a href="/help/flags">
+ flags
+ </a>
+ </td><td>
+ Command-line flags
+ </td></tr>
+ <tr><td>
<a href="/help/glossary">
glossary
</a>
--- a/tests/test-hgignore.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-hgignore.t Mon Jan 22 17:53:02 2018 -0500
@@ -59,9 +59,9 @@
I dir/c.o
$ hg debugignore dir/c.o dir/missing.o
- dir/c.o is ignored (glob)
+ dir/c.o is ignored
(ignore rule in $TESTTMP/ignorerepo/.hgignore, line 1: 'dir/.*\.o') (glob)
- dir/missing.o is ignored (glob)
+ dir/missing.o is ignored
(ignore rule in $TESTTMP/ignorerepo/.hgignore, line 1: 'dir/.*\.o') (glob)
$ cd dir
$ hg debugignore c.o missing.o
@@ -164,7 +164,7 @@
$ echo "syntax: invalid" > .hgignore
$ hg status
- $TESTTMP/ignorerepo/.hgignore: ignoring invalid syntax 'invalid' (glob)
+ $TESTTMP/ignorerepo/.hgignore: ignoring invalid syntax 'invalid'
A dir/b.o
? .hgignore
? a.c
@@ -236,7 +236,7 @@
$ hg debugignore a.c
a.c is not ignored
$ hg debugignore dir/c.o
- dir/c.o is ignored (glob)
+ dir/c.o is ignored
(ignore rule in $TESTTMP/ignorerepo/.hgignore, line 2: 'dir/**/c.o') (glob)
Check using 'include:' in ignore file
@@ -265,7 +265,7 @@
$ cp otherignore goodignore
$ echo "include:badignore" >> otherignore
$ hg status
- skipping unreadable pattern file 'badignore': No such file or directory
+ skipping unreadable pattern file 'badignore': $ENOENT$
A dir/b.o
$ mv goodignore otherignore
@@ -322,7 +322,7 @@
$ hg status | grep file2
[1]
$ hg debugignore dir1/file2
- dir1/file2 is ignored (glob)
+ dir1/file2 is ignored
(ignore rule in dir2/.hgignore, line 1: 'file*2')
#if windows
--- a/tests/test-hgrc.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-hgrc.t Mon Jan 22 17:53:02 2018 -0500
@@ -30,7 +30,7 @@
$ cat .hg/hgrc
# example repository config (see 'hg help config' for more info)
[paths]
- default = $TESTTMP/foo%bar (glob)
+ default = $TESTTMP/foo%bar
# path aliases to other clones of this repo in URLs or filesystem paths
# (see 'hg help config.paths' for more info)
@@ -43,10 +43,10 @@
# name and email (local to this repository, optional), e.g.
# username = Jane Doe <jdoe@example.com>
$ hg paths
- default = $TESTTMP/foo%bar (glob)
+ default = $TESTTMP/foo%bar
$ hg showconfig
- bundle.mainreporoot=$TESTTMP/foobar (glob)
- paths.default=$TESTTMP/foo%bar (glob)
+ bundle.mainreporoot=$TESTTMP/foobar
+ paths.default=$TESTTMP/foo%bar
$ cd ..
issue1829: wrong indentation
@@ -242,4 +242,4 @@
$ hg showconfig --debug paths
plain: True
read config from: $TESTTMP/hgrc
- $TESTTMP/hgrc:17: paths.foo=$TESTTMP/bar (glob)
+ $TESTTMP/hgrc:17: paths.foo=$TESTTMP/bar
--- a/tests/test-hgweb-bundle.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-hgweb-bundle.t Mon Jan 22 17:53:02 2018 -0500
@@ -18,7 +18,7 @@
$ hg strip -r 1
0 files updated, 0 files merged, 1 files removed, 0 files unresolved
- saved backup bundle to $TESTTMP/server/.hg/strip-backup/ed602e697e0f-cc9fff6a-backup.hg (glob)
+ saved backup bundle to $TESTTMP/server/.hg/strip-backup/ed602e697e0f-cc9fff6a-backup.hg
Serve from a bundle file
--- a/tests/test-hgweb-commands.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-hgweb-commands.t Mon Jan 22 17:53:02 2018 -0500
@@ -775,7 +775,7 @@
<td class="author">test</td>
<td class="description">
<a href="/rev/cad8025a2e87">branch commit with null character: </a>
- <span class="branchhead">unstable</span> <span class="tag">tip</span> <span class="tag">something</span>
+ <span class="phase">draft</span> <span class="branchhead">unstable</span> <span class="tag">tip</span> <span class="tag">something</span>
</td>
</tr>
<tr>
@@ -783,7 +783,7 @@
<td class="author">test</td>
<td class="description">
<a href="/rev/1d22e65f027e">branch</a>
- <span class="branchhead">stable</span>
+ <span class="phase">draft</span> <span class="branchhead">stable</span>
</td>
</tr>
<tr>
@@ -791,7 +791,7 @@
<td class="author">test</td>
<td class="description">
<a href="/rev/a4f92ed23982">Added tag 1.0 for changeset 2ef0ac749a14</a>
- <span class="branchhead">default</span>
+ <span class="phase">draft</span> <span class="branchhead">default</span>
</td>
</tr>
<tr>
@@ -799,7 +799,7 @@
<td class="author">test</td>
<td class="description">
<a href="/rev/2ef0ac749a14">base</a>
- <span class="tag">1.0</span> <span class="tag">anotherthing</span>
+ <span class="phase">draft</span> <span class="tag">1.0</span> <span class="tag">anotherthing</span>
</td>
</tr>
@@ -816,7 +816,7 @@
ajaxScrollInit(
'/shortlog/%next%',
'', <!-- NEXTHASH
- function (htmlText, previousVal) {
+ function (htmlText) {
var m = htmlText.match(/'(\w+)', <!-- NEXTHASH/);
return m ? m[1] : null;
},
@@ -880,7 +880,7 @@
<h2 class="breadcrumb"><a href="/">Mercurial</a> </h2>
<h3>
changeset 0:<a href="/rev/2ef0ac749a14">2ef0ac749a14</a>
- <span class="tag">1.0</span> <span class="tag">anotherthing</span>
+ <span class="phase">draft</span> <span class="tag">1.0</span> <span class="tag">anotherthing</span>
</h3>
@@ -902,6 +902,7 @@
<th class="date">date</th>
<td class="date age">Thu, 01 Jan 1970 00:00:00 +0000</td>
</tr>
+
<tr>
<th class="author">parents</th>
<td class="author"></td>
@@ -1054,7 +1055,7 @@
<td class="author">test</td>
<td class="description">
<a href="/rev/2ef0ac749a14">base</a>
- <span class="tag">1.0</span> <span class="tag">anotherthing</span>
+ <span class="phase">draft</span> <span class="tag">1.0</span> <span class="tag">anotherthing</span>
</td>
</tr>
@@ -1312,7 +1313,7 @@
<h2 class="breadcrumb"><a href="/">Mercurial</a> </h2>
<h3>
view foo @ 1:<a href="/rev/a4f92ed23982">a4f92ed23982</a>
-
+ <span class="phase">draft</span> <span class="branchhead">default</span>
</h3>
@@ -1446,7 +1447,7 @@
<h2 class="breadcrumb"><a href="/">Mercurial</a> </h2>
<h3>
view foo @ 2:<a href="/rev/1d22e65f027e">1d22e65f027e</a>
- <span class="branchname">stable</span>
+ <span class="phase">draft</span> <span class="branchhead">stable</span>
</h3>
@@ -1572,7 +1573,7 @@
<table cellspacing="0">
<tr><td>description</td><td>unknown</td></tr>
<tr><td>owner</td><td>Foo Bar <foo.bar@example.com></td></tr>
- <tr><td>last change</td><td>Thu, 01 Jan 1970 00:00:00 +0000</td></tr>
+ <tr><td>last change</td><td class="date age">Thu, 01 Jan 1970 00:00:00 +0000</td></tr>
</table>
<div><a class="title" href="/shortlog?style=gitweb">changes</a></div>
@@ -1584,7 +1585,7 @@
<td>
<a class="list" href="/rev/cad8025a2e87?style=gitweb">
<b>branch commit with null character: </b>
- <span class="logtags"><span class="branchtag" title="unstable">unstable</span> <span class="tagtag" title="tip">tip</span> <span class="bookmarktag" title="something">something</span> </span>
+ <span class="logtags"><span class="phasetag" title="draft">draft</span> <span class="branchtag" title="unstable">unstable</span> <span class="tagtag" title="tip">tip</span> <span class="bookmarktag" title="something">something</span> </span>
</a>
</td>
<td class="link" nowrap>
@@ -1598,7 +1599,7 @@
<td>
<a class="list" href="/rev/1d22e65f027e?style=gitweb">
<b>branch</b>
- <span class="logtags"><span class="branchtag" title="stable">stable</span> </span>
+ <span class="logtags"><span class="phasetag" title="draft">draft</span> <span class="branchtag" title="stable">stable</span> </span>
</a>
</td>
<td class="link" nowrap>
@@ -1612,7 +1613,7 @@
<td>
<a class="list" href="/rev/a4f92ed23982?style=gitweb">
<b>Added tag 1.0 for changeset 2ef0ac749a14</b>
- <span class="logtags"><span class="branchtag" title="default">default</span> </span>
+ <span class="logtags"><span class="phasetag" title="draft">draft</span> <span class="branchtag" title="default">default</span> </span>
</a>
</td>
<td class="link" nowrap>
@@ -1626,7 +1627,7 @@
<td>
<a class="list" href="/rev/2ef0ac749a14?style=gitweb">
<b>base</b>
- <span class="logtags"><span class="tagtag" title="1.0">1.0</span> <span class="bookmarktag" title="anotherthing">anotherthing</span> </span>
+ <span class="logtags"><span class="phasetag" title="draft">draft</span> <span class="tagtag" title="1.0">1.0</span> <span class="bookmarktag" title="anotherthing">anotherthing</span> </span>
</a>
</td>
<td class="link" nowrap>
@@ -1739,7 +1740,6 @@
href="/atom-log" title="Atom feed for test"/>
<link rel="alternate" type="application/rss+xml"
href="/rss-log" title="RSS feed for test"/>
- <!--[if IE]><script type="text/javascript" src="/static/excanvas.js"></script><![endif]-->
</head>
<body>
@@ -1780,66 +1780,51 @@
<noscript>The revision graph only works with JavaScript-enabled browsers.</noscript>
<div id="wrapper">
- <ul id="nodebgs"></ul>
- <canvas id="graph" width="39" height="168"></canvas>
- <ul id="graphnodes"></ul>
+ <canvas id="graph"></canvas>
+ <ul id="graphnodes"><li class="parity0" data-node="cad8025a2e87">
+ <div class="fg">
+ <span class="desc">
+ <a class="list" href="/rev/cad8025a2e87?style=gitweb"><b>branch commit with null character: </b></a>
+ </span>
+ <span class="logtags"><span class="phasetag" title="draft">draft</span> <span class="branchtag" title="unstable">unstable</span> <span class="tagtag" title="tip">tip</span> <span class="bookmarktag" title="something">something</span> </span>
+ <div class="info">1970-01-01, by test</div>
+ </div>
+ </li>
+ <li class="parity1" data-node="1d22e65f027e">
+ <div class="fg">
+ <span class="desc">
+ <a class="list" href="/rev/1d22e65f027e?style=gitweb"><b>branch</b></a>
+ </span>
+ <span class="logtags"><span class="phasetag" title="draft">draft</span> <span class="branchtag" title="stable">stable</span> </span>
+ <div class="info">1970-01-01, by test</div>
+ </div>
+ </li>
+ <li class="parity0" data-node="a4f92ed23982">
+ <div class="fg">
+ <span class="desc">
+ <a class="list" href="/rev/a4f92ed23982?style=gitweb"><b>Added tag 1.0 for changeset 2ef0ac749a14</b></a>
+ </span>
+ <span class="logtags"><span class="phasetag" title="draft">draft</span> <span class="branchtag" title="default">default</span> </span>
+ <div class="info">1970-01-01, by test</div>
+ </div>
+ </li>
+ <li class="parity1" data-node="2ef0ac749a14">
+ <div class="fg">
+ <span class="desc">
+ <a class="list" href="/rev/2ef0ac749a14?style=gitweb"><b>base</b></a>
+ </span>
+ <span class="logtags"><span class="phasetag" title="draft">draft</span> <span class="tagtag" title="1.0">1.0</span> <span class="bookmarktag" title="anotherthing">anotherthing</span> </span>
+ <div class="info">1970-01-01, by test</div>
+ </div>
+ </li>
+ </ul>
</div>
<script>
- <!-- hide script content
-
- var data = [["cad8025a2e87", [0, 1], [[0, 0, 1, 3, "FF0000"]], "branch commit with null character: \u0000", "test", "1970-01-01", ["unstable", true], ["tip"], ["something"]], ["1d22e65f027e", [0, 1], [[0, 0, 1, 3, ""]], "branch", "test", "1970-01-01", ["stable", true], [], []], ["a4f92ed23982", [0, 1], [[0, 0, 1, 3, ""]], "Added tag 1.0 for changeset 2ef0ac749a14", "test", "1970-01-01", ["default", true], [], []], ["2ef0ac749a14", [0, 1], [], "base", "test", "1970-01-01", ["default", false], ["1.0"], ["anotherthing"]]];
+ var data = [{"edges": [[0, 0, 1, 3, "FF0000"]], "graphnode": "@o", "node": "cad8025a2e87", "vertex": [0, 1]}, {"edges": [[0, 0, 1, 3, ""]], "graphnode": "o", "node": "1d22e65f027e", "vertex": [0, 1]}, {"edges": [[0, 0, 1, 3, ""]], "graphnode": "o", "node": "a4f92ed23982", "vertex": [0, 1]}, {"edges": [], "graphnode": "o", "node": "2ef0ac749a14", "vertex": [0, 1]}];
var graph = new Graph();
graph.scale(39);
-
- graph.vertex = function(x, y, color, parity, cur) {
-
- this.ctx.beginPath();
- color = this.setColor(color, 0.25, 0.75);
- this.ctx.arc(x, y, radius, 0, Math.PI * 2, true);
- this.ctx.fill();
-
- var bg = '<li class="bg parity' + parity + '"></li>';
- var left = (this.bg_height - this.box_size) + (this.columns + 1) * this.box_size;
- var nstyle = 'padding-left: ' + left + 'px;';
-
- var tagspan = '';
- if (cur[7].length || cur[8].length || (cur[6][0] != 'default' || cur[6][1])) {
- tagspan = '<span class="logtags">';
- if (cur[6][1]) {
- tagspan += '<span class="branchtag" title="' + cur[6][0] + '">';
- tagspan += cur[6][0] + '</span> ';
- } else if (!cur[6][1] && cur[6][0] != 'default') {
- tagspan += '<span class="inbranchtag" title="' + cur[6][0] + '">';
- tagspan += cur[6][0] + '</span> ';
- }
- if (cur[7].length) {
- for (var t in cur[7]) {
- var tag = cur[7][t];
- tagspan += '<span class="tagtag">' + tag + '</span> ';
- }
- }
- if (cur[8].length) {
- for (var t in cur[8]) {
- var bookmark = cur[8][t];
- tagspan += '<span class="bookmarktag">' + bookmark + '</span> ';
- }
- }
- tagspan += '</span>';
- }
-
- var item = '<li style="' + nstyle + '"><span class="desc">';
- item += '<a class="list" href="/rev/' + cur[0] + '?style=gitweb" title="' + cur[0] + '"><b>' + cur[3] + '</b></a>';
- item += '</span> ' + tagspan + '';
- item += '<span class="info">' + cur[5] + ', by ' + cur[4] + '</span></li>';
-
- return [bg, item];
-
- }
-
graph.render(data);
-
- // stop hiding script -->
</script>
<div class="extra_nav">
@@ -1850,9 +1835,12 @@
<script type="text/javascript">
ajaxScrollInit(
- '/graph/3?revcount=%next%&style=gitweb',
- 60+60,
- function (htmlText, previousVal) { return previousVal + 60; },
+ '/graph/%next%?graphtop=cad8025a2e87f88c06259790adfa15acb4080123&style=gitweb',
+ '', <!-- NEXTHASH
+ function (htmlText) {
+ var m = htmlText.match(/'(\w+)', <!-- NEXTHASH/);
+ return m ? m[1] : null;
+ },
'#wrapper',
'<div class="%class%" style="text-align: center;">%text%</div>',
'graph'
@@ -1926,7 +1914,7 @@
$ get-with-headers.py $LOCALIP:$HGPORT '?cmd=capabilities'; echo
200 Script output follows
- lookup changegroupsubset branchmap pushkey known getbundle unbundlehash batch bundle2=HG20%0Achangegroup%3D01%2C02%0Adigests%3Dmd5%2Csha1%2Csha512%0Aerror%3Dabort%2Cunsupportedcontent%2Cpushraced%2Cpushkey%0Ahgtagsfnodes%0Alistkeys%0Aphases%3Dheads%0Apushkey%0Aremote-changegroup%3Dhttp%2Chttps unbundle=HG10GZ,HG10BZ,HG10UN httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx compression=*zlib (glob)
+ lookup changegroupsubset branchmap pushkey known getbundle unbundlehash batch $USUAL_BUNDLE2_CAPS_SERVER$ unbundle=HG10GZ,HG10BZ,HG10UN httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx compression=*zlib (glob)
heads
@@ -2071,30 +2059,25 @@
margin: 0;
}
- ul#nodebgs {
+ ul#graphnodes {
list-style: none inside none;
padding: 0;
margin: 0;
- top: -0.7em;
- }
-
- ul#graphnodes li, ul#nodebgs li {
- height: 39px;
}
- ul#graphnodes {
+ ul#graphnodes li {
+ height: 37px;
+ overflow: visible;
+ padding-top: 2px;
+ }
+
+ ul#graphnodes li .fg {
position: absolute;
z-index: 10;
- top: -0.85em;
- list-style: none inside none;
- padding: 0;
}
ul#graphnodes li .info {
- display: block;
font-size: 70%;
- position: relative;
- top: -1px;
}
Stop and restart the server at the directory different from the repository
@@ -2126,27 +2109,6 @@
-Stop and restart with HGENCODING=cp932 and preferuncompressed
-
- $ killdaemons.py
- $ HGENCODING=cp932 hg serve --config server.preferuncompressed=True -n test \
- > -p $HGPORT -d --pid-file=hg.pid -E errors.log
- $ cat hg.pid >> $DAEMON_PIDS
-
-commit message with Japanese Kanji 'Noh', which ends with '\x5c'
-
- $ echo foo >> foo
- $ HGENCODING=cp932 hg ci -m `$PYTHON -c 'print("\x94\x5c")'`
-
-Graph json escape of multibyte character
-
- $ get-with-headers.py $LOCALIP:$HGPORT 'graph/' > out
- >>> from __future__ import print_function
- >>> for line in open("out"):
- ... if line.startswith("var data ="):
- ... print(line, end='')
- var data = [["061dd13ba3c3", [0, 1], [[0, 0, 1, -1, ""]], "\u80fd", "test", "1970-01-01", ["unstable", true], ["tip"], ["something"]], ["cad8025a2e87", [0, 1], [[0, 0, 1, 3, "FF0000"]], "branch commit with null character: \u0000", "test", "1970-01-01", ["unstable", false], [], []], ["1d22e65f027e", [0, 1], [[0, 0, 1, 3, ""]], "branch", "test", "1970-01-01", ["stable", true], [], []], ["a4f92ed23982", [0, 1], [[0, 0, 1, 3, ""]], "Added tag 1.0 for changeset 2ef0ac749a14", "test", "1970-01-01", ["default", true], [], []], ["2ef0ac749a14", [0, 1], [], "base", "test", "1970-01-01", ["default", false], ["1.0"], ["anotherthing"]]];
-
capabilities
(plain version to check the format)
@@ -2174,7 +2136,7 @@
batch
stream-preferred
streamreqs=generaldelta,revlogv1
- bundle2=HG20%0Achangegroup%3D01%2C02%0Adigests%3Dmd5%2Csha1%2Csha512%0Aerror%3Dabort%2Cunsupportedcontent%2Cpushraced%2Cpushkey%0Ahgtagsfnodes%0Alistkeys%0Aphases%3Dheads%0Apushkey%0Aremote-changegroup%3Dhttp%2Chttps
+ $USUAL_BUNDLE2_CAPS_SERVER$
unbundle=HG10GZ,HG10BZ,HG10UN
httpheader=1024
httpmediatype=0.1rx,0.1tx,0.2tx
--- a/tests/test-hgweb-csp.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-hgweb-csp.t Mon Jan 22 17:53:02 2018 -0500
@@ -65,7 +65,6 @@
$ get-with-headers.py localhost:$HGPORT repo1/graph/tip | egrep 'content-security-policy|<script'
<script type="text/javascript" src="/repo1/static/mercurial.js"></script>
- <!--[if IE]><script type="text/javascript" src="/repo1/static/excanvas.js"></script><![endif]-->
<script type="text/javascript">
<script type="text/javascript">
@@ -102,7 +101,6 @@
$ get-with-headers.py localhost:$HGPORT repo1/graph/tip content-security-policy | egrep 'content-security-policy|<script'
content-security-policy: image-src 'self'; script-src https://example.com/ 'nonce-*' (glob)
<script type="text/javascript" src="/repo1/static/mercurial.js"></script>
- <!--[if IE]><script type="text/javascript" src="/repo1/static/excanvas.js"></script><![endif]-->
<script type="text/javascript" nonce="*"> (glob)
<script type="text/javascript" nonce="*"> (glob)
@@ -124,6 +122,5 @@
$ get-with-headers.py localhost:$HGPORT graph/tip content-security-policy | egrep 'content-security-policy|<script'
content-security-policy: image-src 'self'; script-src https://example.com/ 'nonce-*' (glob)
<script type="text/javascript" src="/static/mercurial.js"></script>
- <!--[if IE]><script type="text/javascript" src="/static/excanvas.js"></script><![endif]-->
<script type="text/javascript" nonce="*"> (glob)
<script type="text/javascript" nonce="*"> (glob)
--- a/tests/test-hgweb-descend-empties.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-hgweb-descend-empties.t Mon Jan 22 17:53:02 2018 -0500
@@ -73,7 +73,7 @@
<h2 class="breadcrumb"><a href="/">Mercurial</a> </h2>
<h3>
directory / @ 0:<a href="/rev/c9f45f7a1659">c9f45f7a1659</a>
- <span class="tag">tip</span>
+ <span class="phase">draft</span> <span class="branchhead">default</span> <span class="tag">tip</span>
</h3>
@@ -193,7 +193,7 @@
<h2 class="breadcrumb"><a href="/">Mercurial</a> </h2>
<h3>
directory / @ 0:<a href="/rev/c9f45f7a1659?style=coal">c9f45f7a1659</a>
- <span class="tag">tip</span>
+ <span class="phase">draft</span> <span class="branchhead">default</span> <span class="tag">tip</span>
</h3>
@@ -317,7 +317,7 @@
</ul>
<h2 class="no-link no-border">files</h2>
- <p class="files">/ <span class="logtags"><span class="branchtag" title="default">default</span> <span class="tagtag" title="tip">tip</span> </span></p>
+ <p class="files">/ <span class="logtags"><span class="phasetag" title="draft">draft</span> <span class="branchtag" title="default">default</span> <span class="tagtag" title="tip">tip</span> </span></p>
<table>
<tr class="parity0">
@@ -431,7 +431,7 @@
</div>
</div>
- <div class="title">/ <span class="logtags"><span class="branchtag" title="default">default</span> <span class="tagtag" title="tip">tip</span> </span></div>
+ <div class="title">/ <span class="logtags"><span class="phasetag" title="draft">draft</span> <span class="branchtag" title="default">default</span> <span class="tagtag" title="tip">tip</span> </span></div>
<table cellspacing="0">
<tr class="parity0">
<td style="font-family:monospace">drwxr-xr-x</td>
--- a/tests/test-hgweb-diffs.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-hgweb-diffs.t Mon Jan 22 17:53:02 2018 -0500
@@ -81,7 +81,7 @@
<h2 class="breadcrumb"><a href="/">Mercurial</a> </h2>
<h3>
changeset 0:<a href="/rev/0cd96de13884">0cd96de13884</a>
-
+ <span class="phase">draft</span>
</h3>
@@ -103,6 +103,7 @@
<th class="date">date</th>
<td class="date age">Thu, 01 Jan 1970 00:00:00 +0000</td>
</tr>
+
<tr>
<th class="author">parents</th>
<td class="author"></td>
@@ -254,7 +255,7 @@
<h2 class="breadcrumb"><a href="/">Mercurial</a> </h2>
<h3>
diff b @ 1:<a href="/rev/559edbd9ed20">559edbd9ed20</a>
- <span class="tag">tip</span>
+ <span class="phase">draft</span> <span class="branchhead">default</span> <span class="tag">tip</span>
</h3>
@@ -376,7 +377,7 @@
<h2 class="breadcrumb"><a href="/">Mercurial</a> </h2>
<h3>
changeset 0:<a href="/rev/0cd96de13884">0cd96de13884</a>
-
+ <span class="phase">draft</span>
</h3>
@@ -398,6 +399,7 @@
<th class="date">date</th>
<td class="date age">Thu, 01 Jan 1970 00:00:00 +0000</td>
</tr>
+
<tr>
<th class="author">parents</th>
<td class="author"></td>
@@ -553,7 +555,7 @@
<h2 class="breadcrumb"><a href="/">Mercurial</a> </h2>
<h3>
diff a @ 1:<a href="/rev/559edbd9ed20">559edbd9ed20</a>
- <span class="tag">tip</span>
+ <span class="phase">draft</span> <span class="branchhead">default</span> <span class="tag">tip</span>
</h3>
@@ -659,7 +661,7 @@
<h2 class="breadcrumb"><a href="/">Mercurial</a> </h2>
<h3>
comparison a @ 0:<a href="/rev/0cd96de13884">0cd96de13884</a>
-
+ <span class="phase">draft</span>
</h3>
@@ -789,7 +791,7 @@
<h2 class="breadcrumb"><a href="/">Mercurial</a> </h2>
<h3>
comparison a @ 2:<a href="/rev/d73db4d812ff">d73db4d812ff</a>
- <span class="tag">tip</span>
+ <span class="phase">draft</span> <span class="branchhead">default</span> <span class="tag">tip</span>
</h3>
@@ -921,7 +923,7 @@
<h2 class="breadcrumb"><a href="/">Mercurial</a> </h2>
<h3>
comparison a @ 3:<a href="/rev/20e80271eb7a">20e80271eb7a</a>
- <span class="tag">tip</span>
+ <span class="phase">draft</span> <span class="branchhead">default</span> <span class="tag">tip</span>
</h3>
@@ -1059,7 +1061,7 @@
<h2 class="breadcrumb"><a href="/">Mercurial</a> </h2>
<h3>
comparison e @ 5:<a href="/rev/41d9fc4a6ae1">41d9fc4a6ae1</a>
- <span class="tag">tip</span>
+ <span class="phase">draft</span> <span class="branchhead">default</span> <span class="tag">tip</span>
</h3>
--- a/tests/test-hgweb-empty.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-hgweb-empty.t Mon Jan 22 17:53:02 2018 -0500
@@ -96,7 +96,7 @@
ajaxScrollInit(
'/shortlog/%next%',
'', <!-- NEXTHASH
- function (htmlText, previousVal) {
+ function (htmlText) {
var m = htmlText.match(/'(\w+)', <!-- NEXTHASH/);
return m ? m[1] : null;
},
@@ -207,7 +207,7 @@
ajaxScrollInit(
'/shortlog/%next%',
'', <!-- NEXTHASH
- function (htmlText, previousVal) {
+ function (htmlText) {
var m = htmlText.match(/'(\w+)', <!-- NEXTHASH/);
return m ? m[1] : null;
},
@@ -242,7 +242,6 @@
href="/atom-log" title="Atom feed for test: log" />
<link rel="alternate" type="application/rss+xml"
href="/rss-log" title="RSS feed for test: log" />
- <!--[if IE]><script type="text/javascript" src="/static/excanvas.js"></script><![endif]-->
</head>
<body>
@@ -294,65 +293,15 @@
<noscript><p>The revision graph only works with JavaScript-enabled browsers.</p></noscript>
<div id="wrapper">
- <ul id="nodebgs" class="stripes2"></ul>
- <canvas id="graph" width="39" height="12"></canvas>
- <ul id="graphnodes"></ul>
+ <canvas id="graph"></canvas>
+ <ul id="graphnodes" class="stripes2"></ul>
</div>
<script type="text/javascript">
- <!-- hide script content
-
var data = [];
var graph = new Graph();
graph.scale(39);
-
- graph.vertex = function(x, y, color, parity, cur) {
-
- this.ctx.beginPath();
- color = this.setColor(color, 0.25, 0.75);
- this.ctx.arc(x, y, radius, 0, Math.PI * 2, true);
- this.ctx.fill();
-
- var bg = '<li class="bg"></li>';
- var left = (this.bg_height - this.box_size) + (this.columns + 1) * this.box_size;
- var nstyle = 'padding-left: ' + left + 'px;';
-
- var tagspan = '';
- if (cur[7].length || cur[8].length || (cur[6][0] != 'default' || cur[6][1])) {
- tagspan = '<span class="logtags">';
- if (cur[6][1]) {
- tagspan += '<span class="branchhead" title="' + cur[6][0] + '">';
- tagspan += cur[6][0] + '</span> ';
- } else if (!cur[6][1] && cur[6][0] != 'default') {
- tagspan += '<span class="branchname" title="' + cur[6][0] + '">';
- tagspan += cur[6][0] + '</span> ';
- }
- if (cur[7].length) {
- for (var t in cur[7]) {
- var tag = cur[7][t];
- tagspan += '<span class="tag">' + tag + '</span> ';
- }
- }
- if (cur[8].length) {
- for (var b in cur[8]) {
- var bookmark = cur[8][b];
- tagspan += '<span class="tag">' + bookmark + '</span> ';
- }
- }
- tagspan += '</span>';
- }
-
- var item = '<li style="' + nstyle + '"><span class="desc">';
- item += '<a href="/rev/' + cur[0] + '" title="' + cur[0] + '">' + cur[3] + '</a>';
- item += '</span>' + tagspan + '<span class="info">' + cur[5] + ', by ' + cur[4] + '</span></li>';
-
- return [bg, item];
-
- }
-
graph.render(data);
-
- // stop hiding script -->
</script>
<div class="navigate">
@@ -363,9 +312,12 @@
<script type="text/javascript">
ajaxScrollInit(
- '/graph/-1?revcount=%next%&style=paper',
- 60+60,
- function (htmlText, previousVal) { return previousVal + 60; },
+ '/graph/%next%?graphtop=0000000000000000000000000000000000000000',
+ '', <!-- NEXTHASH
+ function (htmlText) {
+ var m = htmlText.match(/'(\w+)', <!-- NEXTHASH/);
+ return m ? m[1] : null;
+ },
'#wrapper',
'<div class="%class%" style="text-align: center;">%text%</div>',
'graph'
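
The graph paging rewritten above no longer adds 60 to a revcount; instead the page embeds the hash of the next page's first changeset on the "'', <!-- NEXTHASH" line and the loader scrapes it with the regex shown in the expected output. A minimal Python sketch of the same extraction, assuming a locally served repository on a hypothetical localhost:8000 (illustration only, not part of this patch):

    # Sketch: pull the next-page hash out of a rendered /graph page, using the
    # same pattern as the inline script in the expected output above.
    import re
    from urllib.request import urlopen

    def next_hash(html_text):
        m = re.search(r"'(\w+)', <!-- NEXTHASH", html_text)
        return m.group(1) if m else None  # None when the marker value is empty

    html = urlopen('http://localhost:8000/graph/tip').read().decode('utf-8', 'replace')
    print(next_hash(html))
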
--- a/tests/test-hgweb-filelog.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-hgweb-filelog.t Mon Jan 22 17:53:02 2018 -0500
@@ -189,7 +189,7 @@
<h2 class="breadcrumb"><a href="/">Mercurial</a> </h2>
<h3>
log a @ 4:<a href="/rev/3f41bc784e7e">3f41bc784e7e</a>
- <span class="branchname">a-branch</span>
+ <span class="phase">draft</span> <span class="branchname">a-branch</span>
</h3>
@@ -220,7 +220,7 @@
<td class="author">test</td>
<td class="description">
<a href="/rev/3f41bc784e7e">second a</a>
- <span class="branchname">a-branch</span>
+ <span class="phase">draft</span> <span class="branchname">a-branch</span>
</td>
</tr>
@@ -229,7 +229,7 @@
<td class="author">test</td>
<td class="description">
<a href="/rev/5ed941583260">first a</a>
- <span class="tag">a-tag</span> <span class="tag">a-bookmark</span>
+ <span class="phase">draft</span> <span class="tag">a-tag</span> <span class="tag">a-bookmark</span>
</td>
</tr>
@@ -312,7 +312,7 @@
<h2 class="breadcrumb"><a href="/">Mercurial</a> </h2>
<h3>
log a @ 4:<a href="/rev/3f41bc784e7e">3f41bc784e7e</a>
- <span class="branchname">a-branch</span>
+ <span class="phase">draft</span> <span class="branchname">a-branch</span>
</h3>
@@ -343,7 +343,7 @@
<td class="author">test</td>
<td class="description">
<a href="/rev/3f41bc784e7e">second a</a>
- <span class="branchname">a-branch</span>
+ <span class="phase">draft</span> <span class="branchname">a-branch</span>
</td>
</tr>
@@ -352,7 +352,7 @@
<td class="author">test</td>
<td class="description">
<a href="/rev/5ed941583260">first a</a>
- <span class="tag">a-tag</span> <span class="tag">a-bookmark</span>
+ <span class="phase">draft</span> <span class="tag">a-tag</span> <span class="tag">a-bookmark</span>
</td>
</tr>
@@ -435,7 +435,7 @@
<h2 class="breadcrumb"><a href="/">Mercurial</a> </h2>
<h3>
log a @ 1:<a href="/rev/5ed941583260">5ed941583260</a>
- <span class="tag">a-tag</span> <span class="tag">a-bookmark</span>
+ <span class="phase">draft</span> <span class="tag">a-tag</span> <span class="tag">a-bookmark</span>
</h3>
@@ -466,7 +466,7 @@
<td class="author">test</td>
<td class="description">
<a href="/rev/5ed941583260">first a</a>
- <span class="tag">a-tag</span> <span class="tag">a-bookmark</span>
+ <span class="phase">draft</span> <span class="tag">a-tag</span> <span class="tag">a-bookmark</span>
</td>
</tr>
@@ -549,7 +549,7 @@
<h2 class="breadcrumb"><a href="/">Mercurial</a> </h2>
<h3>
log a @ 1:<a href="/rev/5ed941583260">5ed941583260</a>
- <span class="tag">a-tag</span> <span class="tag">a-bookmark</span>
+ <span class="phase">draft</span> <span class="tag">a-tag</span> <span class="tag">a-bookmark</span>
</h3>
@@ -580,7 +580,7 @@
<td class="author">test</td>
<td class="description">
<a href="/rev/5ed941583260">first a</a>
- <span class="tag">a-tag</span> <span class="tag">a-bookmark</span>
+ <span class="phase">draft</span> <span class="tag">a-tag</span> <span class="tag">a-bookmark</span>
</td>
</tr>
@@ -740,8 +740,8 @@
<h2 class="breadcrumb"><a href="/">Mercurial</a> </h2>
<h3>
log c @ 7:<a href="/rev/46c1a66bd8fc">46c1a66bd8fc</a>
- <span class="branchname">a-branch</span> <span class="tag">tip</span>
- (following lines 1:2 <a href="/log/tip/c">back to filelog</a>)
+ <span class="phase">draft</span> <span class="branchhead">a-branch</span> <span class="tag">tip</span>
+ (following lines 1:2 <a href="/log/tip/c">all revisions for this file</a>)
</h3>
@@ -771,7 +771,7 @@
<td class="author">test</td>
<td class="description">
<a href="/rev/46c1a66bd8fc">change c</a>
- <span class="branchhead">a-branch</span> <span class="tag">tip</span>
+ <span class="phase">draft</span> <span class="branchhead">a-branch</span> <span class="tag">tip</span>
</td>
</tr>
@@ -780,7 +780,7 @@
<td class="author">test</td>
<td class="description">
<a href="/rev/6563da9dcf87">b</a>
-
+ <span class="phase">draft</span>
</td>
</tr>
@@ -860,8 +860,8 @@
<h2 class="breadcrumb"><a href="/">Mercurial</a> </h2>
<h3>
log c @ 7:<a href="/rev/46c1a66bd8fc?revcount=1">46c1a66bd8fc</a>
- <span class="branchname">a-branch</span> <span class="tag">tip</span>
- (following lines 1:2 <a href="/log/tip/c?revcount=1">back to filelog</a>)
+ <span class="phase">draft</span> <span class="branchhead">a-branch</span> <span class="tag">tip</span>
+ (following lines 1:2 <a href="/log/tip/c?revcount=1">all revisions for this file</a>)
</h3>
@@ -891,7 +891,7 @@
<td class="author">test</td>
<td class="description">
<a href="/rev/46c1a66bd8fc?revcount=1">change c</a>
- <span class="branchhead">a-branch</span> <span class="tag">tip</span>
+ <span class="phase">draft</span> <span class="branchhead">a-branch</span> <span class="tag">tip</span>
</td>
</tr>
@@ -1097,7 +1097,7 @@
<h2 class="breadcrumb"><a href="/">Mercurial</a> </h2>
<h3>
log a @ 4:<a href="/rev/3f41bc784e7e">3f41bc784e7e</a>
- <span class="branchname">a-branch</span>
+ <span class="phase">draft</span> <span class="branchname">a-branch</span>
</h3>
@@ -1128,7 +1128,7 @@
<td class="author">test</td>
<td class="description">
<a href="/rev/3f41bc784e7e">second a</a>
- <span class="branchname">a-branch</span>
+ <span class="phase">draft</span> <span class="branchname">a-branch</span>
</td>
</tr>
<tr><td colspan="3"><div class="bottomline inc-lineno"><pre class="sourcelines wrap">
@@ -1141,7 +1141,7 @@
<td class="author">test</td>
<td class="description">
<a href="/rev/5ed941583260">first a</a>
- <span class="tag">a-tag</span> <span class="tag">a-bookmark</span>
+ <span class="phase">draft</span> <span class="tag">a-tag</span> <span class="tag">a-bookmark</span>
</td>
</tr>
<tr><td colspan="3"><div class="bottomline inc-lineno"><pre class="sourcelines wrap">
@@ -1379,8 +1379,8 @@
<h2 class="breadcrumb"><a href="/">Mercurial</a> </h2>
<h3>
log c @ 12:<a href="/rev/6e4182052f7b">6e4182052f7b</a>
- <span class="branchname">a-branch</span> <span class="tag">tip</span>
- (following lines 3:4 <a href="/log/tip/c">back to filelog</a>)
+ <span class="phase">draft</span> <span class="branchhead">a-branch</span> <span class="tag">tip</span>
+ (following lines 3:4 <a href="/log/tip/c">all revisions for this file</a>)
</h3>
@@ -1410,7 +1410,7 @@
<td class="author">test</td>
<td class="description">
<a href="/rev/fb9bc322513a">touching beginning and end of c</a>
- <span class="branchname">a-branch</span>
+ <span class="phase">draft</span> <span class="branchname">a-branch</span>
</td>
</tr>
<tr><td colspan="3"><div class="bottomline inc-lineno"><pre class="sourcelines wrap">
@@ -1429,7 +1429,7 @@
<td class="author">test</td>
<td class="description">
<a href="/rev/e95928d60479">touch beginning of c</a>
- <span class="branchname">a-branch</span>
+ <span class="phase">draft</span> <span class="branchname">a-branch</span>
</td>
</tr>
<tr><td colspan="3"><div class="bottomline inc-lineno"><pre class="sourcelines wrap">
@@ -1449,7 +1449,7 @@
<td class="author">test</td>
<td class="description">
<a href="/rev/5c6574614c37">make c bigger and touch its beginning</a>
- <span class="branchname">a-branch</span>
+ <span class="phase">draft</span> <span class="branchname">a-branch</span>
</td>
</tr>
<tr><td colspan="3"><div class="bottomline inc-lineno"><pre class="sourcelines wrap">
@@ -1473,7 +1473,7 @@
<td class="author">test</td>
<td class="description">
<a href="/rev/46c1a66bd8fc">change c</a>
- <span class="branchname">a-branch</span>
+ <span class="phase">draft</span> <span class="branchname">a-branch</span>
</td>
</tr>
<tr><td colspan="3"><div class="bottomline inc-lineno"><pre class="sourcelines wrap">
@@ -1487,7 +1487,7 @@
<td class="author">test</td>
<td class="description">
<a href="/rev/6563da9dcf87">b</a>
-
+ <span class="phase">draft</span>
</td>
</tr>
<tr><td colspan="3"><div class="bottomline inc-lineno"><pre class="sourcelines wrap">
@@ -1636,8 +1636,8 @@
<h2 class="breadcrumb"><a href="/">Mercurial</a> </h2>
<h3>
log c @ 8:<a href="/rev/5c6574614c37">5c6574614c37</a>
- <span class="branchname">a-branch</span>
- (following lines 3:4, descending <a href="/log/8/c">back to filelog</a>)
+ <span class="phase">draft</span> <span class="branchname">a-branch</span>
+ (following lines 3:4, descending <a href="/log/8/c">all revisions for this file</a>)
</h3>
@@ -1667,7 +1667,7 @@
<td class="author">test</td>
<td class="description">
<a href="/rev/5c6574614c37">make c bigger and touch its beginning</a>
- <span class="branchname">a-branch</span>
+ <span class="phase">draft</span> <span class="branchname">a-branch</span>
</td>
</tr>
@@ -1676,7 +1676,7 @@
<td class="author">test</td>
<td class="description">
<a href="/rev/e95928d60479">touch beginning of c</a>
- <span class="branchname">a-branch</span>
+ <span class="phase">draft</span> <span class="branchname">a-branch</span>
</td>
</tr>
@@ -1685,7 +1685,7 @@
<td class="author">test</td>
<td class="description">
<a href="/rev/fb9bc322513a">touching beginning and end of c</a>
- <span class="branchname">a-branch</span>
+ <span class="phase">draft</span> <span class="branchname">a-branch</span>
</td>
</tr>
--- a/tests/test-hgweb-json.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-hgweb-json.t Mon Jan 22 17:53:02 2018 -0500
@@ -1335,7 +1335,356 @@
$ request json-graph
200 Script output follows
- "not yet implemented"
+ {
+ "changeset_count": 10,
+ "changesets": [
+ {
+ "bookmarks": [],
+ "branch": "default",
+ "col": 0,
+ "color": 1,
+ "date": [
+ 0.0,
+ 0
+ ],
+ "desc": "merge test-branch into default",
+ "edges": [
+ {
+ "bcolor": "",
+ "col": 0,
+ "color": 1,
+ "nextcol": 0,
+ "width": -1
+ },
+ {
+ "bcolor": "",
+ "col": 0,
+ "color": 1,
+ "nextcol": 1,
+ "width": -1
+ }
+ ],
+ "node": "cc725e08502a79dd1eda913760fbe06ed7a9abc7",
+ "parents": [
+ "ceed296fe500c3fac9541e31dad860cb49c89e45",
+ "ed66c30e87eb65337c05a4229efaa5f1d5285a90"
+ ],
+ "phase": "draft",
+ "row": 0,
+ "tags": [
+ "tip"
+ ],
+ "user": "test"
+ },
+ {
+ "bookmarks": [],
+ "branch": "test-branch",
+ "col": 1,
+ "color": 2,
+ "date": [
+ 0.0,
+ 0
+ ],
+ "desc": "another commit in test-branch",
+ "edges": [
+ {
+ "bcolor": "",
+ "col": 0,
+ "color": 1,
+ "nextcol": 0,
+ "width": -1
+ },
+ {
+ "bcolor": "",
+ "col": 1,
+ "color": 2,
+ "nextcol": 1,
+ "width": -1
+ }
+ ],
+ "node": "ed66c30e87eb65337c05a4229efaa5f1d5285a90",
+ "parents": [
+ "6ab967a8ab3489227a83f80e920faa039a71819f"
+ ],
+ "phase": "draft",
+ "row": 1,
+ "tags": [],
+ "user": "test"
+ },
+ {
+ "bookmarks": [],
+ "branch": "test-branch",
+ "col": 1,
+ "color": 2,
+ "date": [
+ 0.0,
+ 0
+ ],
+ "desc": "create test branch",
+ "edges": [
+ {
+ "bcolor": "",
+ "col": 0,
+ "color": 1,
+ "nextcol": 0,
+ "width": -1
+ },
+ {
+ "bcolor": "",
+ "col": 1,
+ "color": 2,
+ "nextcol": 1,
+ "width": -1
+ }
+ ],
+ "node": "6ab967a8ab3489227a83f80e920faa039a71819f",
+ "parents": [
+ "06e557f3edf66faa1ccaba5dd8c203c21cc79f1e"
+ ],
+ "phase": "draft",
+ "row": 2,
+ "tags": [],
+ "user": "test"
+ },
+ {
+ "bookmarks": [
+ "bookmark2"
+ ],
+ "branch": "default",
+ "col": 0,
+ "color": 1,
+ "date": [
+ 0.0,
+ 0
+ ],
+ "desc": "create tag2",
+ "edges": [
+ {
+ "bcolor": "",
+ "col": 0,
+ "color": 1,
+ "nextcol": 0,
+ "width": -1
+ },
+ {
+ "bcolor": "",
+ "col": 1,
+ "color": 2,
+ "nextcol": 1,
+ "width": -1
+ }
+ ],
+ "node": "ceed296fe500c3fac9541e31dad860cb49c89e45",
+ "parents": [
+ "f2890a05fea49bfaf9fb27ed5490894eba32da78"
+ ],
+ "phase": "draft",
+ "row": 3,
+ "tags": [],
+ "user": "test"
+ },
+ {
+ "bookmarks": [],
+ "branch": "default",
+ "col": 0,
+ "color": 1,
+ "date": [
+ 0.0,
+ 0
+ ],
+ "desc": "another commit to da/foo",
+ "edges": [
+ {
+ "bcolor": "",
+ "col": 0,
+ "color": 1,
+ "nextcol": 0,
+ "width": -1
+ },
+ {
+ "bcolor": "",
+ "col": 1,
+ "color": 2,
+ "nextcol": 1,
+ "width": -1
+ }
+ ],
+ "node": "f2890a05fea49bfaf9fb27ed5490894eba32da78",
+ "parents": [
+ "93a8ce14f89156426b7fa981af8042da53f03aa0"
+ ],
+ "phase": "draft",
+ "row": 4,
+ "tags": [
+ "tag2"
+ ],
+ "user": "test"
+ },
+ {
+ "bookmarks": [],
+ "branch": "default",
+ "col": 0,
+ "color": 1,
+ "date": [
+ 0.0,
+ 0
+ ],
+ "desc": "create tag",
+ "edges": [
+ {
+ "bcolor": "",
+ "col": 0,
+ "color": 1,
+ "nextcol": 0,
+ "width": -1
+ },
+ {
+ "bcolor": "",
+ "col": 1,
+ "color": 2,
+ "nextcol": 1,
+ "width": -1
+ }
+ ],
+ "node": "93a8ce14f89156426b7fa981af8042da53f03aa0",
+ "parents": [
+ "78896eb0e102174ce9278438a95e12543e4367a7"
+ ],
+ "phase": "public",
+ "row": 5,
+ "tags": [],
+ "user": "test"
+ },
+ {
+ "bookmarks": [],
+ "branch": "default",
+ "col": 0,
+ "color": 1,
+ "date": [
+ 0.0,
+ 0
+ ],
+ "desc": "move foo",
+ "edges": [
+ {
+ "bcolor": "",
+ "col": 0,
+ "color": 1,
+ "nextcol": 0,
+ "width": -1
+ },
+ {
+ "bcolor": "",
+ "col": 1,
+ "color": 2,
+ "nextcol": 1,
+ "width": -1
+ }
+ ],
+ "node": "78896eb0e102174ce9278438a95e12543e4367a7",
+ "parents": [
+ "8d7c456572acf3557e8ed8a07286b10c408bcec5"
+ ],
+ "phase": "public",
+ "row": 6,
+ "tags": [
+ "tag1"
+ ],
+ "user": "test"
+ },
+ {
+ "bookmarks": [
+ "bookmark1"
+ ],
+ "branch": "default",
+ "col": 0,
+ "color": 1,
+ "date": [
+ 0.0,
+ 0
+ ],
+ "desc": "modify da/foo",
+ "edges": [
+ {
+ "bcolor": "",
+ "col": 0,
+ "color": 1,
+ "nextcol": 0,
+ "width": -1
+ },
+ {
+ "bcolor": "",
+ "col": 1,
+ "color": 2,
+ "nextcol": 1,
+ "width": -1
+ }
+ ],
+ "node": "8d7c456572acf3557e8ed8a07286b10c408bcec5",
+ "parents": [
+ "f8bbb9024b10f93cdbb8d940337398291d40dea8"
+ ],
+ "phase": "public",
+ "row": 7,
+ "tags": [],
+ "user": "test"
+ },
+ {
+ "bookmarks": [],
+ "branch": "default",
+ "col": 0,
+ "color": 1,
+ "date": [
+ 0.0,
+ 0
+ ],
+ "desc": "modify foo",
+ "edges": [
+ {
+ "bcolor": "",
+ "col": 0,
+ "color": 1,
+ "nextcol": 0,
+ "width": -1
+ },
+ {
+ "bcolor": "",
+ "col": 1,
+ "color": 2,
+ "nextcol": 0,
+ "width": -1
+ }
+ ],
+ "node": "f8bbb9024b10f93cdbb8d940337398291d40dea8",
+ "parents": [
+ "06e557f3edf66faa1ccaba5dd8c203c21cc79f1e"
+ ],
+ "phase": "public",
+ "row": 8,
+ "tags": [],
+ "user": "test"
+ },
+ {
+ "bookmarks": [],
+ "branch": "default",
+ "col": 0,
+ "color": 2,
+ "date": [
+ 0.0,
+ 0
+ ],
+ "desc": "initial",
+ "edges": [],
+ "node": "06e557f3edf66faa1ccaba5dd8c203c21cc79f1e",
+ "parents": [],
+ "phase": "public",
+ "row": 9,
+ "tags": [],
+ "user": "test"
+ }
+ ],
+ "node": "cc725e08502a79dd1eda913760fbe06ed7a9abc7"
+ }
help/ shows help topics
@@ -1581,6 +1930,10 @@
"topic": "filesets"
},
{
+ "summary": "Command-line flags",
+ "topic": "flags"
+ },
+ {
"summary": "Glossary",
"topic": "glossary"
},
@@ -1644,3 +1997,28 @@
"rawdoc": "Working with Phases\n*", (glob)
"topic": "phases"
}
+
+Commit message with Japanese Kanji 'Noh', which ends with '\x5c'
+
+ $ echo foo >> da/foo
+ $ HGENCODING=cp932 hg ci -m `$PYTHON -c 'print("\x94\x5c")'`
+
+Commit message with null character
+
+ $ echo foo >> da/foo
+ >>> open('msg', 'wb').write('commit with null character: \0\n')
+ $ hg ci -l msg
+ $ rm msg
+
+Stop and restart with HGENCODING=cp932
+
+ $ killdaemons.py
+ $ HGENCODING=cp932 hg serve -p $HGPORT -d --pid-file=hg.pid \
+ > -A access.log -E error.log
+ $ cat hg.pid >> $DAEMON_PIDS
+
+Test json escape of multibyte characters
+
+ $ request json-filelog/tip/da/foo?revcount=2 | grep '"desc":'
+ "desc": "commit with null character: \u0000",
+ "desc": "\u80fd",
--- a/tests/test-hgweb-removed.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-hgweb-removed.t Mon Jan 22 17:53:02 2018 -0500
@@ -62,7 +62,7 @@
<h2 class="breadcrumb"><a href="/">Mercurial</a> </h2>
<h3>
changeset 1:<a href="/rev/c78f6c5cbea9">c78f6c5cbea9</a>
- <span class="tag">tip</span>
+ <span class="phase">draft</span> <span class="branchhead">default</span> <span class="tag">tip</span>
</h3>
@@ -84,6 +84,7 @@
<th class="date">date</th>
<td class="date age">Thu, 01 Jan 1970 00:00:00 +0000</td>
</tr>
+
<tr>
<th class="author">parents</th>
<td class="author"><a href="/rev/cb9a9f314b8b">cb9a9f314b8b</a> </td>
@@ -190,7 +191,7 @@
<h2 class="breadcrumb"><a href="/">Mercurial</a> </h2>
<h3>
diff a @ 1:<a href="/rev/c78f6c5cbea9">c78f6c5cbea9</a>
- <span class="tag">tip</span>
+ <span class="phase">draft</span> <span class="branchhead">default</span> <span class="tag">tip</span>
</h3>
--- a/tests/test-hgweb-symrev.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-hgweb-symrev.t Mon Jan 22 17:53:02 2018 -0500
@@ -59,6 +59,9 @@
<a href="/graph/tip?revcount=30&style=paper">less</a>
<a href="/graph/tip?revcount=120&style=paper">more</a>
| rev 2: <a href="/graph/43c799df6e75?style=paper">(0)</a> <a href="/graph/tip?style=paper">tip</a>
+ <a href="/rev/9d8c40cba617?style=paper">third</a>
+ <a href="/rev/a7c1559b7bba?style=paper">second</a>
+ <a href="/rev/43c799df6e75?style=paper">first</a>
<a href="/graph/tip?revcount=30&style=paper">less</a>
<a href="/graph/tip?revcount=120&style=paper">more</a>
| rev 2: <a href="/graph/43c799df6e75?style=paper">(0)</a> <a href="/graph/tip?style=paper">tip</a>
@@ -123,6 +126,8 @@
<a href="/graph/xyzzy?revcount=30&style=paper">less</a>
<a href="/graph/xyzzy?revcount=120&style=paper">more</a>
| rev 1: <a href="/graph/43c799df6e75?style=paper">(0)</a> <a href="/graph/tip?style=paper">tip</a>
+ <a href="/rev/a7c1559b7bba?style=paper">second</a>
+ <a href="/rev/43c799df6e75?style=paper">first</a>
<a href="/graph/xyzzy?revcount=30&style=paper">less</a>
<a href="/graph/xyzzy?revcount=120&style=paper">more</a>
| rev 1: <a href="/graph/43c799df6e75?style=paper">(0)</a> <a href="/graph/tip?style=paper">tip</a>
@@ -254,6 +259,9 @@
<a href="/graph/tip?revcount=30&style=coal">less</a>
<a href="/graph/tip?revcount=120&style=coal">more</a>
| rev 2: <a href="/graph/43c799df6e75?style=coal">(0)</a> <a href="/graph/tip?style=coal">tip</a>
+ <a href="/rev/9d8c40cba617?style=coal">third</a>
+ <a href="/rev/a7c1559b7bba?style=coal">second</a>
+ <a href="/rev/43c799df6e75?style=coal">first</a>
<a href="/graph/tip?revcount=30&style=coal">less</a>
<a href="/graph/tip?revcount=120&style=coal">more</a>
| rev 2: <a href="/graph/43c799df6e75?style=coal">(0)</a> <a href="/graph/tip?style=coal">tip</a>
@@ -318,6 +326,8 @@
<a href="/graph/xyzzy?revcount=30&style=coal">less</a>
<a href="/graph/xyzzy?revcount=120&style=coal">more</a>
| rev 1: <a href="/graph/43c799df6e75?style=coal">(0)</a> <a href="/graph/tip?style=coal">tip</a>
+ <a href="/rev/a7c1559b7bba?style=coal">second</a>
+ <a href="/rev/43c799df6e75?style=coal">first</a>
<a href="/graph/xyzzy?revcount=30&style=coal">less</a>
<a href="/graph/xyzzy?revcount=120&style=coal">more</a>
| rev 1: <a href="/graph/43c799df6e75?style=coal">(0)</a> <a href="/graph/tip?style=coal">tip</a>
@@ -468,11 +478,11 @@
<a href="/graph/tip?style=gitweb">graph</a> |
<a href="/file/tip?style=gitweb">files</a> | <a href="/archive/tip.zip">zip</a> |
<a href="/log/43c799df6e75?style=gitweb">(0)</a> <a href="/log/tip?style=gitweb">tip</a> <br/>
- <a class="title" href="/rev/9d8c40cba617?style=gitweb"><span class="age">Thu, 01 Jan 1970 00:00:00 +0000</span>third<span class="logtags"> <span class="branchtag" title="default">default</span> <span class="tagtag" title="tip">tip</span> </span></a>
+ <a class="title" href="/rev/9d8c40cba617?style=gitweb">
<a href="/rev/9d8c40cba617?style=gitweb">changeset</a><br/>
- <a class="title" href="/rev/a7c1559b7bba?style=gitweb"><span class="age">Thu, 01 Jan 1970 00:00:00 +0000</span>second<span class="logtags"> <span class="bookmarktag" title="xyzzy">xyzzy</span> </span></a>
+ <a class="title" href="/rev/a7c1559b7bba?style=gitweb">
<a href="/rev/a7c1559b7bba?style=gitweb">changeset</a><br/>
- <a class="title" href="/rev/43c799df6e75?style=gitweb"><span class="age">Thu, 01 Jan 1970 00:00:00 +0000</span>first<span class="logtags"> </span></a>
+ <a class="title" href="/rev/43c799df6e75?style=gitweb">
<a href="/rev/43c799df6e75?style=gitweb">changeset</a><br/>
<a href="/log/43c799df6e75?style=gitweb">(0)</a> <a href="/log/tip?style=gitweb">tip</a> <br/>
@@ -483,6 +493,9 @@
<a href="/graph/tip?revcount=30&style=gitweb">less</a>
<a href="/graph/tip?revcount=120&style=gitweb">more</a>
| <a href="/graph/43c799df6e75?style=gitweb">(0)</a> <a href="/graph/tip?style=gitweb">tip</a>
+ <a class="list" href="/rev/9d8c40cba617?style=gitweb"><b>third</b></a>
+ <a class="list" href="/rev/a7c1559b7bba?style=gitweb"><b>second</b></a>
+ <a class="list" href="/rev/43c799df6e75?style=gitweb"><b>first</b></a>
<a href="/graph/tip?revcount=30&style=gitweb">less</a>
<a href="/graph/tip?revcount=120&style=gitweb">more</a>
| <a href="/graph/43c799df6e75?style=gitweb">(0)</a> <a href="/graph/tip?style=gitweb">tip</a>
@@ -518,11 +531,11 @@
$ "$TESTDIR/get-with-headers.py" $LOCALIP:$HGPORT 'shortlog?style=gitweb&rev=all()' | egrep $REVLINKS
<a href="/file?style=gitweb">files</a> | <a href="/archive/tip.zip">zip</a>
- <a class="title" href="/rev/9d8c40cba617?style=gitweb"><span class="age">Thu, 01 Jan 1970 00:00:00 +0000</span>third<span class="logtags"> <span class="branchtag" title="default">default</span> <span class="tagtag" title="tip">tip</span> </span></a>
+ <a class="title" href="/rev/9d8c40cba617?style=gitweb">
<a href="/rev/9d8c40cba617?style=gitweb">changeset</a><br/>
- <a class="title" href="/rev/a7c1559b7bba?style=gitweb"><span class="age">Thu, 01 Jan 1970 00:00:00 +0000</span>second<span class="logtags"> <span class="bookmarktag" title="xyzzy">xyzzy</span> </span></a>
+ <a class="title" href="/rev/a7c1559b7bba?style=gitweb">
<a href="/rev/a7c1559b7bba?style=gitweb">changeset</a><br/>
- <a class="title" href="/rev/43c799df6e75?style=gitweb"><span class="age">Thu, 01 Jan 1970 00:00:00 +0000</span>first<span class="logtags"> </span></a>
+ <a class="title" href="/rev/43c799df6e75?style=gitweb">
<a href="/rev/43c799df6e75?style=gitweb">changeset</a><br/>
$ "$TESTDIR/get-with-headers.py" $LOCALIP:$HGPORT 'rev/xyzzy?style=gitweb' | egrep $REVLINKS
@@ -531,7 +544,7 @@
<a href="/graph/xyzzy?style=gitweb">graph</a> |
<a href="/file/xyzzy?style=gitweb">files</a> |
<a href="/raw-rev/xyzzy">raw</a> | <a href="/archive/xyzzy.zip">zip</a> |
- <a class="title" href="/raw-rev/a7c1559b7bba">second <span class="logtags"><span class="bookmarktag" title="xyzzy">xyzzy</span> </span></a>
+ <a class="title" href="/raw-rev/a7c1559b7bba">
<td style="font-family:monospace"><a class="list" href="/rev/a7c1559b7bba?style=gitweb">a7c1559b7bba</a></td>
<a class="list" href="/rev/43c799df6e75?style=gitweb">43c799df6e75</a>
<a class="list" href="/rev/9d8c40cba617?style=gitweb">9d8c40cba617</a>
@@ -560,9 +573,9 @@
<a href="/graph/xyzzy?style=gitweb">graph</a> |
<a href="/file/xyzzy?style=gitweb">files</a> | <a href="/archive/xyzzy.zip">zip</a> |
<a href="/log/43c799df6e75?style=gitweb">(0)</a> <a href="/log/tip?style=gitweb">tip</a> <br/>
- <a class="title" href="/rev/a7c1559b7bba?style=gitweb"><span class="age">Thu, 01 Jan 1970 00:00:00 +0000</span>second<span class="logtags"> <span class="bookmarktag" title="xyzzy">xyzzy</span> </span></a>
+ <a class="title" href="/rev/a7c1559b7bba?style=gitweb">
<a href="/rev/a7c1559b7bba?style=gitweb">changeset</a><br/>
- <a class="title" href="/rev/43c799df6e75?style=gitweb"><span class="age">Thu, 01 Jan 1970 00:00:00 +0000</span>first<span class="logtags"> </span></a>
+ <a class="title" href="/rev/43c799df6e75?style=gitweb">
<a href="/rev/43c799df6e75?style=gitweb">changeset</a><br/>
<a href="/log/43c799df6e75?style=gitweb">(0)</a> <a href="/log/tip?style=gitweb">tip</a> <br/>
@@ -573,6 +586,8 @@
<a href="/graph/xyzzy?revcount=30&style=gitweb">less</a>
<a href="/graph/xyzzy?revcount=120&style=gitweb">more</a>
| <a href="/graph/43c799df6e75?style=gitweb">(0)</a> <a href="/graph/tip?style=gitweb">tip</a>
+ <a class="list" href="/rev/a7c1559b7bba?style=gitweb"><b>second</b></a>
+ <a class="list" href="/rev/43c799df6e75?style=gitweb"><b>first</b></a>
<a href="/graph/xyzzy?revcount=30&style=gitweb">less</a>
<a href="/graph/xyzzy?revcount=120&style=gitweb">more</a>
| <a href="/graph/43c799df6e75?style=gitweb">(0)</a> <a href="/graph/tip?style=gitweb">tip</a>
@@ -709,13 +724,16 @@
<li><a href="/graph/tip?style=monoblue">graph</a></li>
<li><a href="/file/tip?style=monoblue">files</a></li>
<li><a href="/archive/tip.zip">zip</a></li>
- <h3 class="changelog"><a class="title" href="/rev/9d8c40cba617?style=monoblue">third<span class="logtags"> <span class="branchtag" title="default">default</span> <span class="tagtag" title="tip">tip</span> </span></a></h3>
- <h3 class="changelog"><a class="title" href="/rev/a7c1559b7bba?style=monoblue">second<span class="logtags"> <span class="bookmarktag" title="xyzzy">xyzzy</span> </span></a></h3>
- <h3 class="changelog"><a class="title" href="/rev/43c799df6e75?style=monoblue">first<span class="logtags"> </span></a></h3>
+ <a class="title" href="/rev/9d8c40cba617?style=monoblue">
+ <a class="title" href="/rev/a7c1559b7bba?style=monoblue">
+ <a class="title" href="/rev/43c799df6e75?style=monoblue">
<a href="/log/43c799df6e75?style=monoblue">(0)</a> <a href="/log/tip?style=monoblue">tip</a>
$ "$TESTDIR/get-with-headers.py" $LOCALIP:$HGPORT 'graph?style=monoblue' | egrep $REVLINKS
<li><a href="/file/tip?style=monoblue">files</a></li>
+ <a href="/rev/9d8c40cba617?style=monoblue">third</a>
+ <a href="/rev/a7c1559b7bba?style=monoblue">second</a>
+ <a href="/rev/43c799df6e75?style=monoblue">first</a>
<a href="/graph/tip?revcount=30&style=monoblue">less</a>
<a href="/graph/tip?revcount=120&style=monoblue">more</a>
| <a href="/graph/43c799df6e75?style=monoblue">(0)</a> <a href="/graph/tip?style=monoblue">tip</a>
@@ -753,16 +771,16 @@
$ "$TESTDIR/get-with-headers.py" $LOCALIP:$HGPORT 'shortlog?style=monoblue&rev=all()' | egrep $REVLINKS
<li><a href="/archive/tip.zip">zip</a></li>
- <h3 class="changelog"><a class="title" href="/rev/9d8c40cba617?style=monoblue">third<span class="logtags"> <span class="branchtag" title="default">default</span> <span class="tagtag" title="tip">tip</span> </span></a></h3>
- <h3 class="changelog"><a class="title" href="/rev/a7c1559b7bba?style=monoblue">second<span class="logtags"> <span class="bookmarktag" title="xyzzy">xyzzy</span> </span></a></h3>
- <h3 class="changelog"><a class="title" href="/rev/43c799df6e75?style=monoblue">first<span class="logtags"> </span></a></h3>
+ <a class="title" href="/rev/9d8c40cba617?style=monoblue">
+ <a class="title" href="/rev/a7c1559b7bba?style=monoblue">
+ <a class="title" href="/rev/43c799df6e75?style=monoblue">
$ "$TESTDIR/get-with-headers.py" $LOCALIP:$HGPORT 'rev/xyzzy?style=monoblue' | egrep $REVLINKS
<li><a href="/graph/xyzzy?style=monoblue">graph</a></li>
<li><a href="/file/xyzzy?style=monoblue">files</a></li>
<li><a href="/raw-rev/xyzzy">raw</a></li>
<li><a href="/archive/xyzzy.zip">zip</a></li>
- <h3 class="changeset"><a href="/raw-rev/a7c1559b7bba">second <span class="logtags"><span class="bookmarktag" title="xyzzy">xyzzy</span> </span></a></h3>
+ <a href="/raw-rev/a7c1559b7bba">
<dd><a href="/rev/a7c1559b7bba?style=monoblue">a7c1559b7bba</a></dd>
<dd><a href="/rev/43c799df6e75?style=monoblue">43c799df6e75</a></dd>
<dd><a href="/rev/9d8c40cba617?style=monoblue">9d8c40cba617</a></dd>
@@ -789,12 +807,14 @@
<li><a href="/graph/xyzzy?style=monoblue">graph</a></li>
<li><a href="/file/xyzzy?style=monoblue">files</a></li>
<li><a href="/archive/xyzzy.zip">zip</a></li>
- <h3 class="changelog"><a class="title" href="/rev/a7c1559b7bba?style=monoblue">second<span class="logtags"> <span class="bookmarktag" title="xyzzy">xyzzy</span> </span></a></h3>
- <h3 class="changelog"><a class="title" href="/rev/43c799df6e75?style=monoblue">first<span class="logtags"> </span></a></h3>
+ <a class="title" href="/rev/a7c1559b7bba?style=monoblue">
+ <a class="title" href="/rev/43c799df6e75?style=monoblue">
<a href="/log/43c799df6e75?style=monoblue">(0)</a> <a href="/log/tip?style=monoblue">tip</a>
$ "$TESTDIR/get-with-headers.py" $LOCALIP:$HGPORT 'graph/xyzzy?style=monoblue' | egrep $REVLINKS
<li><a href="/file/xyzzy?style=monoblue">files</a></li>
+ <a href="/rev/a7c1559b7bba?style=monoblue">second</a>
+ <a href="/rev/43c799df6e75?style=monoblue">first</a>
<a href="/graph/xyzzy?revcount=30&style=monoblue">less</a>
<a href="/graph/xyzzy?revcount=120&style=monoblue">more</a>
| <a href="/graph/43c799df6e75?style=monoblue">(0)</a> <a href="/graph/tip?style=monoblue">tip</a>
@@ -926,6 +946,9 @@
<a href="/shortlog/tip?style=spartan">shortlog</a>
<a href="/file/tip/?style=spartan">files</a>
navigate: <small class="navigate"><a href="/graph/43c799df6e75?style=spartan">(0)</a> <a href="/graph/tip?style=spartan">tip</a> </small>
+ <a href="/rev/9d8c40cba617?style=spartan">third</a>
+ <a href="/rev/a7c1559b7bba?style=spartan">second</a>
+ <a href="/rev/43c799df6e75?style=spartan">first</a>
navigate: <small class="navigate"><a href="/graph/43c799df6e75?style=spartan">(0)</a> <a href="/graph/tip?style=spartan">tip</a> </small>
$ "$TESTDIR/get-with-headers.py" $LOCALIP:$HGPORT 'tags?style=spartan' | egrep $REVLINKS
@@ -1003,6 +1026,8 @@
<a href="/shortlog/xyzzy?style=spartan">shortlog</a>
<a href="/file/xyzzy/?style=spartan">files</a>
navigate: <small class="navigate"><a href="/graph/43c799df6e75?style=spartan">(0)</a> <a href="/graph/tip?style=spartan">tip</a> </small>
+ <a href="/rev/a7c1559b7bba?style=spartan">second</a>
+ <a href="/rev/43c799df6e75?style=spartan">first</a>
navigate: <small class="navigate"><a href="/graph/43c799df6e75?style=spartan">(0)</a> <a href="/graph/tip?style=spartan">tip</a> </small>
$ "$TESTDIR/get-with-headers.py" $LOCALIP:$HGPORT 'file/xyzzy?style=spartan' | egrep $REVLINKS
--- a/tests/test-hgweb.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-hgweb.t Mon Jan 22 17:53:02 2018 -0500
@@ -267,7 +267,7 @@
<h2 class="breadcrumb"><a href="/">Mercurial</a> </h2>
<h3>
directory / @ 0:<a href="/rev/2ef0ac749a14">2ef0ac749a14</a>
- <span class="tag">tip</span> <span class="tag">@</span> <span class="tag">a b c</span> <span class="tag">d/e/f</span>
+ <span class="phase">draft</span> <span class="branchhead">default</span> <span class="tag">tip</span> <span class="tag">@</span> <span class="tag">a b c</span> <span class="tag">d/e/f</span>
</h3>
@@ -340,7 +340,7 @@
$ get-with-headers.py --twice localhost:$HGPORT 'static/style-gitweb.css' - date etag server
200 Script output follows
- content-length: 9066
+ content-length: 9118
content-type: text/css
body { font-family: sans-serif; font-size: 12px; border:solid #d9d8d1; border-width:1px; margin:10px; background: white; color: black; }
@@ -406,8 +406,6 @@
}
td.indexlinks a:hover { background-color: #6666aa; }
div.pre { font-family:monospace; font-size:12px; white-space:pre; }
- div.diff_info { font-family:monospace; color:#000099; background-color:#edece6; font-style:italic; }
- div.index_include { border:solid #d9d8d1; border-width:0px 0px 1px; padding:12px 8px; }
.search {
margin-right: 8px;
@@ -467,6 +465,18 @@
background-color: #ffaaff;
border-color: #ffccff #ff00ee #ff00ee #ffccff;
}
+ span.logtags span.phasetag {
+ background-color: #dfafff;
+ border-color: #e2b8ff #ce48ff #ce48ff #e2b8ff;
+ }
+ span.logtags span.obsoletetag {
+ background-color: #dddddd;
+ border-color: #e4e4e4 #a3a3a3 #a3a3a3 #e4e4e4;
+ }
+ span.logtags span.instabilitytag {
+ background-color: #ffb1c0;
+ border-color: #ffbbc8 #ff4476 #ff4476 #ffbbc8;
+ }
span.logtags span.tagtag {
background-color: #ffffaa;
border-color: #ffffcc #ffee00 #ffee00 #ffffcc;
@@ -536,10 +546,9 @@
}
div#followlines {
- background-color: #B7B7B7;
- border: 1px solid #CCC;
- border-radius: 5px;
- padding: 4px;
+ background-color: #FFF;
+ border: 1px solid #d9d8d1;
+ padding: 5px;
position: fixed;
}
@@ -638,30 +647,26 @@
margin: 0;
}
- ul#nodebgs {
+ ul#graphnodes {
list-style: none inside none;
padding: 0;
margin: 0;
- top: -0.7em;
- }
-
- ul#graphnodes li, ul#nodebgs li {
- height: 39px;
}
- ul#graphnodes {
+ ul#graphnodes li {
+ position: relative;
+ height: 37px;
+ overflow: visible;
+ padding-top: 2px;
+ }
+
+ ul#graphnodes li .fg {
position: absolute;
z-index: 10;
- top: -0.8em;
- list-style: none inside none;
- padding: 0;
}
ul#graphnodes li .info {
- display: block;
font-size: 100%;
- position: relative;
- top: -3px;
font-style: italic;
}
--- a/tests/test-highlight.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-highlight.t Mon Jan 22 17:53:02 2018 -0500
@@ -113,7 +113,7 @@
<h2 class="breadcrumb"><a href="/">Mercurial</a> </h2>
<h3>
view primes.py @ 0:<a href="/rev/f4fca47b67e6">f4fca47b67e6</a>
- <span class="tag">tip</span>
+ <span class="phase">draft</span> <span class="branchhead">default</span> <span class="tag">tip</span>
</h3>
@@ -252,7 +252,7 @@
<h2 class="breadcrumb"><a href="/">Mercurial</a> </h2>
<h3>
annotate primes.py @ 0:<a href="/rev/f4fca47b67e6">f4fca47b67e6</a>
- <span class="tag">tip</span>
+ <span class="phase">draft</span> <span class="branchhead">default</span> <span class="tag">tip</span>
</h3>
--- a/tests/test-histedit-arguments.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-histedit-arguments.t Mon Jan 22 17:53:02 2018 -0500
@@ -148,7 +148,7 @@
$ mv .hg/histedit-state.back .hg/histedit-state
$ hg histedit --continue
- saved backup bundle to $TESTTMP/foo/.hg/strip-backup/08d98a8350f3-02594089-histedit.hg (glob)
+ saved backup bundle to $TESTTMP/foo/.hg/strip-backup/08d98a8350f3-02594089-histedit.hg
$ hg log -G -T '{rev} {shortest(node)} {desc}\n' -r 2::
@ 4 f5ed five
|
@@ -265,7 +265,7 @@
HG: user: test
HG: branch 'default'
HG: changed alpha
- saved backup bundle to $TESTTMP/foo/.hg/strip-backup/c8e68270e35a-63d8b8d8-histedit.hg (glob)
+ saved backup bundle to $TESTTMP/foo/.hg/strip-backup/c8e68270e35a-63d8b8d8-histedit.hg
$ hg update -q 2
$ echo x > x
@@ -354,7 +354,8 @@
$ mv ../corrupt-histedit .hg/histedit-state
$ hg histedit --abort
warning: encountered an exception during histedit --abort; the repository may not have been completely cleaned up
- abort: .*(No such file or directory:|The system cannot find the file specified).* (re)
+ abort: $TESTTMP/foo/.hg/strip-backup/*-histedit.hg: $ENOENT$ (glob) (windows !)
+ abort: $ENOENT$: $TESTTMP/foo/.hg/strip-backup/*-histedit.hg (glob) (no-windows !)
[255]
Histedit state has been exited
$ hg summary -q
--- a/tests/test-histedit-bookmark-motion.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-histedit-bookmark-motion.t Mon Jan 22 17:53:02 2018 -0500
@@ -88,7 +88,7 @@
> fold e860deea161a 4 e
> pick 652413bf663e 5 f
> EOF
- saved backup bundle to $TESTTMP/r/.hg/strip-backup/96e494a2d553-45c027ab-histedit.hg (glob)
+ saved backup bundle to $TESTTMP/r/.hg/strip-backup/96e494a2d553-45c027ab-histedit.hg
$ hg log --graph
@ changeset: 3:cacdfd884a93
| bookmark: five
@@ -143,7 +143,7 @@
> pick cacdfd884a93 3 f
> pick 59d9f330561f 2 d
> EOF
- saved backup bundle to $TESTTMP/r/.hg/strip-backup/59d9f330561f-073008af-histedit.hg (glob)
+ saved backup bundle to $TESTTMP/r/.hg/strip-backup/59d9f330561f-073008af-histedit.hg
We expect 'five' to stay at tip, since the tipmost bookmark is most
likely the useful signal.
--- a/tests/test-histedit-commute.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-histedit-commute.t Mon Jan 22 17:53:02 2018 -0500
@@ -420,7 +420,7 @@
> EOF
$ HGEDITOR="sh ./editor.sh" hg histedit 0
- saved backup bundle to $TESTTMP/issue4251/.hg/strip-backup/b0f4233702ca-4cf5af69-histedit.hg (glob)
+ saved backup bundle to $TESTTMP/issue4251/.hg/strip-backup/b0f4233702ca-4cf5af69-histedit.hg
$ hg --config diff.git=yes export 0
# HG changeset patch
--- a/tests/test-histedit-edit.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-histedit-edit.t Mon Jan 22 17:53:02 2018 -0500
@@ -273,7 +273,7 @@
HG: user: test
HG: branch 'default'
HG: added f
- saved backup bundle to $TESTTMP/r/.hg/strip-backup/b5f70786f9b0-c28d9c86-histedit.hg (glob)
+ saved backup bundle to $TESTTMP/r/.hg/strip-backup/b5f70786f9b0-c28d9c86-histedit.hg
$ hg status
@@ -437,7 +437,7 @@
(hg histedit --continue to resume)
[1]
$ HGEDITOR=true hg histedit --continue
- saved backup bundle to $TESTTMP/r0/.hg/strip-backup/cb9a9f314b8b-cc5ccb0b-histedit.hg (glob)
+ saved backup bundle to $TESTTMP/r0/.hg/strip-backup/cb9a9f314b8b-cc5ccb0b-histedit.hg
$ hg log -G
@ changeset: 0:0efcea34f18a
--- a/tests/test-histedit-fold.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-histedit-fold.t Mon Jan 22 17:53:02 2018 -0500
@@ -317,7 +317,7 @@
continue: hg histedit --continue
$ hg histedit --continue
251d831eeec5: empty changeset
- saved backup bundle to $TESTTMP/fold-to-empty-test/.hg/strip-backup/888f9082bf99-daa0b8b3-histedit.hg (glob)
+ saved backup bundle to $TESTTMP/fold-to-empty-test/.hg/strip-backup/888f9082bf99-daa0b8b3-histedit.hg
$ hg logt --graph
@ 1:617f94f13c0f +4
|
@@ -394,7 +394,7 @@
HG: user: test
HG: branch 'default'
HG: changed file
- saved backup bundle to $TESTTMP/fold-with-dropped/.hg/strip-backup/617f94f13c0f-3d69522c-histedit.hg (glob)
+ saved backup bundle to $TESTTMP/fold-with-dropped/.hg/strip-backup/617f94f13c0f-3d69522c-histedit.hg
$ hg logt -G
@ 1:10c647b2cdd5 +4
|
--- a/tests/test-histedit-obsolete.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-histedit-obsolete.t Mon Jan 22 17:53:02 2018 -0500
@@ -51,9 +51,9 @@
o 0:cb9a9f314b8b a
$ hg debugobsolete
- e72d22b19f8ecf4150ab4f91d0973fd9955d3ddf 49d44ab2be1b67a79127568a67c9c99430633b48 0 (Thu Jan 01 00:00:00 1970 +0000) {'operation': 'amend', 'user': 'test'}
- 1b2d564fad96311b45362f17c2aa855150efb35f 46abc7c4d8738e8563e577f7889e1b6db3da4199 0 (Thu Jan 01 00:00:00 1970 +0000) {'operation': 'histedit', 'user': 'test'}
- 114f4176969ef342759a8a57e6bccefc4234829b 49d44ab2be1b67a79127568a67c9c99430633b48 0 (Thu Jan 01 00:00:00 1970 +0000) {'operation': 'histedit', 'user': 'test'}
+ e72d22b19f8ecf4150ab4f91d0973fd9955d3ddf 49d44ab2be1b67a79127568a67c9c99430633b48 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '8', 'operation': 'amend', 'user': 'test'}
+ 1b2d564fad96311b45362f17c2aa855150efb35f 46abc7c4d8738e8563e577f7889e1b6db3da4199 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '12', 'operation': 'histedit', 'user': 'test'}
+ 114f4176969ef342759a8a57e6bccefc4234829b 49d44ab2be1b67a79127568a67c9c99430633b48 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '12', 'operation': 'histedit', 'user': 'test'}
With some node gone missing during the edit.
@@ -71,6 +71,7 @@
$ hg commit --amend -X . -m XXXXXX
$ hg commit --amend -X . -m b2
$ hg --hidden --config extensions.strip= strip 'desc(XXXXXX)' --no-backup
+ warning: ignoring unknown working parent aba7da937030!
$ hg histedit --continue
$ hg log -G
@ 8:273c1f3b8626 c
@@ -80,13 +81,13 @@
o 0:cb9a9f314b8b a
$ hg debugobsolete
- e72d22b19f8ecf4150ab4f91d0973fd9955d3ddf 49d44ab2be1b67a79127568a67c9c99430633b48 0 (Thu Jan 01 00:00:00 1970 +0000) {'operation': 'amend', 'user': 'test'}
- 1b2d564fad96311b45362f17c2aa855150efb35f 46abc7c4d8738e8563e577f7889e1b6db3da4199 0 (Thu Jan 01 00:00:00 1970 +0000) {'operation': 'histedit', 'user': 'test'}
- 114f4176969ef342759a8a57e6bccefc4234829b 49d44ab2be1b67a79127568a67c9c99430633b48 0 (Thu Jan 01 00:00:00 1970 +0000) {'operation': 'histedit', 'user': 'test'}
- 76f72745eac0643d16530e56e2f86e36e40631f1 2ca853e48edbd6453a0674dc0fe28a0974c51b9c 0 (Thu Jan 01 00:00:00 1970 +0000) {'operation': 'amend', 'user': 'test'}
- 2ca853e48edbd6453a0674dc0fe28a0974c51b9c aba7da93703075eec9fb1dbaf143ff2bc1c49d46 0 (Thu Jan 01 00:00:00 1970 +0000) {'operation': 'amend', 'user': 'test'}
- 49d44ab2be1b67a79127568a67c9c99430633b48 273c1f3b86267ed3ec684bb13af1fa4d6ba56e02 0 (Thu Jan 01 00:00:00 1970 +0000) {'operation': 'histedit', 'user': 'test'}
- 46abc7c4d8738e8563e577f7889e1b6db3da4199 aba7da93703075eec9fb1dbaf143ff2bc1c49d46 0 (Thu Jan 01 00:00:00 1970 +0000) {'operation': 'histedit', 'user': 'test'}
+ e72d22b19f8ecf4150ab4f91d0973fd9955d3ddf 49d44ab2be1b67a79127568a67c9c99430633b48 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '8', 'operation': 'amend', 'user': 'test'}
+ 1b2d564fad96311b45362f17c2aa855150efb35f 46abc7c4d8738e8563e577f7889e1b6db3da4199 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '12', 'operation': 'histedit', 'user': 'test'}
+ 114f4176969ef342759a8a57e6bccefc4234829b 49d44ab2be1b67a79127568a67c9c99430633b48 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '12', 'operation': 'histedit', 'user': 'test'}
+ 76f72745eac0643d16530e56e2f86e36e40631f1 2ca853e48edbd6453a0674dc0fe28a0974c51b9c 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '1', 'operation': 'amend', 'user': 'test'}
+ 2ca853e48edbd6453a0674dc0fe28a0974c51b9c aba7da93703075eec9fb1dbaf143ff2bc1c49d46 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '1', 'operation': 'amend', 'user': 'test'}
+ 49d44ab2be1b67a79127568a67c9c99430633b48 273c1f3b86267ed3ec684bb13af1fa4d6ba56e02 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '4', 'operation': 'histedit', 'user': 'test'}
+ 46abc7c4d8738e8563e577f7889e1b6db3da4199 aba7da93703075eec9fb1dbaf143ff2bc1c49d46 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '5', 'operation': 'histedit', 'user': 'test'}
$ cd ..
Base setup for the rest of the testing
@@ -170,13 +171,13 @@
o 0:cb9a9f314b8b a
$ hg debugobsolete
- d2ae7f538514cd87c17547b0de4cea71fe1af9fb 0 {cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b} (Thu Jan 01 00:00:00 1970 +0000) {'operation': 'histedit', 'user': 'test'}
- 177f92b773850b59254aa5e923436f921b55483b b346ab9a313db8537ecf96fca3ca3ca984ef3bd7 0 (Thu Jan 01 00:00:00 1970 +0000) {'operation': 'histedit', 'user': 'test'}
- 055a42cdd88768532f9cf79daa407fc8d138de9b 59d9f330561fd6c88b1a6b32f0e45034d88db784 0 (Thu Jan 01 00:00:00 1970 +0000) {'operation': 'histedit', 'user': 'test'}
- e860deea161a2f77de56603b340ebbb4536308ae 59d9f330561fd6c88b1a6b32f0e45034d88db784 0 (Thu Jan 01 00:00:00 1970 +0000) {'operation': 'histedit', 'user': 'test'}
- 652413bf663ef2a641cab26574e46d5f5a64a55a cacdfd884a9321ec4e1de275ef3949fa953a1f83 0 (Thu Jan 01 00:00:00 1970 +0000) {'operation': 'histedit', 'user': 'test'}
- 96e494a2d553dd05902ba1cee1d94d4cb7b8faed 0 {b346ab9a313db8537ecf96fca3ca3ca984ef3bd7} (Thu Jan 01 00:00:00 1970 +0000) {'operation': 'histedit', 'user': 'test'}
- b558abc46d09c30f57ac31e85a8a3d64d2e906e4 0 {96e494a2d553dd05902ba1cee1d94d4cb7b8faed} (Thu Jan 01 00:00:00 1970 +0000) {'operation': 'histedit', 'user': 'test'}
+ d2ae7f538514cd87c17547b0de4cea71fe1af9fb 0 {cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b} (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '0', 'operation': 'histedit', 'user': 'test'}
+ 177f92b773850b59254aa5e923436f921b55483b b346ab9a313db8537ecf96fca3ca3ca984ef3bd7 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '4', 'operation': 'histedit', 'user': 'test'}
+ 055a42cdd88768532f9cf79daa407fc8d138de9b 59d9f330561fd6c88b1a6b32f0e45034d88db784 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '13', 'operation': 'histedit', 'user': 'test'}
+ e860deea161a2f77de56603b340ebbb4536308ae 59d9f330561fd6c88b1a6b32f0e45034d88db784 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '13', 'operation': 'histedit', 'user': 'test'}
+ 652413bf663ef2a641cab26574e46d5f5a64a55a cacdfd884a9321ec4e1de275ef3949fa953a1f83 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '4', 'operation': 'histedit', 'user': 'test'}
+ 96e494a2d553dd05902ba1cee1d94d4cb7b8faed 0 {b346ab9a313db8537ecf96fca3ca3ca984ef3bd7} (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '0', 'operation': 'histedit', 'user': 'test'}
+ b558abc46d09c30f57ac31e85a8a3d64d2e906e4 0 {96e494a2d553dd05902ba1cee1d94d4cb7b8faed} (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '0', 'operation': 'histedit', 'user': 'test'}
Ensure hidden revision does not prevent histedit
@@ -222,6 +223,7 @@
[1]
$ echo c >> c
$ hg histedit --continue
+ 1 new orphan changesets
$ hg log -r 'orphan()'
11:c13eb81022ca f (no-eol)
@@ -526,7 +528,7 @@
$ hg histedit --abort
1 files updated, 0 files merged, 0 files removed, 0 files unresolved
- saved backup bundle to $TESTTMP/abort/.hg/strip-backup/4dc06258baa6-dff4ef05-backup.hg (glob)
+ saved backup bundle to $TESTTMP/abort/.hg/strip-backup/4dc06258baa6-dff4ef05-backup.hg
$ hg log -G
@ 18:ee118ab9fa44 (secret) k
@@ -575,4 +577,4 @@
o 0:cb9a9f314b8b (public) a
$ hg debugobsolete --rev .
- ee118ab9fa44ebb86be85996548b5517a39e5093 175d6b286a224c23f192e79a581ce83131a53fa2 0 (*) {'operation': 'histedit', 'user': 'test'} (glob)
+ ee118ab9fa44ebb86be85996548b5517a39e5093 175d6b286a224c23f192e79a581ce83131a53fa2 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '4', 'operation': 'histedit', 'user': 'test'}
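(editor's note) The rewritten markers above gain an 'ef1' metadata field, a bit field recording which aspects of the changeset the rewrite touched. A minimal decoding sketch follows; the flag names and bit positions are illustrative assumptions, not the authoritative values used by Mercurial's obsolescence code:

    # Sketch: decode an 'ef1' effect-flag value from obsmarker metadata.
    # The bit assignments below are assumed for illustration only.
    EFFECT_FLAGS = {
        1: 'description',   # assumed: commit message changed
        2: 'metadata',      # assumed: other metadata changed
        4: 'parents',       # assumed: parents changed
        8: 'content',       # assumed: file content (diff) changed
    }

    def describe_ef1(value):
        """Return a human-readable list of effects encoded in an ef1 value."""
        effects = [name for bit, name in EFFECT_FLAGS.items() if value & bit]
        return effects or ['no recorded effect']

    # e.g. the {'ef1': '13', ...} marker above
    print(describe_ef1(int('13')))  # -> e.g. ['description', 'parents', 'content']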
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-histedit-templates.t Mon Jan 22 17:53:02 2018 -0500
@@ -0,0 +1,54 @@
+Testing templating for histedit command
+
+Setup
+
+ $ cat >> $HGRCPATH <<EOF
+ > [extensions]
+ > histedit=
+ > [experimental]
+ > evolution=createmarkers
+ > EOF
+
+ $ hg init repo
+ $ cd repo
+ $ for ch in a b c d; do echo foo > $ch; hg commit -Aqm "Added "$ch; done
+
+ $ hg log -G -T "{rev}:{node|short} {desc}"
+ @ 3:62615734edd5 Added d
+ |
+ o 2:28ad74487de9 Added c
+ |
+ o 1:29becc82797a Added b
+ |
+ o 0:18d04c59bb5d Added a
+
+Getting the JSON output for nodechanges
+
+ $ hg histedit -Tjson --commands - 2>&1 <<EOF
+ > pick 28ad74487de9 Added c
+ > pick 62615734edd5 Added d
+ > pick 18d04c59bb5d Added a
+ > pick 29becc82797a Added b
+ > EOF
+ [
+ {
+ "nodechanges": {"18d04c59bb5d2d4090ad9a5b59bd6274adb63add": ["109f8ec895447f81b380ba8d4d8b66539ccdcb94"], "28ad74487de9599d00d81085be739c61fc340652": ["bff9e07c1807942b161dab768aa793b48e9a7f9d"], "29becc82797a4bc11ec8880b58eaecd2ab3e7760": ["f5dcf3b4db23f31f1aacf46c33d1393de303d26f"], "62615734edd52f06b6fb9c2beb429e4fe30d57b8": ["201423b441c84d9e6858daed653e0d22485c1cfa"]}
+ }
+ ]
+
+ $ hg log -G -T "{rev}:{node|short} {desc}"
+ @ 7:f5dcf3b4db23 Added b
+ |
+ o 6:109f8ec89544 Added a
+ |
+ o 5:201423b441c8 Added d
+ |
+ o 4:bff9e07c1807 Added c
+
+ $ hg histedit -T "{nodechanges|json}" --commands - 2>&1 <<EOF
+ > pick bff9e07c1807 Added c
+ > pick 201423b441c8 Added d
+ > pick 109f8ec89544 Added a
+ > roll f5dcf3b4db23 Added b
+ > EOF
+ {"109f8ec895447f81b380ba8d4d8b66539ccdcb94": ["8d01470bfeab64d3de13c49adb79d88790d38396"], "f3ec56a374bdbdf1953cacca505161442c6f3a3e": [], "f5dcf3b4db23f31f1aacf46c33d1393de303d26f": ["8d01470bfeab64d3de13c49adb79d88790d38396"]} (no-eol)
--- a/tests/test-hook.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-hook.t Mon Jan 22 17:53:02 2018 -0500
@@ -244,7 +244,7 @@
no changes found
pretxnopen hook: HG_HOOKNAME=pretxnopen HG_HOOKTYPE=pretxnopen HG_TXNID=TXN:$ID$ HG_TXNNAME=push
pretxnclose hook: HG_BOOKMARK_MOVED=1 HG_BUNDLE2=1 HG_HOOKNAME=pretxnclose HG_HOOKTYPE=pretxnclose HG_PENDING=$TESTTMP/a HG_SOURCE=push HG_TXNID=TXN:$ID$ HG_TXNNAME=push HG_URL=file:$TESTTMP/a
- pushkey hook: HG_HOOKNAME=pushkey HG_HOOKTYPE=pushkey HG_KEY=foo HG_NAMESPACE=bookmarks HG_NEW=0000000000000000000000000000000000000000 HG_RET=1
+ pushkey hook: HG_BUNDLE2=1 HG_HOOKNAME=pushkey HG_HOOKTYPE=pushkey HG_KEY=foo HG_NAMESPACE=bookmark HG_NEW=0000000000000000000000000000000000000000 HG_PUSHKEYCOMPAT=1 HG_SOURCE=push HG_TXNID=TXN:$ID$ HG_URL=file:$TESTTMP/a
txnclose hook: HG_BOOKMARK_MOVED=1 HG_BUNDLE2=1 HG_HOOKNAME=txnclose HG_HOOKTYPE=txnclose HG_SOURCE=push HG_TXNID=TXN:$ID$ HG_TXNNAME=push HG_URL=file:$TESTTMP/a
exporting bookmark foo
[1]
@@ -281,9 +281,8 @@
listkeys hook: HG_HOOKNAME=listkeys HG_HOOKTYPE=listkeys HG_NAMESPACE=bookmarks HG_VALUES={'bar': '0000000000000000000000000000000000000000', 'foo': '0000000000000000000000000000000000000000'}
no changes found
pretxnopen hook: HG_HOOKNAME=pretxnopen HG_HOOKTYPE=pretxnopen HG_TXNID=TXN:$ID$ HG_TXNNAME=push
- prepushkey.forbid hook: HG_BUNDLE2=1 HG_HOOKNAME=prepushkey HG_HOOKTYPE=prepushkey HG_KEY=baz HG_NAMESPACE=bookmarks HG_NEW=0000000000000000000000000000000000000000 HG_SOURCE=push HG_TXNID=TXN:$ID$ HG_URL=file:$TESTTMP/a
- pushkey-abort: prepushkey hook exited with status 1
- abort: exporting bookmark baz failed!
+ prepushkey.forbid hook: HG_BUNDLE2=1 HG_HOOKNAME=prepushkey HG_HOOKTYPE=prepushkey HG_KEY=baz HG_NAMESPACE=bookmark HG_NEW=0000000000000000000000000000000000000000 HG_PUSHKEYCOMPAT=1 HG_SOURCE=push HG_TXNID=TXN:$ID$ HG_URL=file:$TESTTMP/a
+ abort: prepushkey hook exited with status 1
[255]
$ cd ../a
@@ -685,7 +684,7 @@
$ hg up null
loading update.ne hook failed:
- abort: No such file or directory: $TESTTMP/d/repo/nonexistent.py
+ abort: $ENOENT$: $TESTTMP/d/repo/nonexistent.py
[255]
$ hg id
@@ -794,7 +793,7 @@
$ echo aa >> from/a
$ hg --cwd from ci -mb
$ hg --cwd from push
- pushing to $TESTTMP/to (glob)
+ pushing to $TESTTMP/to
searching for changes
changeset: 0:cb9a9f314b8b
tag: tip
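(editor's note) The hook lines above are emitted by hooks that receive their context through HG_* environment variables (HG_HOOKNAME, HG_NAMESPACE, HG_KEY, and so on), which is why the expected output changes when new variables such as HG_PUSHKEYCOMPAT are exported. A minimal sketch of an external hook script that reproduces this "name hook: KEY=value ..." style of line; the script name and its wiring into [hooks] are hypothetical:

    #!/usr/bin/env python3
    # showhook.py -- illustrative external hook: dump the HG_* variables
    # Mercurial exports to shell hooks, sorted, in the form seen above.
    import os
    import sys

    def main():
        name = os.environ.get('HG_HOOKNAME', 'unknown')
        pairs = sorted((k, v) for k, v in os.environ.items()
                       if k.startswith('HG_'))
        sys.stdout.write('%s hook: ' % name)
        sys.stdout.write(' '.join('%s=%s' % (k, v) for k, v in pairs))
        sys.stdout.write('\n')
        return 0  # a non-zero exit from a pre* hook would abort the operation

    if __name__ == '__main__':
        sys.exit(main())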
--- a/tests/test-http-bad-server.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-http-bad-server.t Mon Jan 22 17:53:02 2018 -0500
@@ -36,8 +36,7 @@
$ cat hg.pid > $DAEMON_PIDS
$ hg clone http://localhost:$HGPORT/ clone
- abort: error: Connection reset by peer (no-windows !)
- abort: error: An existing connection was forcibly closed by the remote host (windows !)
+ abort: error: $ECONNRESET$
[255]
(The server exits on its own, but there is a race between that and starting a new server.
@@ -54,8 +53,7 @@
error: '' on FreeBSD and OS X.
What we ideally want are:
-abort: error: Connection reset by peer (no-windows !)
-abort: error: An existing connection was forcibly closed by the remote host (windows !)
+abort: error: $ECONNRESET$
The flakiness in this output was easily observable with
--runs-per-test=20 on macOS 10.12 during the freeze for 4.2.
@@ -120,9 +118,9 @@
write(23) -> Server: badhttpserver\r\n
write(37) -> Date: Fri, 14 Apr 2017 00:00:00 GMT\r\n
write(41) -> Content-Type: application/mercurial-0.1\r\n
- write(21) -> Content-Length: 405\r\n
+ write(21) -> Content-Length: 417\r\n
write(2) -> \r\n
- write(405) -> lookup changegroupsubset branchmap pushkey known getbundle unbundlehash batch streamreqs=generaldelta,revlogv1 bundle2=HG20%0Achangegroup%3D01%2C02%0Adigests%3Dmd5%2Csha1%2Csha512%0Aerror%3Dabort%2Cunsupportedcontent%2Cpushraced%2Cpushkey%0Ahgtagsfnodes%0Alistkeys%0Apushkey%0Aremote-changegroup%3Dhttp%2Chttps unbundle=HG10GZ,HG10BZ,HG10UN httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx compression=none
+ write(417) -> lookup changegroupsubset branchmap pushkey known getbundle unbundlehash batch streamreqs=generaldelta,revlogv1 bundle2=HG20%0Abookmarks%0Achangegroup%3D01%2C02%0Adigests%3Dmd5%2Csha1%2Csha512%0Aerror%3Dabort%2Cunsupportedcontent%2Cpushraced%2Cpushkey%0Ahgtagsfnodes%0Alistkeys%0Apushkey%0Aremote-changegroup%3Dhttp%2Chttps unbundle=HG10GZ,HG10BZ,HG10UN httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx compression=none
readline(4? from 65537) -> (26) GET /?cmd=batch HTTP/1.1\r\n (glob)
readline(1? from -1) -> (1?) Accept-Encoding* (glob)
read limit reached; closing socket
@@ -130,7 +128,7 @@
readline(184 from -1) -> (27) Accept-Encoding: identity\r\n
readline(157 from -1) -> (29) vary: X-HgArg-1,X-HgProto-1\r\n
readline(128 from -1) -> (41) x-hgarg-1: cmds=heads+%3Bknown+nodes%3D\r\n
- readline(87 from -1) -> (48) x-hgproto-1: 0.1 0.2 comp=zstd,zlib,none,bzip2\r\n
+ readline(87 from -1) -> (48) x-hgproto-1: 0.1 0.2 comp=$USUAL_COMPRESSIONS$\r\n
readline(39 from -1) -> (35) accept: application/mercurial-0.1\r\n
readline(4 from -1) -> (4) host
read limit reached; closing socket
@@ -139,7 +137,7 @@
Failure to read getbundle HTTP request
- $ hg serve --config badserver.closeafterrecvbytes=292 -p $HGPORT -d --pid-file=hg.pid -E error.log
+ $ hg serve --config badserver.closeafterrecvbytes=304 -p $HGPORT -d --pid-file=hg.pid -E error.log
$ cat hg.pid > $DAEMON_PIDS
$ hg clone http://localhost:$HGPORT/ clone
requesting all changes
@@ -149,34 +147,36 @@
$ killdaemons.py $DAEMON_PIDS
$ cat error.log
- readline(292 from 65537) -> (33) GET /?cmd=capabilities HTTP/1.1\r\n
- readline(259 from -1) -> (27) Accept-Encoding: identity\r\n
- readline(232 from -1) -> (35) accept: application/mercurial-0.1\r\n
- readline(197 from -1) -> (2?) host: localhost:$HGPORT\r\n (glob)
- readline(17? from -1) -> (49) user-agent: mercurial/proto-1.0 (Mercurial 4.2)\r\n (glob)
- readline(12? from -1) -> (2) \r\n (glob)
+ readline(1 from -1) -> (1) x (?)
+ readline(1 from -1) -> (1) x (?)
+ readline(304 from 65537) -> (33) GET /?cmd=capabilities HTTP/1.1\r\n
+ readline(271 from -1) -> (27) Accept-Encoding: identity\r\n
+ readline(244 from -1) -> (35) accept: application/mercurial-0.1\r\n
+ readline(209 from -1) -> (2?) host: localhost:$HGPORT\r\n (glob)
+ readline(18? from -1) -> (49) user-agent: mercurial/proto-1.0 (Mercurial 4.2)\r\n (glob)
+ readline(13? from -1) -> (2) \r\n (glob)
write(36) -> HTTP/1.1 200 Script output follows\r\n
write(23) -> Server: badhttpserver\r\n
write(37) -> Date: Fri, 14 Apr 2017 00:00:00 GMT\r\n
write(41) -> Content-Type: application/mercurial-0.1\r\n
- write(21) -> Content-Length: 405\r\n
+ write(21) -> Content-Length: 417\r\n
write(2) -> \r\n
- write(405) -> lookup changegroupsubset branchmap pushkey known getbundle unbundlehash batch streamreqs=generaldelta,revlogv1 bundle2=HG20%0Achangegroup%3D01%2C02%0Adigests%3Dmd5%2Csha1%2Csha512%0Aerror%3Dabort%2Cunsupportedcontent%2Cpushraced%2Cpushkey%0Ahgtagsfnodes%0Alistkeys%0Apushkey%0Aremote-changegroup%3Dhttp%2Chttps unbundle=HG10GZ,HG10BZ,HG10UN httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx compression=none
- readline\(12[34] from 65537\) -> \(2[67]\) GET /\?cmd=batch HTTP/1.1\\r\\n (re)
- readline(9? from -1) -> (27) Accept-Encoding: identity\r\n (glob)
- readline(7? from -1) -> (29) vary: X-HgArg-1,X-HgProto-1\r\n (glob)
- readline(4? from -1) -> (41) x-hgarg-1: cmds=heads+%3Bknown+nodes%3D\r\n (glob)
- readline(1 from -1) -> (1) x (?)
+ write(417) -> lookup changegroupsubset branchmap pushkey known getbundle unbundlehash batch streamreqs=generaldelta,revlogv1 bundle2=HG20%0Abookmarks%0Achangegroup%3D01%2C02%0Adigests%3Dmd5%2Csha1%2Csha512%0Aerror%3Dabort%2Cunsupportedcontent%2Cpushraced%2Cpushkey%0Ahgtagsfnodes%0Alistkeys%0Apushkey%0Aremote-changegroup%3Dhttp%2Chttps unbundle=HG10GZ,HG10BZ,HG10UN httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx compression=none
+ readline(13? from 65537) -> (26) GET /?cmd=batch HTTP/1.1\r\n (glob)
+ readline(1?? from -1) -> (27) Accept-Encoding: identity\r\n (glob)
+ readline(8? from -1) -> (29) vary: X-HgArg-1,X-HgProto-1\r\n (glob)
+ readline(5? from -1) -> (41) x-hgarg-1: cmds=heads+%3Bknown+nodes%3D\r\n (glob)
+ readline(1? from -1) -> (1?) x-hgproto-1:* (glob)
read limit reached; closing socket
- readline(292 from 65537) -> (26) GET /?cmd=batch HTTP/1.1\r\n
- readline(266 from -1) -> (27) Accept-Encoding: identity\r\n
- readline(239 from -1) -> (29) vary: X-HgArg-1,X-HgProto-1\r\n
- readline(210 from -1) -> (41) x-hgarg-1: cmds=heads+%3Bknown+nodes%3D\r\n
- readline(169 from -1) -> (48) x-hgproto-1: 0.1 0.2 comp=zstd,zlib,none,bzip2\r\n
- readline(121 from -1) -> (35) accept: application/mercurial-0.1\r\n
- readline(86 from -1) -> (2?) host: localhost:$HGPORT\r\n (glob)
- readline(6? from -1) -> (49) user-agent: mercurial/proto-1.0 (Mercurial 4.2)\r\n (glob)
- readline(1? from -1) -> (2) \r\n (glob)
+ readline(304 from 65537) -> (26) GET /?cmd=batch HTTP/1.1\r\n
+ readline(278 from -1) -> (27) Accept-Encoding: identity\r\n
+ readline(251 from -1) -> (29) vary: X-HgArg-1,X-HgProto-1\r\n
+ readline(222 from -1) -> (41) x-hgarg-1: cmds=heads+%3Bknown+nodes%3D\r\n
+ readline(181 from -1) -> (48) x-hgproto-1: 0.1 0.2 comp=$USUAL_COMPRESSIONS$\r\n
+ readline(133 from -1) -> (35) accept: application/mercurial-0.1\r\n
+ readline(98 from -1) -> (2?) host: localhost:$HGPORT\r\n (glob)
+ readline(7? from -1) -> (49) user-agent: mercurial/proto-1.0 (Mercurial 4.2)\r\n (glob)
+ readline(2? from -1) -> (2) \r\n (glob)
write(36) -> HTTP/1.1 200 Script output follows\r\n
write(23) -> Server: badhttpserver\r\n
write(37) -> Date: Fri, 14 Apr 2017 00:00:00 GMT\r\n
@@ -184,12 +184,12 @@
write(20) -> Content-Length: 42\r\n
write(2) -> \r\n
write(42) -> 96ee1d7354c4ad7372047672c36a1f561e3a6a4c\n;
- readline\(1[23] from 65537\) -> \(1[23]\) GET /\?cmd=ge.? (re)
+ readline(2? from 65537) -> (2?) GET /?cmd=getbundle HTTP* (glob)
read limit reached; closing socket
- readline(292 from 65537) -> (30) GET /?cmd=getbundle HTTP/1.1\r\n
- readline(262 from -1) -> (27) Accept-Encoding: identity\r\n
- readline(235 from -1) -> (29) vary: X-HgArg-1,X-HgProto-1\r\n
- readline(206 from -1) -> (206) x-hgarg-1: bundlecaps=HG20%2Cbundle2%3DHG20%250Achangegroup%253D01%252C02%250Adigests%253Dmd5%252Csha1%252Csha512%250Aerror%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250Ahgtagsfnodes%250Ali
+ readline(304 from 65537) -> (30) GET /?cmd=getbundle HTTP/1.1\r\n
+ readline(274 from -1) -> (27) Accept-Encoding: identity\r\n
+ readline(247 from -1) -> (29) vary: X-HgArg-1,X-HgProto-1\r\n
+ readline(218 from -1) -> (218) x-hgarg-1: bookmarks=1&bundlecaps=HG20%2Cbundle2%3DHG20%250Abookmarks%250Achangegroup%253D01%252C02%250Adigests%253Dmd5%252Csha1%252Csha512%250Aerror%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250Ahgtag
read limit reached; closing socket
$ rm -f error.log
@@ -216,9 +216,9 @@
write(23) -> Server: badhttpserver\r\n
write(37) -> Date: Fri, 14 Apr 2017 00:00:00 GMT\r\n
write(41) -> Content-Type: application/mercurial-0.1\r\n
- write(21) -> Content-Length: 418\r\n
+ write(21) -> Content-Length: 430\r\n
write(2) -> \r\n
- write(418) -> lookup changegroupsubset branchmap pushkey known getbundle unbundlehash batch streamreqs=generaldelta,revlogv1 bundle2=HG20%0Achangegroup%3D01%2C02%0Adigests%3Dmd5%2Csha1%2Csha512%0Aerror%3Dabort%2Cunsupportedcontent%2Cpushraced%2Cpushkey%0Ahgtagsfnodes%0Alistkeys%0Apushkey%0Aremote-changegroup%3Dhttp%2Chttps unbundle=HG10GZ,HG10BZ,HG10UN httpheader=1024 httppostargs httpmediatype=0.1rx,0.1tx,0.2tx compression=none
+ write(430) -> lookup changegroupsubset branchmap pushkey known getbundle unbundlehash batch streamreqs=generaldelta,revlogv1 bundle2=HG20%0Abookmarks%0Achangegroup%3D01%2C02%0Adigests%3Dmd5%2Csha1%2Csha512%0Aerror%3Dabort%2Cunsupportedcontent%2Cpushraced%2Cpushkey%0Ahgtagsfnodes%0Alistkeys%0Apushkey%0Aremote-changegroup%3Dhttp%2Chttps unbundle=HG10GZ,HG10BZ,HG10UN httpheader=1024 httppostargs httpmediatype=0.1rx,0.1tx,0.2tx compression=none
readline\(14[67] from 65537\) -> \(2[67]\) POST /\?cmd=batch HTTP/1.1\\r\\n (re)
readline\(1(19|20) from -1\) -> \(27\) Accept-Encoding: identity\\r\\n (re)
readline(9? from -1) -> (41) content-type: application/mercurial-0.1\r\n (glob)
@@ -231,7 +231,7 @@
readline(261 from -1) -> (41) content-type: application/mercurial-0.1\r\n
readline(220 from -1) -> (19) vary: X-HgProto-1\r\n
readline(201 from -1) -> (19) x-hgargs-post: 28\r\n
- readline(182 from -1) -> (48) x-hgproto-1: 0.1 0.2 comp=zstd,zlib,none,bzip2\r\n
+ readline(182 from -1) -> (48) x-hgproto-1: 0.1 0.2 comp=$USUAL_COMPRESSIONS$\r\n
readline(134 from -1) -> (35) accept: application/mercurial-0.1\r\n
readline(99 from -1) -> (20) content-length: 28\r\n
readline(79 from -1) -> (2?) host: localhost:$HGPORT\r\n (glob)
@@ -275,7 +275,7 @@
$ cat hg.pid > $DAEMON_PIDS
$ hg clone http://localhost:$HGPORT/ clone
- abort: HTTP request error (incomplete response; expected 385 bytes got 20)
+ abort: HTTP request error (incomplete response; expected 397 bytes got 20)
(this may be an intermittent network failure; if the error persists, consider contacting the network or server operator)
[255]
@@ -292,9 +292,9 @@
write(23 from 23) -> (121) Server: badhttpserver\r\n
write(37 from 37) -> (84) Date: Fri, 14 Apr 2017 00:00:00 GMT\r\n
write(41 from 41) -> (43) Content-Type: application/mercurial-0.1\r\n
- write(21 from 21) -> (22) Content-Length: 405\r\n
+ write(21 from 21) -> (22) Content-Length: 417\r\n
write(2 from 2) -> (20) \r\n
- write(20 from 405) -> (0) lookup changegroupsu
+ write(20 from 417) -> (0) lookup changegroupsu
write limit reached; closing socket
$ rm -f error.log
@@ -308,7 +308,7 @@
$ hg clone http://localhost:$HGPORT/ clone
abort: 'http://localhost:$HGPORT/' does not appear to be an hg repository:
- ---%<--- (application/mercuria)
+ ---%<--- (applicat)
---%<---
!
@@ -327,22 +327,22 @@
write(23 from 23) -> (636) Server: badhttpserver\r\n
write(37 from 37) -> (599) Date: Fri, 14 Apr 2017 00:00:00 GMT\r\n
write(41 from 41) -> (558) Content-Type: application/mercurial-0.1\r\n
- write(21 from 21) -> (537) Content-Length: 405\r\n
+ write(21 from 21) -> (537) Content-Length: 417\r\n
write(2 from 2) -> (535) \r\n
- write(405 from 405) -> (130) lookup changegroupsubset branchmap pushkey known getbundle unbundlehash batch streamreqs=generaldelta,revlogv1 bundle2=HG20%0Achangegroup%3D01%2C02%0Adigests%3Dmd5%2Csha1%2Csha512%0Aerror%3Dabort%2Cunsupportedcontent%2Cpushraced%2Cpushkey%0Ahgtagsfnodes%0Alistkeys%0Apushkey%0Aremote-changegroup%3Dhttp%2Chttps unbundle=HG10GZ,HG10BZ,HG10UN httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx compression=none
+ write(417 from 417) -> (118) lookup changegroupsubset branchmap pushkey known getbundle unbundlehash batch streamreqs=generaldelta,revlogv1 bundle2=HG20%0Abookmarks%0Achangegroup%3D01%2C02%0Adigests%3Dmd5%2Csha1%2Csha512%0Aerror%3Dabort%2Cunsupportedcontent%2Cpushraced%2Cpushkey%0Ahgtagsfnodes%0Alistkeys%0Apushkey%0Aremote-changegroup%3Dhttp%2Chttps unbundle=HG10GZ,HG10BZ,HG10UN httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx compression=none
readline(65537) -> (26) GET /?cmd=batch HTTP/1.1\r\n
readline(-1) -> (27) Accept-Encoding: identity\r\n
readline(-1) -> (29) vary: X-HgArg-1,X-HgProto-1\r\n
readline(-1) -> (41) x-hgarg-1: cmds=heads+%3Bknown+nodes%3D\r\n
- readline(-1) -> (48) x-hgproto-1: 0.1 0.2 comp=zstd,zlib,none,bzip2\r\n
+ readline(-1) -> (48) x-hgproto-1: 0.1 0.2 comp=$USUAL_COMPRESSIONS$\r\n
readline(-1) -> (35) accept: application/mercurial-0.1\r\n
readline(-1) -> (2?) host: localhost:$HGPORT\r\n (glob)
readline(-1) -> (49) user-agent: mercurial/proto-1.0 (Mercurial 4.2)\r\n
readline(-1) -> (2) \r\n
- write(36 from 36) -> (94) HTTP/1.1 200 Script output follows\r\n
- write(23 from 23) -> (71) Server: badhttpserver\r\n
- write(37 from 37) -> (34) Date: Fri, 14 Apr 2017 00:00:00 GMT\r\n
- write(34 from 41) -> (0) Content-Type: application/mercuria
+ write(36 from 36) -> (82) HTTP/1.1 200 Script output follows\r\n
+ write(23 from 23) -> (59) Server: badhttpserver\r\n
+ write(37 from 37) -> (22) Date: Fri, 14 Apr 2017 00:00:00 GMT\r\n
+ write(22 from 41) -> (0) Content-Type: applicat
write limit reached; closing socket
write(36) -> HTTP/1.1 500 Internal Server Error\r\n
@@ -375,32 +375,32 @@
write(23 from 23) -> (701) Server: badhttpserver\r\n
write(37 from 37) -> (664) Date: Fri, 14 Apr 2017 00:00:00 GMT\r\n
write(41 from 41) -> (623) Content-Type: application/mercurial-0.1\r\n
- write(21 from 21) -> (602) Content-Length: 405\r\n
+ write(21 from 21) -> (602) Content-Length: 417\r\n
write(2 from 2) -> (600) \r\n
- write(405 from 405) -> (195) lookup changegroupsubset branchmap pushkey known getbundle unbundlehash batch streamreqs=generaldelta,revlogv1 bundle2=HG20%0Achangegroup%3D01%2C02%0Adigests%3Dmd5%2Csha1%2Csha512%0Aerror%3Dabort%2Cunsupportedcontent%2Cpushraced%2Cpushkey%0Ahgtagsfnodes%0Alistkeys%0Apushkey%0Aremote-changegroup%3Dhttp%2Chttps unbundle=HG10GZ,HG10BZ,HG10UN httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx compression=none
+ write(417 from 417) -> (183) lookup changegroupsubset branchmap pushkey known getbundle unbundlehash batch streamreqs=generaldelta,revlogv1 bundle2=HG20%0Abookmarks%0Achangegroup%3D01%2C02%0Adigests%3Dmd5%2Csha1%2Csha512%0Aerror%3Dabort%2Cunsupportedcontent%2Cpushraced%2Cpushkey%0Ahgtagsfnodes%0Alistkeys%0Apushkey%0Aremote-changegroup%3Dhttp%2Chttps unbundle=HG10GZ,HG10BZ,HG10UN httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx compression=none
readline(65537) -> (26) GET /?cmd=batch HTTP/1.1\r\n
readline(-1) -> (27) Accept-Encoding: identity\r\n
readline(-1) -> (29) vary: X-HgArg-1,X-HgProto-1\r\n
readline(-1) -> (41) x-hgarg-1: cmds=heads+%3Bknown+nodes%3D\r\n
- readline(-1) -> (48) x-hgproto-1: 0.1 0.2 comp=zstd,zlib,none,bzip2\r\n
+ readline(-1) -> (48) x-hgproto-1: 0.1 0.2 comp=$USUAL_COMPRESSIONS$\r\n
readline(-1) -> (35) accept: application/mercurial-0.1\r\n
readline(-1) -> (2?) host: localhost:$HGPORT\r\n (glob)
readline(-1) -> (49) user-agent: mercurial/proto-1.0 (Mercurial 4.2)\r\n
readline(-1) -> (2) \r\n
- write(36 from 36) -> (159) HTTP/1.1 200 Script output follows\r\n
- write(23 from 23) -> (136) Server: badhttpserver\r\n
- write(37 from 37) -> (99) Date: Fri, 14 Apr 2017 00:00:00 GMT\r\n
- write(41 from 41) -> (58) Content-Type: application/mercurial-0.1\r\n
- write(20 from 20) -> (38) Content-Length: 42\r\n
- write(2 from 2) -> (36) \r\n
- write(36 from 42) -> (0) 96ee1d7354c4ad7372047672c36a1f561e3a
+ write(36 from 36) -> (147) HTTP/1.1 200 Script output follows\r\n
+ write(23 from 23) -> (124) Server: badhttpserver\r\n
+ write(37 from 37) -> (87) Date: Fri, 14 Apr 2017 00:00:00 GMT\r\n
+ write(41 from 41) -> (46) Content-Type: application/mercurial-0.1\r\n
+ write(20 from 20) -> (26) Content-Length: 42\r\n
+ write(2 from 2) -> (24) \r\n
+ write(24 from 42) -> (0) 96ee1d7354c4ad7372047672
write limit reached; closing socket
$ rm -f error.log
Server sends incomplete headers for getbundle response
- $ hg serve --config badserver.closeaftersendbytes=895 -p $HGPORT -d --pid-file=hg.pid -E error.log
+ $ hg serve --config badserver.closeaftersendbytes=907 -p $HGPORT -d --pid-file=hg.pid -E error.log
$ cat hg.pid > $DAEMON_PIDS
TODO this output is terrible
@@ -423,18 +423,18 @@
readline(-1) -> (2?) host: localhost:$HGPORT\r\n (glob)
readline(-1) -> (49) user-agent: mercurial/proto-1.0 (Mercurial 4.2)\r\n
readline(-1) -> (2) \r\n
- write(36 from 36) -> (859) HTTP/1.1 200 Script output follows\r\n
- write(23 from 23) -> (836) Server: badhttpserver\r\n
- write(37 from 37) -> (799) Date: Fri, 14 Apr 2017 00:00:00 GMT\r\n
- write(41 from 41) -> (758) Content-Type: application/mercurial-0.1\r\n
- write(21 from 21) -> (737) Content-Length: 405\r\n
- write(2 from 2) -> (735) \r\n
- write(405 from 405) -> (330) lookup changegroupsubset branchmap pushkey known getbundle unbundlehash batch streamreqs=generaldelta,revlogv1 bundle2=HG20%0Achangegroup%3D01%2C02%0Adigests%3Dmd5%2Csha1%2Csha512%0Aerror%3Dabort%2Cunsupportedcontent%2Cpushraced%2Cpushkey%0Ahgtagsfnodes%0Alistkeys%0Apushkey%0Aremote-changegroup%3Dhttp%2Chttps unbundle=HG10GZ,HG10BZ,HG10UN httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx compression=none
+ write(36 from 36) -> (871) HTTP/1.1 200 Script output follows\r\n
+ write(23 from 23) -> (848) Server: badhttpserver\r\n
+ write(37 from 37) -> (811) Date: Fri, 14 Apr 2017 00:00:00 GMT\r\n
+ write(41 from 41) -> (770) Content-Type: application/mercurial-0.1\r\n
+ write(21 from 21) -> (749) Content-Length: 417\r\n
+ write(2 from 2) -> (747) \r\n
+ write(417 from 417) -> (330) lookup changegroupsubset branchmap pushkey known getbundle unbundlehash batch streamreqs=generaldelta,revlogv1 bundle2=HG20%0Abookmarks%0Achangegroup%3D01%2C02%0Adigests%3Dmd5%2Csha1%2Csha512%0Aerror%3Dabort%2Cunsupportedcontent%2Cpushraced%2Cpushkey%0Ahgtagsfnodes%0Alistkeys%0Apushkey%0Aremote-changegroup%3Dhttp%2Chttps unbundle=HG10GZ,HG10BZ,HG10UN httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx compression=none
readline(65537) -> (26) GET /?cmd=batch HTTP/1.1\r\n
readline(-1) -> (27) Accept-Encoding: identity\r\n
readline(-1) -> (29) vary: X-HgArg-1,X-HgProto-1\r\n
readline(-1) -> (41) x-hgarg-1: cmds=heads+%3Bknown+nodes%3D\r\n
- readline(-1) -> (48) x-hgproto-1: 0.1 0.2 comp=zstd,zlib,none,bzip2\r\n
+ readline(-1) -> (48) x-hgproto-1: 0.1 0.2 comp=$USUAL_COMPRESSIONS$\r\n
readline(-1) -> (35) accept: application/mercurial-0.1\r\n
readline(-1) -> (2?) host: localhost:$HGPORT\r\n (glob)
readline(-1) -> (49) user-agent: mercurial/proto-1.0 (Mercurial 4.2)\r\n
@@ -449,8 +449,8 @@
readline(65537) -> (30) GET /?cmd=getbundle HTTP/1.1\r\n
readline(-1) -> (27) Accept-Encoding: identity\r\n
readline(-1) -> (29) vary: X-HgArg-1,X-HgProto-1\r\n
- readline(-1) -> (396) x-hgarg-1: bundlecaps=HG20%2Cbundle2%3DHG20%250Achangegroup%253D01%252C02%250Adigests%253Dmd5%252Csha1%252Csha512%250Aerror%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250Ahgtagsfnodes%250Alistkeys%250Apushkey%250Aremote-changegroup%253Dhttp%252Chttps&cg=1&common=0000000000000000000000000000000000000000&heads=96ee1d7354c4ad7372047672c36a1f561e3a6a4c&listkeys=phases%2Cbookmarks\r\n
- readline(-1) -> (48) x-hgproto-1: 0.1 0.2 comp=zstd,zlib,none,bzip2\r\n
+ readline(-1) -> (440) x-hgarg-1: bookmarks=1&bundlecaps=HG20%2Cbundle2%3DHG20%250Abookmarks%250Achangegroup%253D01%252C02%250Adigests%253Dmd5%252Csha1%252Csha512%250Aerror%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250Ahgtagsfnodes%250Alistkeys%250Apushkey%250Aremote-changegroup%253Dhttp%252Chttps%250Astream%253Dv2&cg=1&common=0000000000000000000000000000000000000000&heads=96ee1d7354c4ad7372047672c36a1f561e3a6a4c&listkeys=phases%2Cbookmarks\r\n
+ readline(-1) -> (48) x-hgproto-1: 0.1 0.2 comp=$USUAL_COMPRESSIONS$\r\n
readline(-1) -> (35) accept: application/mercurial-0.1\r\n
readline(-1) -> (2?) host: localhost:$HGPORT\r\n (glob)
readline(-1) -> (49) user-agent: mercurial/proto-1.0 (Mercurial 4.2)\r\n
@@ -466,7 +466,7 @@
Server sends empty HTTP body for getbundle
- $ hg serve --config badserver.closeaftersendbytes=933 -p $HGPORT -d --pid-file=hg.pid -E error.log
+ $ hg serve --config badserver.closeaftersendbytes=945 -p $HGPORT -d --pid-file=hg.pid -E error.log
$ cat hg.pid > $DAEMON_PIDS
$ hg clone http://localhost:$HGPORT/ clone
@@ -484,18 +484,18 @@
readline(-1) -> (2?) host: localhost:$HGPORT\r\n (glob)
readline(-1) -> (49) user-agent: mercurial/proto-1.0 (Mercurial 4.2)\r\n
readline(-1) -> (2) \r\n
- write(36 from 36) -> (897) HTTP/1.1 200 Script output follows\r\n
- write(23 from 23) -> (874) Server: badhttpserver\r\n
- write(37 from 37) -> (837) Date: Fri, 14 Apr 2017 00:00:00 GMT\r\n
- write(41 from 41) -> (796) Content-Type: application/mercurial-0.1\r\n
- write(21 from 21) -> (775) Content-Length: 405\r\n
- write(2 from 2) -> (773) \r\n
- write(405 from 405) -> (368) lookup changegroupsubset branchmap pushkey known getbundle unbundlehash batch streamreqs=generaldelta,revlogv1 bundle2=HG20%0Achangegroup%3D01%2C02%0Adigests%3Dmd5%2Csha1%2Csha512%0Aerror%3Dabort%2Cunsupportedcontent%2Cpushraced%2Cpushkey%0Ahgtagsfnodes%0Alistkeys%0Apushkey%0Aremote-changegroup%3Dhttp%2Chttps unbundle=HG10GZ,HG10BZ,HG10UN httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx compression=none
+ write(36 from 36) -> (909) HTTP/1.1 200 Script output follows\r\n
+ write(23 from 23) -> (886) Server: badhttpserver\r\n
+ write(37 from 37) -> (849) Date: Fri, 14 Apr 2017 00:00:00 GMT\r\n
+ write(41 from 41) -> (808) Content-Type: application/mercurial-0.1\r\n
+ write(21 from 21) -> (787) Content-Length: 417\r\n
+ write(2 from 2) -> (785) \r\n
+ write(417 from 417) -> (368) lookup changegroupsubset branchmap pushkey known getbundle unbundlehash batch streamreqs=generaldelta,revlogv1 bundle2=HG20%0Abookmarks%0Achangegroup%3D01%2C02%0Adigests%3Dmd5%2Csha1%2Csha512%0Aerror%3Dabort%2Cunsupportedcontent%2Cpushraced%2Cpushkey%0Ahgtagsfnodes%0Alistkeys%0Apushkey%0Aremote-changegroup%3Dhttp%2Chttps unbundle=HG10GZ,HG10BZ,HG10UN httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx compression=none
readline(65537) -> (26) GET /?cmd=batch HTTP/1.1\r\n
readline(-1) -> (27) Accept-Encoding: identity\r\n
readline(-1) -> (29) vary: X-HgArg-1,X-HgProto-1\r\n
readline(-1) -> (41) x-hgarg-1: cmds=heads+%3Bknown+nodes%3D\r\n
- readline(-1) -> (48) x-hgproto-1: 0.1 0.2 comp=zstd,zlib,none,bzip2\r\n
+ readline(-1) -> (48) x-hgproto-1: 0.1 0.2 comp=$USUAL_COMPRESSIONS$\r\n
readline(-1) -> (35) accept: application/mercurial-0.1\r\n
readline(-1) -> (2?) host: localhost:$HGPORT\r\n (glob)
readline(-1) -> (49) user-agent: mercurial/proto-1.0 (Mercurial 4.2)\r\n
@@ -510,8 +510,8 @@
readline(65537) -> (30) GET /?cmd=getbundle HTTP/1.1\r\n
readline(-1) -> (27) Accept-Encoding: identity\r\n
readline(-1) -> (29) vary: X-HgArg-1,X-HgProto-1\r\n
- readline(-1) -> (396) x-hgarg-1: bundlecaps=HG20%2Cbundle2%3DHG20%250Achangegroup%253D01%252C02%250Adigests%253Dmd5%252Csha1%252Csha512%250Aerror%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250Ahgtagsfnodes%250Alistkeys%250Apushkey%250Aremote-changegroup%253Dhttp%252Chttps&cg=1&common=0000000000000000000000000000000000000000&heads=96ee1d7354c4ad7372047672c36a1f561e3a6a4c&listkeys=phases%2Cbookmarks\r\n
- readline(-1) -> (48) x-hgproto-1: 0.1 0.2 comp=zstd,zlib,none,bzip2\r\n
+ readline(-1) -> (440) x-hgarg-1: bookmarks=1&bundlecaps=HG20%2Cbundle2%3DHG20%250Abookmarks%250Achangegroup%253D01%252C02%250Adigests%253Dmd5%252Csha1%252Csha512%250Aerror%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250Ahgtagsfnodes%250Alistkeys%250Apushkey%250Aremote-changegroup%253Dhttp%252Chttps%250Astream%253Dv2&cg=1&common=0000000000000000000000000000000000000000&heads=96ee1d7354c4ad7372047672c36a1f561e3a6a4c&listkeys=phases%2Cbookmarks\r\n
+ readline(-1) -> (48) x-hgproto-1: 0.1 0.2 comp=$USUAL_COMPRESSIONS$\r\n
readline(-1) -> (35) accept: application/mercurial-0.1\r\n
readline(-1) -> (2?) host: localhost:$HGPORT\r\n (glob)
readline(-1) -> (49) user-agent: mercurial/proto-1.0 (Mercurial 4.2)\r\n
@@ -529,12 +529,12 @@
Server sends partial compression string
- $ hg serve --config badserver.closeaftersendbytes=945 -p $HGPORT -d --pid-file=hg.pid -E error.log
+ $ hg serve --config badserver.closeaftersendbytes=969 -p $HGPORT -d --pid-file=hg.pid -E error.log
$ cat hg.pid > $DAEMON_PIDS
$ hg clone http://localhost:$HGPORT/ clone
requesting all changes
- abort: HTTP request error (incomplete response; expected 1 bytes got 3)
+ abort: HTTP request error (incomplete response)
(this may be an intermittent network failure; if the error persists, consider contacting the network or server operator)
[255]
@@ -547,46 +547,47 @@
readline(-1) -> (2?) host: localhost:$HGPORT\r\n (glob)
readline(-1) -> (49) user-agent: mercurial/proto-1.0 (Mercurial 4.2)\r\n
readline(-1) -> (2) \r\n
- write(36 from 36) -> (909) HTTP/1.1 200 Script output follows\r\n
- write(23 from 23) -> (886) Server: badhttpserver\r\n
- write(37 from 37) -> (849) Date: Fri, 14 Apr 2017 00:00:00 GMT\r\n
- write(41 from 41) -> (808) Content-Type: application/mercurial-0.1\r\n
- write(21 from 21) -> (787) Content-Length: 405\r\n
- write(2 from 2) -> (785) \r\n
- write(405 from 405) -> (380) lookup changegroupsubset branchmap pushkey known getbundle unbundlehash batch streamreqs=generaldelta,revlogv1 bundle2=HG20%0Achangegroup%3D01%2C02%0Adigests%3Dmd5%2Csha1%2Csha512%0Aerror%3Dabort%2Cunsupportedcontent%2Cpushraced%2Cpushkey%0Ahgtagsfnodes%0Alistkeys%0Apushkey%0Aremote-changegroup%3Dhttp%2Chttps unbundle=HG10GZ,HG10BZ,HG10UN httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx compression=none
+ write(36 from 36) -> (933) HTTP/1.1 200 Script output follows\r\n
+ write(23 from 23) -> (910) Server: badhttpserver\r\n
+ write(37 from 37) -> (873) Date: Fri, 14 Apr 2017 00:00:00 GMT\r\n
+ write(41 from 41) -> (832) Content-Type: application/mercurial-0.1\r\n
+ write(21 from 21) -> (811) Content-Length: 417\r\n
+ write(2 from 2) -> (809) \r\n
+ write(417 from 417) -> (392) lookup changegroupsubset branchmap pushkey known getbundle unbundlehash batch streamreqs=generaldelta,revlogv1 bundle2=HG20%0Abookmarks%0Achangegroup%3D01%2C02%0Adigests%3Dmd5%2Csha1%2Csha512%0Aerror%3Dabort%2Cunsupportedcontent%2Cpushraced%2Cpushkey%0Ahgtagsfnodes%0Alistkeys%0Apushkey%0Aremote-changegroup%3Dhttp%2Chttps unbundle=HG10GZ,HG10BZ,HG10UN httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx compression=none
readline(65537) -> (26) GET /?cmd=batch HTTP/1.1\r\n
readline(-1) -> (27) Accept-Encoding: identity\r\n
readline(-1) -> (29) vary: X-HgArg-1,X-HgProto-1\r\n
readline(-1) -> (41) x-hgarg-1: cmds=heads+%3Bknown+nodes%3D\r\n
- readline(-1) -> (48) x-hgproto-1: 0.1 0.2 comp=zstd,zlib,none,bzip2\r\n
+ readline(-1) -> (48) x-hgproto-1: 0.1 0.2 comp=$USUAL_COMPRESSIONS$\r\n
readline(-1) -> (35) accept: application/mercurial-0.1\r\n
readline(-1) -> (2?) host: localhost:$HGPORT\r\n (glob)
readline(-1) -> (49) user-agent: mercurial/proto-1.0 (Mercurial 4.2)\r\n
readline(-1) -> (2) \r\n
- write(36 from 36) -> (344) HTTP/1.1 200 Script output follows\r\n
- write(23 from 23) -> (321) Server: badhttpserver\r\n
- write(37 from 37) -> (284) Date: Fri, 14 Apr 2017 00:00:00 GMT\r\n
- write(41 from 41) -> (243) Content-Type: application/mercurial-0.1\r\n
- write(20 from 20) -> (223) Content-Length: 42\r\n
- write(2 from 2) -> (221) \r\n
- write(42 from 42) -> (179) 96ee1d7354c4ad7372047672c36a1f561e3a6a4c\n;
+ write(36 from 36) -> (356) HTTP/1.1 200 Script output follows\r\n
+ write(23 from 23) -> (333) Server: badhttpserver\r\n
+ write(37 from 37) -> (296) Date: Fri, 14 Apr 2017 00:00:00 GMT\r\n
+ write(41 from 41) -> (255) Content-Type: application/mercurial-0.1\r\n
+ write(20 from 20) -> (235) Content-Length: 42\r\n
+ write(2 from 2) -> (233) \r\n
+ write(42 from 42) -> (191) 96ee1d7354c4ad7372047672c36a1f561e3a6a4c\n;
readline(65537) -> (30) GET /?cmd=getbundle HTTP/1.1\r\n
readline(-1) -> (27) Accept-Encoding: identity\r\n
readline(-1) -> (29) vary: X-HgArg-1,X-HgProto-1\r\n
- readline(-1) -> (396) x-hgarg-1: bundlecaps=HG20%2Cbundle2%3DHG20%250Achangegroup%253D01%252C02%250Adigests%253Dmd5%252Csha1%252Csha512%250Aerror%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250Ahgtagsfnodes%250Alistkeys%250Apushkey%250Aremote-changegroup%253Dhttp%252Chttps&cg=1&common=0000000000000000000000000000000000000000&heads=96ee1d7354c4ad7372047672c36a1f561e3a6a4c&listkeys=phases%2Cbookmarks\r\n
- readline(-1) -> (48) x-hgproto-1: 0.1 0.2 comp=zstd,zlib,none,bzip2\r\n
+ readline(-1) -> (440) x-hgarg-1: bookmarks=1&bundlecaps=HG20%2Cbundle2%3DHG20%250Abookmarks%250Achangegroup%253D01%252C02%250Adigests%253Dmd5%252Csha1%252Csha512%250Aerror%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250Ahgtagsfnodes%250Alistkeys%250Apushkey%250Aremote-changegroup%253Dhttp%252Chttps%250Astream%253Dv2&cg=1&common=0000000000000000000000000000000000000000&heads=96ee1d7354c4ad7372047672c36a1f561e3a6a4c&listkeys=phases%2Cbookmarks\r\n
+ readline(-1) -> (48) x-hgproto-1: 0.1 0.2 comp=$USUAL_COMPRESSIONS$\r\n
readline(-1) -> (35) accept: application/mercurial-0.1\r\n
readline(-1) -> (2?) host: localhost:$HGPORT\r\n (glob)
readline(-1) -> (49) user-agent: mercurial/proto-1.0 (Mercurial 4.2)\r\n
readline(-1) -> (2) \r\n
- write(36 from 36) -> (143) HTTP/1.1 200 Script output follows\r\n
- write(23 from 23) -> (120) Server: badhttpserver\r\n
- write(37 from 37) -> (83) Date: Fri, 14 Apr 2017 00:00:00 GMT\r\n
- write(41 from 41) -> (42) Content-Type: application/mercurial-0.2\r\n
- write(28 from 28) -> (14) Transfer-Encoding: chunked\r\n
- write(2 from 2) -> (12) \r\n
- write(6 from 6) -> (6) 1\\r\\n\x04\\r\\n (esc)
- write(6 from 9) -> (0) 4\r\nnon
+ write(36 from 36) -> (155) HTTP/1.1 200 Script output follows\r\n
+ write(23 from 23) -> (132) Server: badhttpserver\r\n
+ write(37 from 37) -> (95) Date: Fri, 14 Apr 2017 00:00:00 GMT\r\n
+ write(41 from 41) -> (54) Content-Type: application/mercurial-0.2\r\n
+ write(28 from 28) -> (26) Transfer-Encoding: chunked\r\n
+ write(2 from 2) -> (24) \r\n
+ write(6 from 6) -> (18) 1\\r\\n\x04\\r\\n (esc)
+ write(9 from 9) -> (9) 4\r\nnone\r\n
+ write(9 from 9) -> (0) 4\r\nHG20\r\n
write limit reached; closing socket
write(27) -> 15\r\nInternal Server Error\r\n
@@ -594,7 +595,7 @@
Server sends partial bundle2 header magic
- $ hg serve --config badserver.closeaftersendbytes=954 -p $HGPORT -d --pid-file=hg.pid -E error.log
+ $ hg serve --config badserver.closeaftersendbytes=966 -p $HGPORT -d --pid-file=hg.pid -E error.log
$ cat hg.pid > $DAEMON_PIDS
$ hg clone http://localhost:$HGPORT/ clone
@@ -618,7 +619,7 @@
Server sends incomplete bundle2 stream params length
- $ hg serve --config badserver.closeaftersendbytes=963 -p $HGPORT -d --pid-file=hg.pid -E error.log
+ $ hg serve --config badserver.closeaftersendbytes=975 -p $HGPORT -d --pid-file=hg.pid -E error.log
$ cat hg.pid > $DAEMON_PIDS
$ hg clone http://localhost:$HGPORT/ clone
@@ -643,7 +644,7 @@
Server stops after bundle2 stream params header
- $ hg serve --config badserver.closeaftersendbytes=966 -p $HGPORT -d --pid-file=hg.pid -E error.log
+ $ hg serve --config badserver.closeaftersendbytes=978 -p $HGPORT -d --pid-file=hg.pid -E error.log
$ cat hg.pid > $DAEMON_PIDS
$ hg clone http://localhost:$HGPORT/ clone
@@ -668,7 +669,7 @@
Server stops sending after bundle2 part header length
- $ hg serve --config badserver.closeaftersendbytes=975 -p $HGPORT -d --pid-file=hg.pid -E error.log
+ $ hg serve --config badserver.closeaftersendbytes=987 -p $HGPORT -d --pid-file=hg.pid -E error.log
$ cat hg.pid > $DAEMON_PIDS
$ hg clone http://localhost:$HGPORT/ clone
@@ -694,7 +695,7 @@
Server stops sending after bundle2 part header
- $ hg serve --config badserver.closeaftersendbytes=1022 -p $HGPORT -d --pid-file=hg.pid -E error.log
+ $ hg serve --config badserver.closeaftersendbytes=1034 -p $HGPORT -d --pid-file=hg.pid -E error.log
$ cat hg.pid > $DAEMON_PIDS
$ hg clone http://localhost:$HGPORT/ clone
@@ -724,7 +725,7 @@
Server stops after bundle2 part payload chunk size
- $ hg serve --config badserver.closeaftersendbytes=1031 -p $HGPORT -d --pid-file=hg.pid -E error.log
+ $ hg serve --config badserver.closeaftersendbytes=1055 -p $HGPORT -d --pid-file=hg.pid -E error.log
$ cat hg.pid > $DAEMON_PIDS
$ hg clone http://localhost:$HGPORT/ clone
@@ -732,22 +733,22 @@
adding changesets
transaction abort!
rollback completed
- abort: HTTP request error (incomplete response)
+ abort: HTTP request error (incomplete response; expected 459 bytes got 7)
(this may be an intermittent network failure; if the error persists, consider contacting the network or server operator)
[255]
$ killdaemons.py $DAEMON_PIDS
$ tail -11 error.log
- write(28 from 28) -> (100) Transfer-Encoding: chunked\r\n
- write(2 from 2) -> (98) \r\n
- write(6 from 6) -> (92) 1\\r\\n\x04\\r\\n (esc)
- write(9 from 9) -> (83) 4\r\nnone\r\n
- write(9 from 9) -> (74) 4\r\nHG20\r\n
- write(9 from 9) -> (65) 4\\r\\n\x00\x00\x00\x00\\r\\n (esc)
- write(9 from 9) -> (56) 4\\r\\n\x00\x00\x00)\\r\\n (esc)
- write(47 from 47) -> (9) 29\\r\\n\x0bCHANGEGROUP\x00\x00\x00\x00\x01\x01\x07\x02 \x01version02nbchanges1\\r\\n (esc)
- write(9 from 9) -> (0) 4\\r\\n\x00\x00\x01\xd2\\r\\n (esc)
+ write(2 from 2) -> (110) \r\n
+ write(6 from 6) -> (104) 1\\r\\n\x04\\r\\n (esc)
+ write(9 from 9) -> (95) 4\r\nnone\r\n
+ write(9 from 9) -> (86) 4\r\nHG20\r\n
+ write(9 from 9) -> (77) 4\\r\\n\x00\x00\x00\x00\\r\\n (esc)
+ write(9 from 9) -> (68) 4\\r\\n\x00\x00\x00)\\r\\n (esc)
+ write(47 from 47) -> (21) 29\\r\\n\x0bCHANGEGROUP\x00\x00\x00\x00\x01\x01\x07\x02 \x01version02nbchanges1\\r\\n (esc)
+ write(9 from 9) -> (12) 4\\r\\n\x00\x00\x01\xd2\\r\\n (esc)
+ write(12 from 473) -> (0) 1d2\\r\\n\x00\x00\x00\xb2\x96\xee\x1d (esc)
write limit reached; closing socket
write(27) -> 15\r\nInternal Server Error\r\n
@@ -755,7 +756,7 @@
Server stops sending in middle of bundle2 payload chunk
- $ hg serve --config badserver.closeaftersendbytes=1504 -p $HGPORT -d --pid-file=hg.pid -E error.log
+ $ hg serve --config badserver.closeaftersendbytes=1516 -p $HGPORT -d --pid-file=hg.pid -E error.log
$ cat hg.pid > $DAEMON_PIDS
$ hg clone http://localhost:$HGPORT/ clone
@@ -787,7 +788,7 @@
Server stops sending after 0 length payload chunk size
- $ hg serve --config badserver.closeaftersendbytes=1513 -p $HGPORT -d --pid-file=hg.pid -E error.log
+ $ hg serve --config badserver.closeaftersendbytes=1547 -p $HGPORT -d --pid-file=hg.pid -E error.log
$ cat hg.pid > $DAEMON_PIDS
$ hg clone http://localhost:$HGPORT/ clone
@@ -798,24 +799,24 @@
added 1 changesets with 1 changes to 1 files
transaction abort!
rollback completed
- abort: HTTP request error (incomplete response)
+ abort: HTTP request error (incomplete response; expected 23 bytes got 9)
(this may be an intermittent network failure; if the error persists, consider contacting the network or server operator)
[255]
$ killdaemons.py $DAEMON_PIDS
$ tail -13 error.log
- write(28 from 28) -> (582) Transfer-Encoding: chunked\r\n
- write(2 from 2) -> (580) \r\n
- write(6 from 6) -> (574) 1\\r\\n\x04\\r\\n (esc)
- write(9 from 9) -> (565) 4\r\nnone\r\n
- write(9 from 9) -> (556) 4\r\nHG20\r\n
- write(9 from 9) -> (547) 4\\r\\n\x00\x00\x00\x00\\r\\n (esc)
- write(9 from 9) -> (538) 4\\r\\n\x00\x00\x00)\\r\\n (esc)
- write(47 from 47) -> (491) 29\\r\\n\x0bCHANGEGROUP\x00\x00\x00\x00\x01\x01\x07\x02 \x01version02nbchanges1\\r\\n (esc)
- write(9 from 9) -> (482) 4\\r\\n\x00\x00\x01\xd2\\r\\n (esc)
- write(473 from 473) -> (9) 1d2\\r\\n\x00\x00\x00\xb2\x96\xee\x1dsT\xc4\xadsr\x04vr\xc3j\x1fV\x1e:jL\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x96\xee\x1dsT\xc4\xadsr\x04vr\xc3j\x1fV\x1e:jL\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00>6a3df4de388f3c4f8e28f4f9a814299a3cbb5f50\\ntest\\n0 0\\nfoo\\n\\ninitial\x00\x00\x00\x00\x00\x00\x00\xa1j=\xf4\xde8\x8f<O\x8e(\xf4\xf9\xa8\x14)\x9a<\xbb_P\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x96\xee\x1dsT\xc4\xadsr\x04vr\xc3j\x1fV\x1e:jL\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00-foo\x00b80de5d138758541c5f05265ad144ab9fa86d1db\\n\x00\x00\x00\x00\x00\x00\x00\x07foo\x00\x00\x00h\xb8\\r\xe5\xd18u\x85A\xc5\xf0Re\xad\x14J\xb9\xfa\x86\xd1\xdb\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x96\xee\x1dsT\xc4\xadsr\x04vr\xc3j\x1fV\x1e:jL\x00\x00\x00\x00\x00\x00\x00\x00\\r\\n (esc)
- write(9 from 9) -> (0) 4\\r\\n\x00\x00\x00\x00\\r\\n (esc)
+ write(6 from 6) -> (596) 1\\r\\n\x04\\r\\n (esc)
+ write(9 from 9) -> (587) 4\r\nnone\r\n
+ write(9 from 9) -> (578) 4\r\nHG20\r\n
+ write(9 from 9) -> (569) 4\\r\\n\x00\x00\x00\x00\\r\\n (esc)
+ write(9 from 9) -> (560) 4\\r\\n\x00\x00\x00)\\r\\n (esc)
+ write(47 from 47) -> (513) 29\\r\\n\x0bCHANGEGROUP\x00\x00\x00\x00\x01\x01\x07\x02 \x01version02nbchanges1\\r\\n (esc)
+ write(9 from 9) -> (504) 4\\r\\n\x00\x00\x01\xd2\\r\\n (esc)
+ write(473 from 473) -> (31) 1d2\\r\\n\x00\x00\x00\xb2\x96\xee\x1dsT\xc4\xadsr\x04vr\xc3j\x1fV\x1e:jL\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x96\xee\x1dsT\xc4\xadsr\x04vr\xc3j\x1fV\x1e:jL\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00>6a3df4de388f3c4f8e28f4f9a814299a3cbb5f50\\ntest\\n0 0\\nfoo\\n\\ninitial\x00\x00\x00\x00\x00\x00\x00\xa1j=\xf4\xde8\x8f<O\x8e(\xf4\xf9\xa8\x14)\x9a<\xbb_P\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x96\xee\x1dsT\xc4\xadsr\x04vr\xc3j\x1fV\x1e:jL\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00-foo\x00b80de5d138758541c5f05265ad144ab9fa86d1db\\n\x00\x00\x00\x00\x00\x00\x00\x07foo\x00\x00\x00h\xb8\\r\xe5\xd18u\x85A\xc5\xf0Re\xad\x14J\xb9\xfa\x86\xd1\xdb\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x96\xee\x1dsT\xc4\xadsr\x04vr\xc3j\x1fV\x1e:jL\x00\x00\x00\x00\x00\x00\x00\x00\\r\\n (esc)
+ write(9 from 9) -> (22) 4\\r\\n\x00\x00\x00\x00\\r\\n (esc)
+ write(9 from 9) -> (13) 4\\r\\n\x00\x00\x00 \\r\\n (esc)
+ write(13 from 38) -> (0) 20\\r\\n\x08LISTKEYS (esc)
write limit reached; closing socket
write(27) -> 15\r\nInternal Server Error\r\n
@@ -824,7 +825,7 @@
Server stops sending after the size-0 bundle2 part header (indicating end of bundle2 payload)
This is before the 0 size chunked transfer part that signals end of HTTP response.
- $ hg serve --config badserver.closeaftersendbytes=1710 -p $HGPORT -d --pid-file=hg.pid -E error.log
+ $ hg serve --config badserver.closeaftersendbytes=1722 -p $HGPORT -d --pid-file=hg.pid -E error.log
$ cat hg.pid > $DAEMON_PIDS
$ hg clone http://localhost:$HGPORT/ clone
@@ -868,7 +869,7 @@
Server sends a size-0 chunked-transfer chunk size without the terminating \r\n
- $ hg serve --config badserver.closeaftersendbytes=1713 -p $HGPORT -d --pid-file=hg.pid -E error.log
+ $ hg serve --config badserver.closeaftersendbytes=1725 -p $HGPORT -d --pid-file=hg.pid -E error.log
$ cat hg.pid > $DAEMON_PIDS
$ hg clone http://localhost:$HGPORT/ clone
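(editor's note) These tests drive a deliberately broken HTTP server that counts bytes and closes the socket once the badserver.closeaftersendbytes (or ...recvbytes) budget is exhausted. That is why every budget above grows by 12: the advertised capability string gains "bookmarks%0A" (12 bytes), so the cut-off must move by the same amount to land at the same logical point. A rough sketch of the send-limiting idea, independent of the actual badserver extension:

    import socket

    class SendLimitedSocket(object):
        """Illustrative wrapper: forward send() calls until a byte budget
        is exhausted, then close the connection -- roughly the behaviour
        behind 'write limit reached; closing socket' above."""
        def __init__(self, sock, limit):
            self._sock = sock
            self._remaining = limit

        def send(self, data):
            if self._remaining <= 0:
                self._sock.close()
                raise IOError('write limit reached; closing socket')
            chunk = data[:self._remaining]
            sent = self._sock.send(chunk)
            self._remaining -= sent
            return sent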
--- a/tests/test-http-bundle1.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-http-bundle1.t Mon Jan 22 17:53:02 2018 -0500
@@ -26,15 +26,10 @@
Test server address cannot be reused
-#if windows
$ hg serve -p $HGPORT1 2>&1
- abort: cannot start server at 'localhost:$HGPORT1': * (glob)
+ abort: cannot start server at 'localhost:$HGPORT1': $EADDRINUSE$
[255]
-#else
- $ hg serve -p $HGPORT1 2>&1
- abort: cannot start server at 'localhost:$HGPORT1': Address already in use
- [255]
-#endif
+
$ cd ..
$ cat hg1.pid hg2.pid >> $DAEMON_PIDS
@@ -265,66 +260,66 @@
$ sed 's/.*] "/"/' < ../access.log
"GET /?cmd=capabilities HTTP/1.1" 200 -
- "GET /?cmd=lookup HTTP/1.1" 200 - x-hgarg-1:key=tip x-hgproto-1:0.1 0.2 comp=*zlib,none,bzip2 (glob)
- "GET /?cmd=listkeys HTTP/1.1" 401 - x-hgarg-1:namespace=namespaces x-hgproto-1:0.1 0.2 comp=*zlib,none,bzip2 (glob)
+ "GET /?cmd=lookup HTTP/1.1" 200 - x-hgarg-1:key=tip x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$
+ "GET /?cmd=listkeys HTTP/1.1" 401 - x-hgarg-1:namespace=namespaces x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$
"GET /?cmd=capabilities HTTP/1.1" 200 -
- "GET /?cmd=lookup HTTP/1.1" 200 - x-hgarg-1:key=tip x-hgproto-1:0.1 0.2 comp=*zlib,none,bzip2 (glob)
- "GET /?cmd=listkeys HTTP/1.1" 401 - x-hgarg-1:namespace=namespaces x-hgproto-1:0.1 0.2 comp=*zlib,none,bzip2 (glob)
+ "GET /?cmd=lookup HTTP/1.1" 200 - x-hgarg-1:key=tip x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$
+ "GET /?cmd=listkeys HTTP/1.1" 401 - x-hgarg-1:namespace=namespaces x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$
"GET /?cmd=capabilities HTTP/1.1" 200 -
- "GET /?cmd=lookup HTTP/1.1" 200 - x-hgarg-1:key=tip x-hgproto-1:0.1 0.2 comp=*zlib,none,bzip2 (glob)
- "GET /?cmd=listkeys HTTP/1.1" 401 - x-hgarg-1:namespace=namespaces x-hgproto-1:0.1 0.2 comp=*zlib,none,bzip2 (glob)
- "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=namespaces x-hgproto-1:0.1 0.2 comp=*zlib,none,bzip2 (glob)
- "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=bookmarks x-hgproto-1:0.1 0.2 comp=*zlib,none,bzip2 (glob)
+ "GET /?cmd=lookup HTTP/1.1" 200 - x-hgarg-1:key=tip x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$
+ "GET /?cmd=listkeys HTTP/1.1" 401 - x-hgarg-1:namespace=namespaces x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$
+ "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=namespaces x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$
+ "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=bookmarks x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$
"GET /?cmd=capabilities HTTP/1.1" 200 -
- "GET /?cmd=lookup HTTP/1.1" 200 - x-hgarg-1:key=tip x-hgproto-1:0.1 0.2 comp=*zlib,none,bzip2 (glob)
- "GET /?cmd=listkeys HTTP/1.1" 401 - x-hgarg-1:namespace=namespaces x-hgproto-1:0.1 0.2 comp=*zlib,none,bzip2 (glob)
- "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=namespaces x-hgproto-1:0.1 0.2 comp=*zlib,none,bzip2 (glob)
- "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=bookmarks x-hgproto-1:0.1 0.2 comp=*zlib,none,bzip2 (glob)
+ "GET /?cmd=lookup HTTP/1.1" 200 - x-hgarg-1:key=tip x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$
+ "GET /?cmd=listkeys HTTP/1.1" 401 - x-hgarg-1:namespace=namespaces x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$
+ "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=namespaces x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$
+ "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=bookmarks x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$
"GET /?cmd=capabilities HTTP/1.1" 200 -
- "GET /?cmd=lookup HTTP/1.1" 200 - x-hgarg-1:key=tip x-hgproto-1:0.1 0.2 comp=*zlib,none,bzip2 (glob)
- "GET /?cmd=listkeys HTTP/1.1" 401 - x-hgarg-1:namespace=namespaces x-hgproto-1:0.1 0.2 comp=*zlib,none,bzip2 (glob)
- "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=namespaces x-hgproto-1:0.1 0.2 comp=*zlib,none,bzip2 (glob)
- "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=bookmarks x-hgproto-1:0.1 0.2 comp=*zlib,none,bzip2 (glob)
+ "GET /?cmd=lookup HTTP/1.1" 200 - x-hgarg-1:key=tip x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$
+ "GET /?cmd=listkeys HTTP/1.1" 401 - x-hgarg-1:namespace=namespaces x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$
+ "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=namespaces x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$
+ "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=bookmarks x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$
"GET /?cmd=capabilities HTTP/1.1" 200 -
- "GET /?cmd=lookup HTTP/1.1" 200 - x-hgarg-1:key=tip x-hgproto-1:0.1 0.2 comp=*zlib,none,bzip2 (glob)
- "GET /?cmd=listkeys HTTP/1.1" 401 - x-hgarg-1:namespace=namespaces x-hgproto-1:0.1 0.2 comp=*zlib,none,bzip2 (glob)
- "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=namespaces x-hgproto-1:0.1 0.2 comp=*zlib,none,bzip2 (glob)
- "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=bookmarks x-hgproto-1:0.1 0.2 comp=*zlib,none,bzip2 (glob)
+ "GET /?cmd=lookup HTTP/1.1" 200 - x-hgarg-1:key=tip x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$
+ "GET /?cmd=listkeys HTTP/1.1" 401 - x-hgarg-1:namespace=namespaces x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$
+ "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=namespaces x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$
+ "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=bookmarks x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$
"GET /?cmd=capabilities HTTP/1.1" 200 -
- "GET /?cmd=lookup HTTP/1.1" 200 - x-hgarg-1:key=tip x-hgproto-1:0.1 0.2 comp=*zlib,none,bzip2 (glob)
- "GET /?cmd=listkeys HTTP/1.1" 401 - x-hgarg-1:namespace=namespaces x-hgproto-1:0.1 0.2 comp=*zlib,none,bzip2 (glob)
- "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=namespaces x-hgproto-1:0.1 0.2 comp=*zlib,none,bzip2 (glob)
- "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=bookmarks x-hgproto-1:0.1 0.2 comp=*zlib,none,bzip2 (glob)
+ "GET /?cmd=lookup HTTP/1.1" 200 - x-hgarg-1:key=tip x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$
+ "GET /?cmd=listkeys HTTP/1.1" 401 - x-hgarg-1:namespace=namespaces x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$
+ "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=namespaces x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$
+ "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=bookmarks x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$
"GET /?cmd=capabilities HTTP/1.1" 200 -
- "GET /?cmd=branchmap HTTP/1.1" 200 - x-hgproto-1:0.1 0.2 comp=*zlib,none,bzip2 (glob)
- "GET /?cmd=stream_out HTTP/1.1" 401 - x-hgproto-1:0.1 0.2 comp=*zlib,none,bzip2 (glob)
- "GET /?cmd=stream_out HTTP/1.1" 200 - x-hgproto-1:0.1 0.2 comp=*zlib,none,bzip2 (glob)
- "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=bookmarks x-hgproto-1:0.1 0.2 comp=*zlib,none,bzip2 (glob)
- "GET /?cmd=batch HTTP/1.1" 200 - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D5fed3813f7f5e1824344fdc9cf8f63bb662c292d x-hgproto-1:0.1 0.2 comp=*zlib,none,bzip2 (glob)
- "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=phases x-hgproto-1:0.1 0.2 comp=*zlib,none,bzip2 (glob)
+ "GET /?cmd=branchmap HTTP/1.1" 200 - x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$
+ "GET /?cmd=stream_out HTTP/1.1" 401 - x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$
+ "GET /?cmd=stream_out HTTP/1.1" 200 - x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$
+ "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=bookmarks x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$
+ "GET /?cmd=batch HTTP/1.1" 200 - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D5fed3813f7f5e1824344fdc9cf8f63bb662c292d x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$
+ "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=phases x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$
"GET /?cmd=capabilities HTTP/1.1" 200 -
- "GET /?cmd=listkeys HTTP/1.1" 401 - x-hgarg-1:namespace=bookmarks x-hgproto-1:0.1 0.2 comp=*zlib,none,bzip2 (glob)
- "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=bookmarks x-hgproto-1:0.1 0.2 comp=*zlib,none,bzip2 (glob)
- "GET /?cmd=batch HTTP/1.1" 200 - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D x-hgproto-1:0.1 0.2 comp=*zlib,none,bzip2 (glob)
- "GET /?cmd=getbundle HTTP/1.1" 200 - x-hgarg-1:common=0000000000000000000000000000000000000000&heads=5fed3813f7f5e1824344fdc9cf8f63bb662c292d x-hgproto-1:0.1 0.2 comp=*zlib,none,bzip2 (glob)
- "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=phases x-hgproto-1:0.1 0.2 comp=*zlib,none,bzip2 (glob)
+ "GET /?cmd=listkeys HTTP/1.1" 401 - x-hgarg-1:namespace=bookmarks x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$
+ "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=bookmarks x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$
+ "GET /?cmd=batch HTTP/1.1" 200 - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$
+ "GET /?cmd=getbundle HTTP/1.1" 200 - x-hgarg-1:common=0000000000000000000000000000000000000000&heads=5fed3813f7f5e1824344fdc9cf8f63bb662c292d x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$
+ "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=phases x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$
"GET /?cmd=capabilities HTTP/1.1" 200 -
- "GET /?cmd=lookup HTTP/1.1" 200 - x-hgarg-1:key=tip x-hgproto-1:0.1 0.2 comp=*zlib,none,bzip2 (glob)
- "GET /?cmd=listkeys HTTP/1.1" 401 - x-hgarg-1:namespace=namespaces x-hgproto-1:0.1 0.2 comp=*zlib,none,bzip2 (glob)
+ "GET /?cmd=lookup HTTP/1.1" 200 - x-hgarg-1:key=tip x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$
+ "GET /?cmd=listkeys HTTP/1.1" 401 - x-hgarg-1:namespace=namespaces x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$
"GET /?cmd=capabilities HTTP/1.1" 200 -
- "GET /?cmd=lookup HTTP/1.1" 200 - x-hgarg-1:key=tip x-hgproto-1:0.1 0.2 comp=*zlib,none,bzip2 (glob)
- "GET /?cmd=listkeys HTTP/1.1" 401 - x-hgarg-1:namespace=namespaces x-hgproto-1:0.1 0.2 comp=*zlib,none,bzip2 (glob)
- "GET /?cmd=listkeys HTTP/1.1" 403 - x-hgarg-1:namespace=namespaces x-hgproto-1:0.1 0.2 comp=*zlib,none,bzip2 (glob)
+ "GET /?cmd=lookup HTTP/1.1" 200 - x-hgarg-1:key=tip x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$
+ "GET /?cmd=listkeys HTTP/1.1" 401 - x-hgarg-1:namespace=namespaces x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$
+ "GET /?cmd=listkeys HTTP/1.1" 403 - x-hgarg-1:namespace=namespaces x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$
"GET /?cmd=capabilities HTTP/1.1" 200 -
- "GET /?cmd=batch HTTP/1.1" 200 - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D7f4e523d01f2cc3765ac8934da3d14db775ff872 x-hgproto-1:0.1 0.2 comp=*zlib,none,bzip2 (glob)
- "GET /?cmd=listkeys HTTP/1.1" 401 - x-hgarg-1:namespace=phases x-hgproto-1:0.1 0.2 comp=*zlib,none,bzip2 (glob)
- "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=phases x-hgproto-1:0.1 0.2 comp=*zlib,none,bzip2 (glob)
- "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=bookmarks x-hgproto-1:0.1 0.2 comp=*zlib,none,bzip2 (glob)
- "GET /?cmd=branchmap HTTP/1.1" 200 - x-hgproto-1:0.1 0.2 comp=*zlib,none,bzip2 (glob)
- "GET /?cmd=branchmap HTTP/1.1" 200 - x-hgproto-1:0.1 0.2 comp=*zlib,none,bzip2 (glob)
- "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=bookmarks x-hgproto-1:0.1 0.2 comp=*zlib,none,bzip2 (glob)
+ "GET /?cmd=batch HTTP/1.1" 200 - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D7f4e523d01f2cc3765ac8934da3d14db775ff872 x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$
+ "GET /?cmd=listkeys HTTP/1.1" 401 - x-hgarg-1:namespace=phases x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$
+ "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=phases x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$
+ "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=bookmarks x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$
+ "GET /?cmd=branchmap HTTP/1.1" 200 - x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$
+ "GET /?cmd=branchmap HTTP/1.1" 200 - x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$
+ "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=bookmarks x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$
"POST /?cmd=unbundle HTTP/1.1" 200 - x-hgarg-1:heads=686173686564+5eb5abfefeea63c80dd7553bcc3783f37e0c5524* (glob)
- "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=phases x-hgproto-1:0.1 0.2 comp=*zlib,none,bzip2 (glob)
+ "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=phases x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$
$ cd ..
--- a/tests/test-http-proxy.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-http-proxy.t Mon Jan 22 17:53:02 2018 -0500
@@ -107,19 +107,19 @@
1 files updated, 0 files merged, 0 files removed, 0 files unresolved
$ cat proxy.log
* - - [*] "GET http://localhost:$HGPORT/?cmd=capabilities HTTP/1.1" - - (glob)
- * - - [*] "GET http://localhost:$HGPORT/?cmd=branchmap HTTP/1.1" - - x-hgproto-1:0.1 0.2 comp=*zlib,none,bzip2 (glob)
- * - - [*] "GET http://localhost:$HGPORT/?cmd=stream_out HTTP/1.1" - - x-hgproto-1:0.1 0.2 comp=*zlib,none,bzip2 (glob)
- * - - [*] "GET http://localhost:$HGPORT/?cmd=batch HTTP/1.1" - - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D83180e7845de420a1bb46896fd5fe05294f8d629 x-hgproto-1:0.1 0.2 comp=*zlib,none,bzip2 (glob)
- * - - [*] "GET http://localhost:$HGPORT/?cmd=getbundle HTTP/1.1" - - x-hgarg-1:bundlecaps=HG20%2Cbundle2%3DHG20%250Achangegroup%253D01%252C02%250Adigests%253Dmd5%252Csha1%252Csha512%250Aerror%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250Ahgtagsfnodes%250Alistkeys%250Aphases%253Dheads%250Apushkey%250Aremote-changegroup%253Dhttp%252Chttps&cg=0&common=83180e7845de420a1bb46896fd5fe05294f8d629&heads=83180e7845de420a1bb46896fd5fe05294f8d629&listkeys=bookmarks&phases=1 x-hgproto-1:0.1 0.2 comp=*zlib,none,bzip2 (glob)
+ $LOCALIP - - [$LOGDATE$] "GET http://localhost:$HGPORT/?cmd=branchmap HTTP/1.1" - - x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ (glob)
+ $LOCALIP - - [$LOGDATE$] "GET http://localhost:$HGPORT/?cmd=stream_out HTTP/1.1" - - x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ (glob)
+ $LOCALIP - - [$LOGDATE$] "GET http://localhost:$HGPORT/?cmd=batch HTTP/1.1" - - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D83180e7845de420a1bb46896fd5fe05294f8d629 x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ (glob)
+ $LOCALIP - - [$LOGDATE$] "GET http://localhost:$HGPORT/?cmd=getbundle HTTP/1.1" - - x-hgarg-1:bookmarks=1&$USUAL_BUNDLE_CAPS$&cg=0&common=83180e7845de420a1bb46896fd5fe05294f8d629&heads=83180e7845de420a1bb46896fd5fe05294f8d629&listkeys=bookmarks&phases=1 x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ (glob)
* - - [*] "GET http://localhost:$HGPORT/?cmd=capabilities HTTP/1.1" - - (glob)
- * - - [*] "GET http://localhost:$HGPORT/?cmd=batch HTTP/1.1" - - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D x-hgproto-1:0.1 0.2 comp=*zlib,none,bzip2 (glob)
- * - - [*] "GET http://localhost:$HGPORT/?cmd=getbundle HTTP/1.1" - - x-hgarg-1:bundlecaps=HG20%2Cbundle2%3DHG20%250Achangegroup%253D01%252C02%250Adigests%253Dmd5%252Csha1%252Csha512%250Aerror%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250Ahgtagsfnodes%250Alistkeys%250Aphases%253Dheads%250Apushkey%250Aremote-changegroup%253Dhttp%252Chttps&cg=1&common=0000000000000000000000000000000000000000&heads=83180e7845de420a1bb46896fd5fe05294f8d629&listkeys=bookmarks&phases=1 x-hgproto-1:0.1 0.2 comp=*zlib,none,bzip2 (glob)
+ $LOCALIP - - [$LOGDATE$] "GET http://localhost:$HGPORT/?cmd=batch HTTP/1.1" - - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ (glob)
+ $LOCALIP - - [$LOGDATE$] "GET http://localhost:$HGPORT/?cmd=getbundle HTTP/1.1" - - x-hgarg-1:bookmarks=1&$USUAL_BUNDLE_CAPS$&cg=1&common=0000000000000000000000000000000000000000&heads=83180e7845de420a1bb46896fd5fe05294f8d629&listkeys=bookmarks&phases=1 x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ (glob)
* - - [*] "GET http://localhost:$HGPORT/?cmd=capabilities HTTP/1.1" - - (glob)
- * - - [*] "GET http://localhost:$HGPORT/?cmd=batch HTTP/1.1" - - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D x-hgproto-1:0.1 0.2 comp=*zlib,none,bzip2 (glob)
- * - - [*] "GET http://localhost:$HGPORT/?cmd=getbundle HTTP/1.1" - - x-hgarg-1:bundlecaps=HG20%2Cbundle2%3DHG20%250Achangegroup%253D01%252C02%250Adigests%253Dmd5%252Csha1%252Csha512%250Aerror%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250Ahgtagsfnodes%250Alistkeys%250Aphases%253Dheads%250Apushkey%250Aremote-changegroup%253Dhttp%252Chttps&cg=1&common=0000000000000000000000000000000000000000&heads=83180e7845de420a1bb46896fd5fe05294f8d629&listkeys=bookmarks&phases=1 x-hgproto-1:0.1 0.2 comp=*zlib,none,bzip2 (glob)
+ $LOCALIP - - [$LOGDATE$] "GET http://localhost:$HGPORT/?cmd=batch HTTP/1.1" - - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ (glob)
+ $LOCALIP - - [$LOGDATE$] "GET http://localhost:$HGPORT/?cmd=getbundle HTTP/1.1" - - x-hgarg-1:bookmarks=1&$USUAL_BUNDLE_CAPS$&cg=1&common=0000000000000000000000000000000000000000&heads=83180e7845de420a1bb46896fd5fe05294f8d629&listkeys=bookmarks&phases=1 x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ (glob)
* - - [*] "GET http://localhost:$HGPORT/?cmd=capabilities HTTP/1.1" - - (glob)
- * - - [*] "GET http://localhost:$HGPORT/?cmd=batch HTTP/1.1" - - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D x-hgproto-1:0.1 0.2 comp=*zlib,none,bzip2 (glob)
- * - - [*] "GET http://localhost:$HGPORT/?cmd=getbundle HTTP/1.1" - - x-hgarg-1:bundlecaps=HG20%2Cbundle2%3DHG20%250Achangegroup%253D01%252C02%250Adigests%253Dmd5%252Csha1%252Csha512%250Aerror%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250Ahgtagsfnodes%250Alistkeys%250Aphases%253Dheads%250Apushkey%250Aremote-changegroup%253Dhttp%252Chttps&cg=1&common=0000000000000000000000000000000000000000&heads=83180e7845de420a1bb46896fd5fe05294f8d629&listkeys=bookmarks&phases=1 x-hgproto-1:0.1 0.2 comp=*zlib,none,bzip2 (glob)
+ $LOCALIP - - [$LOGDATE$] "GET http://localhost:$HGPORT/?cmd=batch HTTP/1.1" - - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ (glob)
+ $LOCALIP - - [$LOGDATE$] "GET http://localhost:$HGPORT/?cmd=getbundle HTTP/1.1" - - x-hgarg-1:bookmarks=1&$USUAL_BUNDLE_CAPS$&cg=1&common=0000000000000000000000000000000000000000&heads=83180e7845de420a1bb46896fd5fe05294f8d629&listkeys=bookmarks&phases=1 x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ (glob)
* - - [*] "GET http://localhost:$HGPORT/?cmd=capabilities HTTP/1.1" - - (glob)
- * - - [*] "GET http://localhost:$HGPORT/?cmd=batch HTTP/1.1" - - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D x-hgproto-1:0.1 0.2 comp=*zlib,none,bzip2 (glob)
- * - - [*] "GET http://localhost:$HGPORT/?cmd=getbundle HTTP/1.1" - - x-hgarg-1:bundlecaps=HG20%2Cbundle2%3DHG20%250Achangegroup%253D01%252C02%250Adigests%253Dmd5%252Csha1%252Csha512%250Aerror%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250Ahgtagsfnodes%250Alistkeys%250Aphases%253Dheads%250Apushkey%250Aremote-changegroup%253Dhttp%252Chttps&cg=1&common=0000000000000000000000000000000000000000&heads=83180e7845de420a1bb46896fd5fe05294f8d629&listkeys=bookmarks&phases=1 x-hgproto-1:0.1 0.2 comp=*zlib,none,bzip2 (glob)
+ $LOCALIP - - [$LOGDATE$] "GET http://localhost:$HGPORT/?cmd=batch HTTP/1.1" - - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ (glob)
+ $LOCALIP - - [$LOGDATE$] "GET http://localhost:$HGPORT/?cmd=getbundle HTTP/1.1" - - x-hgarg-1:bookmarks=1&$USUAL_BUNDLE_CAPS$&cg=1&common=0000000000000000000000000000000000000000&heads=83180e7845de420a1bb46896fd5fe05294f8d629&listkeys=bookmarks&phases=1 x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ (glob)
--- a/tests/test-http.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-http.t Mon Jan 22 17:53:02 2018 -0500
@@ -17,15 +17,10 @@
Test server address cannot be reused
-#if windows
$ hg serve -p $HGPORT1 2>&1
- abort: cannot start server at 'localhost:$HGPORT1': * (glob)
+ abort: cannot start server at 'localhost:$HGPORT1': $EADDRINUSE$
[255]
-#else
- $ hg serve -p $HGPORT1 2>&1
- abort: cannot start server at 'localhost:$HGPORT1': Address already in use
- [255]
-#endif
+
$ cd ..
$ cat hg1.pid hg2.pid >> $DAEMON_PIDS
@@ -253,66 +248,166 @@
remote: adding file changes
remote: added 1 changesets with 1 changes to 1 files
$ hg rollback -q
+ $ hg -R dest push http://user:pass@localhost:$HGPORT2/ --debug --config devel.debug.peer-request=yes
+ pushing to http://user:***@localhost:$HGPORT2/
+ using http://localhost:$HGPORT2/
+ http auth: user user, password ****
+ sending capabilities command
+ devel-peer-request: GET http://localhost:$HGPORT2/?cmd=capabilities
+ devel-peer-request: finished in *.???? seconds (200) (glob)
+ query 1; heads
+ sending batch command
+ devel-peer-request: GET http://localhost:$HGPORT2/?cmd=batch
+ devel-peer-request: Vary X-HgArg-1,X-HgProto-1
+ devel-peer-request: X-hgproto-1 0.1 0.2 comp=$USUAL_COMPRESSIONS$
+ devel-peer-request: 68 bytes of commands arguments in headers
+ devel-peer-request: finished in *.???? seconds (200) (glob)
+ searching for changes
+ all remote heads known locally
+ preparing listkeys for "phases"
+ sending listkeys command
+ devel-peer-request: GET http://localhost:$HGPORT2/?cmd=listkeys
+ devel-peer-request: Vary X-HgArg-1,X-HgProto-1
+ devel-peer-request: X-hgproto-1 0.1 0.2 comp=$USUAL_COMPRESSIONS$
+ devel-peer-request: 16 bytes of commands arguments in headers
+ http auth: user user, password ****
+ devel-peer-request: finished in *.???? seconds (200) (glob)
+ received listkey for "phases": 58 bytes
+ checking for updated bookmarks
+ preparing listkeys for "bookmarks"
+ sending listkeys command
+ devel-peer-request: GET http://localhost:$HGPORT2/?cmd=listkeys
+ devel-peer-request: Vary X-HgArg-1,X-HgProto-1
+ devel-peer-request: X-hgproto-1 0.1 0.2 comp=$USUAL_COMPRESSIONS$
+ devel-peer-request: 19 bytes of commands arguments in headers
+ devel-peer-request: finished in *.???? seconds (200) (glob)
+ received listkey for "bookmarks": 0 bytes
+ sending branchmap command
+ devel-peer-request: GET http://localhost:$HGPORT2/?cmd=branchmap
+ devel-peer-request: Vary X-HgProto-1
+ devel-peer-request: X-hgproto-1 0.1 0.2 comp=$USUAL_COMPRESSIONS$
+ devel-peer-request: finished in *.???? seconds (200) (glob)
+ sending branchmap command
+ devel-peer-request: GET http://localhost:$HGPORT2/?cmd=branchmap
+ devel-peer-request: Vary X-HgProto-1
+ devel-peer-request: X-hgproto-1 0.1 0.2 comp=$USUAL_COMPRESSIONS$
+ devel-peer-request: finished in *.???? seconds (200) (glob)
+ preparing listkeys for "bookmarks"
+ sending listkeys command
+ devel-peer-request: GET http://localhost:$HGPORT2/?cmd=listkeys
+ devel-peer-request: Vary X-HgArg-1,X-HgProto-1
+ devel-peer-request: X-hgproto-1 0.1 0.2 comp=$USUAL_COMPRESSIONS$
+ devel-peer-request: 19 bytes of commands arguments in headers
+ devel-peer-request: finished in *.???? seconds (200) (glob)
+ received listkey for "bookmarks": 0 bytes
+ 1 changesets found
+ list of changesets:
+ 7f4e523d01f2cc3765ac8934da3d14db775ff872
+ bundle2-output-bundle: "HG20", 5 parts total
+ bundle2-output-part: "replycaps" 188 bytes payload
+ bundle2-output-part: "check:phases" 24 bytes payload
+ bundle2-output-part: "check:heads" streamed payload
+ bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload
+ bundle2-output-part: "phase-heads" 24 bytes payload
+ sending unbundle command
+ sending 996 bytes
+ devel-peer-request: POST http://localhost:$HGPORT2/?cmd=unbundle
+ devel-peer-request: Content-length 996
+ devel-peer-request: Content-type application/mercurial-0.1
+ devel-peer-request: Vary X-HgArg-1,X-HgProto-1
+ devel-peer-request: X-hgproto-1 0.1 0.2 comp=$USUAL_COMPRESSIONS$
+ devel-peer-request: 16 bytes of commands arguments in headers
+ devel-peer-request: 996 bytes of data
+ devel-peer-request: finished in *.???? seconds (200) (glob)
+ bundle2-input-bundle: no-transaction
+ bundle2-input-part: "reply:changegroup" (advisory) (params: 0 advisory) supported
+ bundle2-input-part: "output" (advisory) (params: 0 advisory) supported
+ bundle2-input-part: total payload size 100
+ remote: adding changesets
+ remote: adding manifests
+ remote: adding file changes
+ remote: added 1 changesets with 1 changes to 1 files
+ bundle2-input-part: "output" (advisory) supported
+ bundle2-input-bundle: 2 parts total
+ preparing listkeys for "phases"
+ sending listkeys command
+ devel-peer-request: GET http://localhost:$HGPORT2/?cmd=listkeys
+ devel-peer-request: Vary X-HgArg-1,X-HgProto-1
+ devel-peer-request: X-hgproto-1 0.1 0.2 comp=$USUAL_COMPRESSIONS$
+ devel-peer-request: 16 bytes of commands arguments in headers
+ devel-peer-request: finished in *.???? seconds (200) (glob)
+ received listkey for "phases": 15 bytes
+ $ hg rollback -q
$ sed 's/.*] "/"/' < ../access.log
"GET /?cmd=capabilities HTTP/1.1" 200 -
- "GET /?cmd=lookup HTTP/1.1" 200 - x-hgarg-1:key=tip x-hgproto-1:0.1 0.2 comp=*zlib,none,bzip2 (glob)
- "GET /?cmd=listkeys HTTP/1.1" 401 - x-hgarg-1:namespace=namespaces x-hgproto-1:0.1 0.2 comp=*zlib,none,bzip2 (glob)
+ "GET /?cmd=lookup HTTP/1.1" 200 - x-hgarg-1:key=tip x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$
+ "GET /?cmd=listkeys HTTP/1.1" 401 - x-hgarg-1:namespace=namespaces x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$
"GET /?cmd=capabilities HTTP/1.1" 200 -
- "GET /?cmd=lookup HTTP/1.1" 200 - x-hgarg-1:key=tip x-hgproto-1:0.1 0.2 comp=*zlib,none,bzip2 (glob)
- "GET /?cmd=listkeys HTTP/1.1" 401 - x-hgarg-1:namespace=namespaces x-hgproto-1:0.1 0.2 comp=*zlib,none,bzip2 (glob)
+ "GET /?cmd=lookup HTTP/1.1" 200 - x-hgarg-1:key=tip x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$
+ "GET /?cmd=listkeys HTTP/1.1" 401 - x-hgarg-1:namespace=namespaces x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$
"GET /?cmd=capabilities HTTP/1.1" 200 -
- "GET /?cmd=lookup HTTP/1.1" 200 - x-hgarg-1:key=tip x-hgproto-1:0.1 0.2 comp=*zlib,none,bzip2 (glob)
- "GET /?cmd=listkeys HTTP/1.1" 401 - x-hgarg-1:namespace=namespaces x-hgproto-1:0.1 0.2 comp=*zlib,none,bzip2 (glob)
- "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=namespaces x-hgproto-1:0.1 0.2 comp=*zlib,none,bzip2 (glob)
- "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=bookmarks x-hgproto-1:0.1 0.2 comp=*zlib,none,bzip2 (glob)
+ "GET /?cmd=lookup HTTP/1.1" 200 - x-hgarg-1:key=tip x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$
+ "GET /?cmd=listkeys HTTP/1.1" 401 - x-hgarg-1:namespace=namespaces x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$
+ "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=namespaces x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$
+ "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=bookmarks x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$
"GET /?cmd=capabilities HTTP/1.1" 200 -
- "GET /?cmd=lookup HTTP/1.1" 200 - x-hgarg-1:key=tip x-hgproto-1:0.1 0.2 comp=*zlib,none,bzip2 (glob)
- "GET /?cmd=listkeys HTTP/1.1" 401 - x-hgarg-1:namespace=namespaces x-hgproto-1:0.1 0.2 comp=*zlib,none,bzip2 (glob)
- "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=namespaces x-hgproto-1:0.1 0.2 comp=*zlib,none,bzip2 (glob)
- "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=bookmarks x-hgproto-1:0.1 0.2 comp=*zlib,none,bzip2 (glob)
+ "GET /?cmd=lookup HTTP/1.1" 200 - x-hgarg-1:key=tip x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$
+ "GET /?cmd=listkeys HTTP/1.1" 401 - x-hgarg-1:namespace=namespaces x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$
+ "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=namespaces x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$
+ "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=bookmarks x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$
+ "GET /?cmd=capabilities HTTP/1.1" 200 -
+ "GET /?cmd=lookup HTTP/1.1" 200 - x-hgarg-1:key=tip x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$
+ "GET /?cmd=listkeys HTTP/1.1" 401 - x-hgarg-1:namespace=namespaces x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$
+ "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=namespaces x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$
+ "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=bookmarks x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$
"GET /?cmd=capabilities HTTP/1.1" 200 -
- "GET /?cmd=lookup HTTP/1.1" 200 - x-hgarg-1:key=tip x-hgproto-1:0.1 0.2 comp=*zlib,none,bzip2 (glob)
- "GET /?cmd=listkeys HTTP/1.1" 401 - x-hgarg-1:namespace=namespaces x-hgproto-1:0.1 0.2 comp=*zlib,none,bzip2 (glob)
- "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=namespaces x-hgproto-1:0.1 0.2 comp=*zlib,none,bzip2 (glob)
- "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=bookmarks x-hgproto-1:0.1 0.2 comp=*zlib,none,bzip2 (glob)
+ "GET /?cmd=lookup HTTP/1.1" 200 - x-hgarg-1:key=tip x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$
+ "GET /?cmd=listkeys HTTP/1.1" 401 - x-hgarg-1:namespace=namespaces x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$
+ "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=namespaces x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$
+ "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=bookmarks x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$
"GET /?cmd=capabilities HTTP/1.1" 200 -
- "GET /?cmd=lookup HTTP/1.1" 200 - x-hgarg-1:key=tip x-hgproto-1:0.1 0.2 comp=*zlib,none,bzip2 (glob)
- "GET /?cmd=listkeys HTTP/1.1" 401 - x-hgarg-1:namespace=namespaces x-hgproto-1:0.1 0.2 comp=*zlib,none,bzip2 (glob)
- "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=namespaces x-hgproto-1:0.1 0.2 comp=*zlib,none,bzip2 (glob)
- "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=bookmarks x-hgproto-1:0.1 0.2 comp=*zlib,none,bzip2 (glob)
+ "GET /?cmd=lookup HTTP/1.1" 200 - x-hgarg-1:key=tip x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$
+ "GET /?cmd=listkeys HTTP/1.1" 401 - x-hgarg-1:namespace=namespaces x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$
+ "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=namespaces x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$
+ "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=bookmarks x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$
"GET /?cmd=capabilities HTTP/1.1" 200 -
- "GET /?cmd=lookup HTTP/1.1" 200 - x-hgarg-1:key=tip x-hgproto-1:0.1 0.2 comp=*zlib,none,bzip2 (glob)
- "GET /?cmd=listkeys HTTP/1.1" 401 - x-hgarg-1:namespace=namespaces x-hgproto-1:0.1 0.2 comp=*zlib,none,bzip2 (glob)
- "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=namespaces x-hgproto-1:0.1 0.2 comp=*zlib,none,bzip2 (glob)
- "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=bookmarks x-hgproto-1:0.1 0.2 comp=*zlib,none,bzip2 (glob)
+ "GET /?cmd=branchmap HTTP/1.1" 200 - x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$
+ "GET /?cmd=stream_out HTTP/1.1" 401 - x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$
+ "GET /?cmd=stream_out HTTP/1.1" 200 - x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$
+ "GET /?cmd=batch HTTP/1.1" 200 - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D5fed3813f7f5e1824344fdc9cf8f63bb662c292d x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$
+ "GET /?cmd=getbundle HTTP/1.1" 200 - x-hgarg-1:bookmarks=1&$USUAL_BUNDLE_CAPS$&cg=0&common=5fed3813f7f5e1824344fdc9cf8f63bb662c292d&heads=5fed3813f7f5e1824344fdc9cf8f63bb662c292d&listkeys=bookmarks&phases=1 x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$
"GET /?cmd=capabilities HTTP/1.1" 200 -
- "GET /?cmd=branchmap HTTP/1.1" 200 - x-hgproto-1:0.1 0.2 comp=*zlib,none,bzip2 (glob)
- "GET /?cmd=stream_out HTTP/1.1" 401 - x-hgproto-1:0.1 0.2 comp=*zlib,none,bzip2 (glob)
- "GET /?cmd=stream_out HTTP/1.1" 200 - x-hgproto-1:0.1 0.2 comp=*zlib,none,bzip2 (glob)
- "GET /?cmd=batch HTTP/1.1" 200 - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D5fed3813f7f5e1824344fdc9cf8f63bb662c292d x-hgproto-1:0.1 0.2 comp=*zlib,none,bzip2 (glob)
- "GET /?cmd=getbundle HTTP/1.1" 200 - x-hgarg-1:bundlecaps=HG20%2Cbundle2%3DHG20%250Achangegroup%253D01%252C02%250Adigests%253Dmd5%252Csha1%252Csha512%250Aerror%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250Ahgtagsfnodes%250Alistkeys%250Aphases%253Dheads%250Apushkey%250Aremote-changegroup%253Dhttp%252Chttps&cg=0&common=5fed3813f7f5e1824344fdc9cf8f63bb662c292d&heads=5fed3813f7f5e1824344fdc9cf8f63bb662c292d&listkeys=bookmarks&phases=1 x-hgproto-1:0.1 0.2 comp=*zlib,none,bzip2 (glob)
+ "GET /?cmd=batch HTTP/1.1" 200 - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$
+ "GET /?cmd=getbundle HTTP/1.1" 401 - x-hgarg-1:bookmarks=1&$USUAL_BUNDLE_CAPS$&cg=1&common=0000000000000000000000000000000000000000&heads=5fed3813f7f5e1824344fdc9cf8f63bb662c292d&listkeys=bookmarks&phases=1 x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$
+ "GET /?cmd=getbundle HTTP/1.1" 200 - x-hgarg-1:bookmarks=1&$USUAL_BUNDLE_CAPS$&cg=1&common=0000000000000000000000000000000000000000&heads=5fed3813f7f5e1824344fdc9cf8f63bb662c292d&listkeys=bookmarks&phases=1 x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$
"GET /?cmd=capabilities HTTP/1.1" 200 -
- "GET /?cmd=batch HTTP/1.1" 200 - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D x-hgproto-1:0.1 0.2 comp=*zlib,none,bzip2 (glob)
- "GET /?cmd=getbundle HTTP/1.1" 401 - x-hgarg-1:bundlecaps=HG20%2Cbundle2%3DHG20%250Achangegroup%253D01%252C02%250Adigests%253Dmd5%252Csha1%252Csha512%250Aerror%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250Ahgtagsfnodes%250Alistkeys%250Aphases%253Dheads%250Apushkey%250Aremote-changegroup%253Dhttp%252Chttps&cg=1&common=0000000000000000000000000000000000000000&heads=5fed3813f7f5e1824344fdc9cf8f63bb662c292d&listkeys=bookmarks&phases=1 x-hgproto-1:0.1 0.2 comp=*zlib,none,bzip2 (glob)
- "GET /?cmd=getbundle HTTP/1.1" 200 - x-hgarg-1:bundlecaps=HG20%2Cbundle2%3DHG20%250Achangegroup%253D01%252C02%250Adigests%253Dmd5%252Csha1%252Csha512%250Aerror%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250Ahgtagsfnodes%250Alistkeys%250Aphases%253Dheads%250Apushkey%250Aremote-changegroup%253Dhttp%252Chttps&cg=1&common=0000000000000000000000000000000000000000&heads=5fed3813f7f5e1824344fdc9cf8f63bb662c292d&listkeys=bookmarks&phases=1 x-hgproto-1:0.1 0.2 comp=*zlib,none,bzip2 (glob)
+ "GET /?cmd=lookup HTTP/1.1" 200 - x-hgarg-1:key=tip x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$
+ "GET /?cmd=listkeys HTTP/1.1" 401 - x-hgarg-1:namespace=namespaces x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$
+ "GET /?cmd=capabilities HTTP/1.1" 200 -
+ "GET /?cmd=lookup HTTP/1.1" 200 - x-hgarg-1:key=tip x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$
+ "GET /?cmd=listkeys HTTP/1.1" 401 - x-hgarg-1:namespace=namespaces x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$
+ "GET /?cmd=listkeys HTTP/1.1" 403 - x-hgarg-1:namespace=namespaces x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$
"GET /?cmd=capabilities HTTP/1.1" 200 -
- "GET /?cmd=lookup HTTP/1.1" 200 - x-hgarg-1:key=tip x-hgproto-1:0.1 0.2 comp=*zlib,none,bzip2 (glob)
- "GET /?cmd=listkeys HTTP/1.1" 401 - x-hgarg-1:namespace=namespaces x-hgproto-1:0.1 0.2 comp=*zlib,none,bzip2 (glob)
- "GET /?cmd=capabilities HTTP/1.1" 200 -
- "GET /?cmd=lookup HTTP/1.1" 200 - x-hgarg-1:key=tip x-hgproto-1:0.1 0.2 comp=*zlib,none,bzip2 (glob)
- "GET /?cmd=listkeys HTTP/1.1" 401 - x-hgarg-1:namespace=namespaces x-hgproto-1:0.1 0.2 comp=*zlib,none,bzip2 (glob)
- "GET /?cmd=listkeys HTTP/1.1" 403 - x-hgarg-1:namespace=namespaces x-hgproto-1:0.1 0.2 comp=*zlib,none,bzip2 (glob)
+ "GET /?cmd=batch HTTP/1.1" 200 - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D7f4e523d01f2cc3765ac8934da3d14db775ff872 x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$
+ "GET /?cmd=listkeys HTTP/1.1" 401 - x-hgarg-1:namespace=phases x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$
+ "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=phases x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$
+ "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=bookmarks x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$
+ "GET /?cmd=branchmap HTTP/1.1" 200 - x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$
+ "GET /?cmd=branchmap HTTP/1.1" 200 - x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$
+ "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=bookmarks x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$
+ "POST /?cmd=unbundle HTTP/1.1" 200 - x-hgarg-1:heads=666f726365* (glob)
+ "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=phases x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$
"GET /?cmd=capabilities HTTP/1.1" 200 -
- "GET /?cmd=batch HTTP/1.1" 200 - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D7f4e523d01f2cc3765ac8934da3d14db775ff872 x-hgproto-1:0.1 0.2 comp=*zlib,none,bzip2 (glob)
- "GET /?cmd=listkeys HTTP/1.1" 401 - x-hgarg-1:namespace=phases x-hgproto-1:0.1 0.2 comp=*zlib,none,bzip2 (glob)
- "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=phases x-hgproto-1:0.1 0.2 comp=*zlib,none,bzip2 (glob)
- "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=bookmarks x-hgproto-1:0.1 0.2 comp=*zlib,none,bzip2 (glob)
- "GET /?cmd=branchmap HTTP/1.1" 200 - x-hgproto-1:0.1 0.2 comp=*zlib,none,bzip2 (glob)
- "GET /?cmd=branchmap HTTP/1.1" 200 - x-hgproto-1:0.1 0.2 comp=*zlib,none,bzip2 (glob)
- "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=bookmarks x-hgproto-1:0.1 0.2 comp=*zlib,none,bzip2 (glob)
- "POST /?cmd=unbundle HTTP/1.1" 200 - x-hgarg-1:heads=666f726365* (glob)
- "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=phases x-hgproto-1:0.1 0.2 comp=*zlib,none,bzip2 (glob)
+ "GET /?cmd=batch HTTP/1.1" 200 - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D7f4e523d01f2cc3765ac8934da3d14db775ff872 x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$
+ "GET /?cmd=listkeys HTTP/1.1" 401 - x-hgarg-1:namespace=phases x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$
+ "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=phases x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$
+ "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=bookmarks x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$
+ "GET /?cmd=branchmap HTTP/1.1" 200 - x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$
+ "GET /?cmd=branchmap HTTP/1.1" 200 - x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$
+ "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=bookmarks x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$
+ "POST /?cmd=unbundle HTTP/1.1" 200 - x-hgarg-1:heads=666f726365 x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$
+ "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=phases x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$
$ cd ..
--- a/tests/test-https.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-https.t Mon Jan 22 17:53:02 2018 -0500
@@ -34,15 +34,10 @@
Test server address cannot be reused
-#if windows
$ hg serve -p $HGPORT --certificate=$PRIV 2>&1
- abort: cannot start server at 'localhost:$HGPORT': * (glob)
+ abort: cannot start server at 'localhost:$HGPORT': $EADDRINUSE$
[255]
-#else
- $ hg serve -p $HGPORT --certificate=$PRIV 2>&1
- abort: cannot start server at 'localhost:$HGPORT': Address already in use
- [255]
-#endif
+
$ cd ..
Our test cert is not signed by a trusted CA. It should fail to verify if
--- a/tests/test-import-git.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-import-git.t Mon Jan 22 17:53:02 2018 -0500
@@ -648,8 +648,8 @@
d
dddd
$ hg revert -aC
- forgetting dir/a (glob)
- reverting dir/d (glob)
+ forgetting dir/a
+ reverting dir/d
$ rm dir/a
prefix with default strip
@@ -673,8 +673,8 @@
d
dd
$ hg revert -aC
- forgetting dir/a (glob)
- reverting dir/d (glob)
+ forgetting dir/a
+ reverting dir/d
$ rm dir/a
(test that prefixes are relative to the cwd)
$ mkdir tmpdir
@@ -714,11 +714,11 @@
Renames, similarity and git diff
$ hg revert -aC
- forgetting dir/a (glob)
- undeleting dir/d (glob)
- undeleting dir/dir2/b (glob)
- forgetting dir/dir2/b2 (glob)
- reverting dir/dir2/c (glob)
+ forgetting dir/a
+ undeleting dir/d
+ undeleting dir/dir2/b
+ forgetting dir/dir2/b2
+ reverting dir/dir2/c
$ rm dir/a dir/dir2/b2
$ hg import --similarity 90 --no-commit - <<EOF
> diff --git a/a b/b
--- a/tests/test-import.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-import.t Mon Jan 22 17:53:02 2018 -0500
@@ -938,7 +938,7 @@
> rename to bar
> EOF
applying patch from stdin
- abort: path contains illegal component: ../outside/foo (glob)
+ abort: path contains illegal component: ../outside/foo
[255]
$ cd ..
@@ -1349,6 +1349,93 @@
$ cd ..
+commit message that looks like a diff header (issue1879)
+
+ $ hg init headerlikemsg
+ $ cd headerlikemsg
+ $ touch empty
+ $ echo nonempty >> nonempty
+ $ hg ci -qAl - <<EOF
+ > blah blah
+ > diff blah
+ > blah blah
+ > EOF
+ $ hg --config diff.git=1 log -pv
+ changeset: 0:c6ef204ef767
+ tag: tip
+ user: test
+ date: Thu Jan 01 00:00:00 1970 +0000
+ files: empty nonempty
+ description:
+ blah blah
+ diff blah
+ blah blah
+
+
+ diff --git a/empty b/empty
+ new file mode 100644
+ diff --git a/nonempty b/nonempty
+ new file mode 100644
+ --- /dev/null
+ +++ b/nonempty
+ @@ -0,0 +1,1 @@
+ +nonempty
+
+
+ (without --git, empty file is lost, but commit message should be preserved)
+
+ $ hg init plain
+ $ hg export 0 | hg -R plain import -
+ applying patch from stdin
+ $ hg --config diff.git=1 -R plain log -pv
+ changeset: 0:60a2d231e71f
+ tag: tip
+ user: test
+ date: Thu Jan 01 00:00:00 1970 +0000
+ files: nonempty
+ description:
+ blah blah
+ diff blah
+ blah blah
+
+
+ diff --git a/nonempty b/nonempty
+ new file mode 100644
+ --- /dev/null
+ +++ b/nonempty
+ @@ -0,0 +1,1 @@
+ +nonempty
+
+
+ (with --git, patch contents should be fully preserved)
+
+ $ hg init git
+ $ hg --config diff.git=1 export 0 | hg -R git import -
+ applying patch from stdin
+ $ hg --config diff.git=1 -R git log -pv
+ changeset: 0:c6ef204ef767
+ tag: tip
+ user: test
+ date: Thu Jan 01 00:00:00 1970 +0000
+ files: empty nonempty
+ description:
+ blah blah
+ diff blah
+ blah blah
+
+
+ diff --git a/empty b/empty
+ new file mode 100644
+ diff --git a/nonempty b/nonempty
+ new file mode 100644
+ --- /dev/null
+ +++ b/nonempty
+ @@ -0,0 +1,1 @@
+ +nonempty
+
+
+ $ cd ..
+
no segfault while importing a unified diff which start line is zero but chunk
size is non-zero
--- a/tests/test-incoming-outgoing.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-incoming-outgoing.t Mon Jan 22 17:53:02 2018 -0500
@@ -491,3 +491,63 @@
searching for changes
no changes found
[1]
+
+Create a "split" repo that pulls from r1 and pushes to r2, using default-push
+
+ $ hg clone r1 split
+ updating to branch default
+ 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ $ cat > split/.hg/hgrc << EOF
+ > [paths]
+ > default = $TESTTMP/r3
+ > default-push = $TESTTMP/r2
+ > EOF
+ $ hg -R split outgoing
+ comparing with $TESTTMP/r2
+ searching for changes
+ changeset: 0:3e92d79f743a
+ tag: tip
+ user: test
+ date: Thu Jan 01 00:00:00 1970 +0000
+ summary: a
+
+
+Use default:pushurl instead of default-push
+
+Windows needs a leading slash to make a URL that passes all of the checks
+ $ WD=`pwd`
+#if windows
+ $ WD="/$WD"
+#endif
+ $ cat > split/.hg/hgrc << EOF
+ > [paths]
+ > default = $WD/r3
+ > default:pushurl = file://$WD/r2
+ > EOF
+ $ hg -R split outgoing
+ comparing with file:/*/$TESTTMP/r2 (glob)
+ searching for changes
+ changeset: 0:3e92d79f743a
+ tag: tip
+ user: test
+ date: Thu Jan 01 00:00:00 1970 +0000
+ summary: a
+
+
+Push and then double-check outgoing
+
+ $ echo a >> split/foo
+ $ hg -R split commit -Ama
+ $ hg -R split push
+ pushing to file:/*/$TESTTMP/r2 (glob)
+ searching for changes
+ adding changesets
+ adding manifests
+ adding file changes
+ added 2 changesets with 2 changes to 1 files
+ $ hg -R split outgoing
+ comparing with file:/*/$TESTTMP/r2 (glob)
+ searching for changes
+ no changes found
+ [1]
+
--- a/tests/test-install.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-install.t Mon Jan 22 17:53:02 2018 -0500
@@ -14,6 +14,7 @@
checking registered compression engines (*zlib*) (glob)
checking available compression engines (*zlib*) (glob)
checking available compression engines for wire protocol (*zlib*) (glob)
+ checking "re2" regexp engine \((available|missing)\) (re)
checking templates (*mercurial?templates)... (glob)
checking default template (*mercurial?templates?map-cmdline.default) (glob)
checking commit editor... (* -c "import sys; sys.exit(0)") (glob)
@@ -44,6 +45,7 @@
"pythonlib": "*", (glob)
"pythonsecurity": [*], (glob)
"pythonver": "*.*.*", (glob)
+ "re2": (true|false), (re)
"templatedirs": "*mercurial?templates", (glob)
"username": "test",
"usernameerror": null,
@@ -67,6 +69,7 @@
checking registered compression engines (*zlib*) (glob)
checking available compression engines (*zlib*) (glob)
checking available compression engines for wire protocol (*zlib*) (glob)
+ checking "re2" regexp engine \((available|missing)\) (re)
checking templates (*mercurial?templates)... (glob)
checking default template (*mercurial?templates?map-cmdline.default) (glob)
checking commit editor... (* -c "import sys; sys.exit(0)") (glob)
@@ -95,7 +98,7 @@
#if execbit
$ chmod 755 tools/testeditor.exe
#endif
- $ hg debuginstall --config ui.editor=~/tools/testeditor.exe
+ $ HGEDITOR="~/tools/testeditor.exe" hg debuginstall
checking encoding (ascii)...
checking Python executable (*) (glob)
checking Python version (*) (glob)
@@ -110,9 +113,10 @@
checking registered compression engines (*zlib*) (glob)
checking available compression engines (*zlib*) (glob)
checking available compression engines for wire protocol (*zlib*) (glob)
+ checking "re2" regexp engine \((available|missing)\) (re)
checking templates (*mercurial?templates)... (glob)
checking default template (*mercurial?templates?map-cmdline.default) (glob)
- checking commit editor... (* -c "import sys; sys.exit(0)") (glob)
+ checking commit editor... ($TESTTMP/tools/testeditor.exe)
checking username (test)
no problems detected
@@ -217,6 +221,7 @@
checking registered compression engines (*) (glob)
checking available compression engines (*) (glob)
checking available compression engines for wire protocol (*) (glob)
+ checking "re2" regexp engine \((available|missing)\) (re)
checking templates ($TESTTMP/installenv/*/site-packages/mercurial/templates)... (glob)
checking default template ($TESTTMP/installenv/*/site-packages/mercurial/templates/map-cmdline.default) (glob)
checking commit editor... (*) (glob)
--- a/tests/test-issue1089.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-issue1089.t Mon Jan 22 17:53:02 2018 -0500
@@ -7,7 +7,7 @@
adding a/b
$ hg rm a
- removing a/b (glob)
+ removing a/b
$ hg ci -m m a
$ mkdir a b
@@ -16,7 +16,7 @@
adding a/b
$ hg rm a
- removing a/b (glob)
+ removing a/b
$ cd b
Relative delete:
--- a/tests/test-issue1502.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-issue1502.t Mon Jan 22 17:53:02 2018 -0500
@@ -13,7 +13,7 @@
$ echo "bar" > foo1/a && hg -R foo1 commit -m "edit a in foo1"
$ echo "hi" > foo/a && hg -R foo commit -m "edited a foo"
$ hg -R foo1 pull
- pulling from $TESTTMP/foo (glob)
+ pulling from $TESTTMP/foo
searching for changes
adding changesets
adding manifests
@@ -30,7 +30,7 @@
$ echo "there" >> foo/a && hg -R foo commit -m "edited a again"
$ hg -R foo1 pull
- pulling from $TESTTMP/foo (glob)
+ pulling from $TESTTMP/foo
searching for changes
adding changesets
adding manifests
--- a/tests/test-issue612.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-issue612.t Mon Jan 22 17:53:02 2018 -0500
@@ -7,7 +7,7 @@
adding src/a.c
$ hg mv src source
- moving src/a.c to source/a.c (glob)
+ moving src/a.c to source/a.c
$ hg ci -Ammove
--- a/tests/test-issue660.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-issue660.t Mon Jan 22 17:53:02 2018 -0500
@@ -67,9 +67,9 @@
$ hg revert --all
undeleting a
- forgetting a/a (glob)
+ forgetting a/a
forgetting b
- undeleting b/b (glob)
+ undeleting b/b
$ hg st
--- a/tests/test-keyword.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-keyword.t Mon Jan 22 17:53:02 2018 -0500
@@ -262,6 +262,7 @@
adding file changes
added 2 changesets with 3 changes to 3 files
new changesets a2392c293916:ef63ca68695b
+ 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
MIME-Version: 1.0
Content-Type: text/plain; charset="us-ascii"
Content-Transfer-Encoding: 7bit
@@ -272,7 +273,7 @@
Message-Id: <hg.a2392c293916*> (glob)
To: Test
- changeset a2392c293916 in $TESTTMP/Test (glob)
+ changeset a2392c293916 in $TESTTMP/Test
details: $TESTTMP/Test?cmd=changeset;node=a2392c293916
description:
addsym
@@ -295,7 +296,7 @@
Message-Id: <hg.ef63ca68695b*> (glob)
To: Test
- changeset ef63ca68695b in $TESTTMP/Test (glob)
+ changeset ef63ca68695b in $TESTTMP/Test
details: $TESTTMP/Test?cmd=changeset;node=ef63ca68695b
description:
absym
@@ -314,7 +315,6 @@
+++ b/b Thu Jan 01 00:00:00 1970 +0000
@@ -0,0 +1,1 @@
+ignore $Id$
- 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
$ cp $HGRCPATH.nohooks $HGRCPATH
@@ -929,7 +929,7 @@
> default = ../Test
> EOF
$ hg incoming
- comparing with $TESTTMP/Test (glob)
+ comparing with $TESTTMP/Test
searching for changes
changeset: 2:bb948857c743
tag: tip
@@ -1138,7 +1138,7 @@
merging m
warning: conflicts while merging m! (edit, then use 'hg resolve --mark')
0 files updated, 0 files merged, 0 files removed, 1 files unresolved
- use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon
+ use 'hg resolve' to retry unresolved file merges or 'hg merge --abort' to abandon
[1]
$ cat m
$Id$
--- a/tests/test-largefiles-cache.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-largefiles-cache.t Mon Jan 22 17:53:02 2018 -0500
@@ -207,7 +207,7 @@
$ echo corruption > .hg/largefiles/e2fb5f2139d086ded2cb600d5a91a196e76bf020
$ hg up -C
getting changed largefiles
- large: data corruption in $TESTTMP/src/.hg/largefiles/e2fb5f2139d086ded2cb600d5a91a196e76bf020 with hash 6a7bb2556144babe3899b25e5428123735bb1e27 (glob)
+ large: data corruption in $TESTTMP/src/.hg/largefiles/e2fb5f2139d086ded2cb600d5a91a196e76bf020 with hash 6a7bb2556144babe3899b25e5428123735bb1e27
0 largefiles updated, 0 removed
0 files updated, 0 files merged, 0 files removed, 0 files unresolved
updated to "cd24c147f45c: modified"
@@ -228,7 +228,7 @@
$ hg push http://localhost:$HGPORT1 -f --config files.usercache=nocache
pushing to http://localhost:$HGPORT1/
searching for changes
- abort: remotestore: could not open file $TESTTMP/src/.hg/largefiles/e2fb5f2139d086ded2cb600d5a91a196e76bf020: HTTP Error 403: ssl required (glob)
+ abort: remotestore: could not open file $TESTTMP/src/.hg/largefiles/e2fb5f2139d086ded2cb600d5a91a196e76bf020: HTTP Error 403: ssl required
[255]
$ rm .hg/largefiles/e2fb5f2139d086ded2cb600d5a91a196e76bf020
--- a/tests/test-largefiles-misc.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-largefiles-misc.t Mon Jan 22 17:53:02 2018 -0500
@@ -66,8 +66,8 @@
./dirb/largefile
$ cd ..
$ hg mv dira dirc
- moving .hglf/dira/baz/largefile to .hglf/dirc/baz/largefile (glob)
- moving .hglf/dira/dirb/largefile to .hglf/dirc/dirb/largefile (glob)
+ moving .hglf/dira/baz/largefile to .hglf/dirc/baz/largefile
+ moving .hglf/dira/dirb/largefile to .hglf/dirc/dirb/largefile
$ find * | sort
dirc
dirc/baz
@@ -292,9 +292,9 @@
? subrepo/renamed-large.txt
$ hg addremove --dry-run subrepo
- removing subrepo/large.txt (glob)
- adding subrepo/normal.txt (glob)
- adding subrepo/renamed-large.txt (glob)
+ removing subrepo/large.txt
+ adding subrepo/normal.txt
+ adding subrepo/renamed-large.txt
$ hg status -S
! subrepo/large.txt
? large.dat
@@ -303,9 +303,9 @@
$ cd ..
$ hg -R statusmatch addremove --dry-run statusmatch/subrepo
- removing statusmatch/subrepo/large.txt (glob)
- adding statusmatch/subrepo/normal.txt (glob)
- adding statusmatch/subrepo/renamed-large.txt (glob)
+ removing statusmatch/subrepo/large.txt
+ adding statusmatch/subrepo/normal.txt
+ adding statusmatch/subrepo/renamed-large.txt
$ hg -R statusmatch status -S
! subrepo/large.txt
? large.dat
@@ -321,7 +321,7 @@
$ mv subrepo/renamed-large.txt subrepo/large.txt
$ hg addremove subrepo
- adding subrepo/normal.txt (glob)
+ adding subrepo/normal.txt
$ hg forget subrepo/normal.txt
$ hg addremove -S
@@ -393,7 +393,7 @@
Forget doesn't change the content of the file
$ echo 'pre-forget content' > subrepo/large.txt
$ hg forget -v subrepo/large.txt
- removing subrepo/large.txt (glob)
+ removing subrepo/large.txt
$ cat subrepo/large.txt
pre-forget content
@@ -403,7 +403,7 @@
C subrepo/large.txt
$ hg rm -v subrepo/large.txt
- removing subrepo/large.txt (glob)
+ removing subrepo/large.txt
$ hg revert -R subrepo subrepo/large.txt
$ rm subrepo/large.txt
$ hg addremove -S
@@ -532,10 +532,10 @@
Test orig files go where we want them
$ echo moremore >> anotherlarge
$ hg revert anotherlarge -v --config 'ui.origbackuppath=.hg/origbackups'
- creating directory: $TESTTMP/addrm2/.hg/origbackups/.hglf/sub (glob)
- saving current version of ../.hglf/sub/anotherlarge as $TESTTMP/addrm2/.hg/origbackups/.hglf/sub/anotherlarge (glob)
- reverting ../.hglf/sub/anotherlarge (glob)
- creating directory: $TESTTMP/addrm2/.hg/origbackups/sub (glob)
+ creating directory: $TESTTMP/addrm2/.hg/origbackups/.hglf/sub
+ saving current version of ../.hglf/sub/anotherlarge as $TESTTMP/addrm2/.hg/origbackups/.hglf/sub/anotherlarge
+ reverting ../.hglf/sub/anotherlarge
+ creating directory: $TESTTMP/addrm2/.hg/origbackups/sub
found 90c622cf65cebe75c5842f9136c459333faf392e in store
found 90c622cf65cebe75c5842f9136c459333faf392e in store
$ ls ../.hg/origbackups/sub
@@ -608,7 +608,7 @@
$ hg -q clone src clone2
$ hg -R clone2 paths | grep default
- default = $TESTTMP/issue3651/src (glob)
+ default = $TESTTMP/issue3651/src
$ hg -R clone2 summary --large
parent: 0:fc0bd45326d3 tip
@@ -619,14 +619,14 @@
phases: 1 draft
largefiles: (no files to upload)
$ hg -R clone2 outgoing --large
- comparing with $TESTTMP/issue3651/src (glob)
+ comparing with $TESTTMP/issue3651/src
searching for changes
no changes found
largefiles: no files to upload
[1]
$ hg -R clone2 outgoing --large --graph --template "{rev}"
- comparing with $TESTTMP/issue3651/src (glob)
+ comparing with $TESTTMP/issue3651/src
searching for changes
no changes found
largefiles: no files to upload
@@ -647,7 +647,7 @@
phases: 2 draft
largefiles: 1 entities for 1 files to upload
$ hg -R clone2 outgoing --large
- comparing with $TESTTMP/issue3651/src (glob)
+ comparing with $TESTTMP/issue3651/src
searching for changes
changeset: 1:1acbe71ce432
tag: tip
@@ -659,7 +659,7 @@
b
$ hg -R clone2 outgoing --large --graph --template "{rev}"
- comparing with $TESTTMP/issue3651/src (glob)
+ comparing with $TESTTMP/issue3651/src
searching for changes
@ 1
@@ -683,7 +683,7 @@
phases: 3 draft
largefiles: 1 entities for 3 files to upload
$ hg -R clone2 outgoing --large -T "{rev}:{node|short}\n"
- comparing with $TESTTMP/issue3651/src (glob)
+ comparing with $TESTTMP/issue3651/src
searching for changes
1:1acbe71ce432
2:6095d0695d70
@@ -695,7 +695,7 @@
$ hg -R clone2 cat -r 1 clone2/.hglf/b
89e6c98d92887913cadf06b2adb97f26cde4849b
$ hg -R clone2 outgoing --large -T "{rev}:{node|short}\n" --debug --config progress.debug=true
- comparing with $TESTTMP/issue3651/src (glob)
+ comparing with $TESTTMP/issue3651/src
query 1; heads
searching for changes
all remote heads known locally
@@ -733,7 +733,7 @@
phases: 6 draft
largefiles: 3 entities for 3 files to upload
$ hg -R clone2 outgoing --large -T "{rev}:{node|short}\n"
- comparing with $TESTTMP/issue3651/src (glob)
+ comparing with $TESTTMP/issue3651/src
searching for changes
1:1acbe71ce432
2:6095d0695d70
@@ -750,7 +750,7 @@
$ hg -R clone2 cat -r 4 clone2/.hglf/b
13f9ed0898e315bf59dc2973fec52037b6f441a2
$ hg -R clone2 outgoing --large -T "{rev}:{node|short}\n" --debug --config progress.debug=true
- comparing with $TESTTMP/issue3651/src (glob)
+ comparing with $TESTTMP/issue3651/src
query 1; heads
searching for changes
all remote heads known locally
@@ -792,7 +792,7 @@
phases: 6 draft
largefiles: 2 entities for 1 files to upload
$ hg -R clone2 outgoing --large -T "{rev}:{node|short}\n"
- comparing with $TESTTMP/issue3651/src (glob)
+ comparing with $TESTTMP/issue3651/src
searching for changes
2:6095d0695d70
3:7983dce246cc
@@ -802,7 +802,7 @@
b
$ hg -R clone2 outgoing --large -T "{rev}:{node|short}\n" --debug --config progress.debug=true
- comparing with $TESTTMP/issue3651/src (glob)
+ comparing with $TESTTMP/issue3651/src
query 1; heads
searching for changes
all remote heads known locally
@@ -843,7 +843,7 @@
A d1/g
$ hg up -qr0
$ hg mv d1 d2
- moving d1/f to d2/f (glob)
+ moving d1/f to d2/f
$ hg ci -qm2
Invoking status precommit hook
A d2/f
@@ -962,7 +962,7 @@
> largefiles=
> EOF
$ hg -R enabledlocally root
- $TESTTMP/individualenabling/enabledlocally (glob)
+ $TESTTMP/individualenabling/enabledlocally
$ hg -R notenabledlocally root
abort: repository requires features unknown to this Mercurial: largefiles!
(see https://mercurial-scm.org/wiki/MissingRequirement for more information)
@@ -1088,18 +1088,14 @@
*/no-largefiles/normal1 */no-largefiles/normal1 (glob) (no-windows !)
[1]
$ hg -R subrepo-root revert --all
- reverting subrepo-root/.hglf/large (glob)
+ reverting subrepo-root/.hglf/large
reverting subrepo no-largefiles
- reverting subrepo-root/no-largefiles/normal1 (glob)
+ reverting subrepo-root/no-largefiles/normal1
Move (and then undo) a directory move with only largefiles.
- $ listtree() {
- > $PYTHON $TESTDIR/list-tree.py $@
- > }
-
$ cd subrepo-root
- $ listtree .hglf dir* large*
+ $ $PYTHON $TESTDIR/list-tree.py .hglf dir* large*
.hglf/
.hglf/dir/
.hglf/dir/subdir/
@@ -1112,9 +1108,9 @@
large.orig
$ hg mv dir/subdir dir/subdir2
- moving .hglf/dir/subdir/large.bin to .hglf/dir/subdir2/large.bin (glob)
+ moving .hglf/dir/subdir/large.bin to .hglf/dir/subdir2/large.bin
- $ listtree .hglf dir* large*
+ $ $PYTHON $TESTDIR/list-tree.py .hglf dir* large*
.hglf/
.hglf/dir/
.hglf/dir/subdir2/
@@ -1139,8 +1135,8 @@
? large.orig
$ hg revert --all
- undeleting .hglf/dir/subdir/large.bin (glob)
- forgetting .hglf/dir/subdir2/large.bin (glob)
+ undeleting .hglf/dir/subdir/large.bin
+ forgetting .hglf/dir/subdir2/large.bin
reverting subrepo no-largefiles
$ hg status -C
@@ -1154,7 +1150,7 @@
The standin for subdir2 should be deleted, not just dropped
- $ listtree .hglf dir* large*
+ $ $PYTHON $TESTDIR/list-tree.py .hglf dir* large*
.hglf/
.hglf/dir/
.hglf/dir/subdir/
@@ -1173,7 +1169,7 @@
'subdir' should not be in the destination. It would be if the subdir2 directory
existed under .hglf/.
$ hg mv dir/subdir dir/subdir2
- moving .hglf/dir/subdir/large.bin to .hglf/dir/subdir2/large.bin (glob)
+ moving .hglf/dir/subdir/large.bin to .hglf/dir/subdir2/large.bin
$ hg status -C
A dir/subdir2/large.bin
@@ -1181,7 +1177,7 @@
R dir/subdir/large.bin
? large.orig
- $ listtree .hglf dir* large*
+ $ $PYTHON $TESTDIR/list-tree.py .hglf dir* large*
.hglf/
.hglf/dir/
.hglf/dir/subdir2/
@@ -1199,14 +1195,14 @@
$ hg --config extensions.purge= purge
$ hg mv dir/subdir dir2/subdir
- moving .hglf/dir/subdir/large.bin to .hglf/dir2/subdir/large.bin (glob)
+ moving .hglf/dir/subdir/large.bin to .hglf/dir2/subdir/large.bin
$ hg status -C
A dir2/subdir/large.bin
dir/subdir/large.bin
R dir/subdir/large.bin
- $ listtree .hglf dir* large*
+ $ $PYTHON $TESTDIR/list-tree.py .hglf dir* large*
.hglf/
.hglf/dir2/
.hglf/dir2/subdir/
@@ -1218,14 +1214,14 @@
large
$ hg revert --all
- undeleting .hglf/dir/subdir/large.bin (glob)
- forgetting .hglf/dir2/subdir/large.bin (glob)
+ undeleting .hglf/dir/subdir/large.bin
+ forgetting .hglf/dir2/subdir/large.bin
reverting subrepo no-largefiles
$ hg status -C
? dir2/subdir/large.bin
- $ listtree .hglf dir* large*
+ $ $PYTHON $TESTDIR/list-tree.py .hglf dir* large*
.hglf/
.hglf/dir/
.hglf/dir/subdir/
@@ -1261,7 +1257,7 @@
> largefiles=
> EOF
$ hg -R dst pull --rebase
- pulling from $TESTTMP/issue3861/src (glob)
+ pulling from $TESTTMP/issue3861/src
requesting all changes
adding changesets
adding manifests
--- a/tests/test-largefiles-update.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-largefiles-update.t Mon Jan 22 17:53:02 2018 -0500
@@ -138,7 +138,7 @@
getting changed largefiles
1 largefiles updated, 0 removed
0 files updated, 1 files merged, 0 files removed, 1 files unresolved
- use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon
+ use 'hg resolve' to retry unresolved file merges or 'hg merge --abort' to abandon
[1]
$ hg status -A large1
M large1
@@ -216,8 +216,7 @@
$ hg add --large largeY
$ hg status -A large1
- large1: The system cannot find the file specified (windows !)
- large1: No such file or directory (no-windows !)
+ large1: $ENOENT$
$ hg status -A large2
? large2
@@ -309,8 +308,7 @@
rebasing 4:07d6153b5c04 "#4" (tip)
$ hg status -A large1
- large1: The system cannot find the file specified (windows !)
- large1: No such file or directory (no-windows !)
+ large1: $ENOENT$
$ hg status -A largeX
C largeX
@@ -320,8 +318,7 @@
$ hg transplant -q 1 4
$ hg status -A large1
- large1: The system cannot find the file specified (windows !)
- large1: No such file or directory (no-windows !)
+ large1: $ENOENT$
$ hg status -A largeX
C largeX
@@ -331,8 +328,7 @@
$ hg transplant -q --merge 1 --merge 4
$ hg status -A large1
- large1: The system cannot find the file specified (windows !)
- large1: No such file or directory (no-windows !)
+ large1: $ENOENT$
$ hg status -A largeX
C largeX
@@ -444,7 +440,7 @@
$ hg update -q -C 2
$ hg strip 3 4
- saved backup bundle to $TESTTMP/repo/.hg/strip-backup/9530e27857f7-2e7b195d-backup.hg (glob)
+ saved backup bundle to $TESTTMP/repo/.hg/strip-backup/9530e27857f7-2e7b195d-backup.hg
$ mv .hg/strip-backup/9530e27857f7-2e7b195d-backup.hg $TESTTMP
(internal linear merging at "hg pull --update")
@@ -452,7 +448,7 @@
$ echo 'large1 for linear merge (conflict)' > large1
$ echo 'large2 for linear merge (conflict with normal file)' > large2
$ hg pull --update --config debug.dirstate.delaywrite=2 $TESTTMP/9530e27857f7-2e7b195d-backup.hg
- pulling from $TESTTMP/9530e27857f7-2e7b195d-backup.hg (glob)
+ pulling from $TESTTMP/9530e27857f7-2e7b195d-backup.hg
searching for changes
adding changesets
adding manifests
--- a/tests/test-largefiles-wireproto.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-largefiles-wireproto.t Mon Jan 22 17:53:02 2018 -0500
@@ -181,7 +181,7 @@
pushing to http://localhost:$HGPORT1/
searching for changes
remote: largefiles: failed to put 4cdac4d8b084d0b599525cf732437fb337d422a8 into store: largefile contents do not match hash
- abort: remotestore: could not put $TESTTMP/r7/.hg/largefiles/4cdac4d8b084d0b599525cf732437fb337d422a8 to remote store http://localhost:$HGPORT1/ (glob)
+ abort: remotestore: could not put $TESTTMP/r7/.hg/largefiles/4cdac4d8b084d0b599525cf732437fb337d422a8 to remote store http://localhost:$HGPORT1/
[255]
$ mv 4cdac4d8b084d0b599525cf732437fb337d422a8 r7/.hg/largefiles/4cdac4d8b084d0b599525cf732437fb337d422a8
Push of file that exists on server but is corrupted - magic healing would be nice ... but too magic
@@ -352,7 +352,7 @@
searching 2 changesets for largefiles
verified existence of 2 revisions of 2 largefiles
$ tail -1 access.log
- $LOCALIP - - [*] "GET /?cmd=batch HTTP/1.1" 200 - x-hgarg-1:cmds=statlfile+sha%3D972a1a11f19934401291cc99117ec614933374ce%3Bstatlfile+sha%3Dc801c9cfe94400963fcb683246217d5db77f9a9a x-hgproto-1:0.1 0.2 comp=*zlib,none,bzip2 (glob)
+ $LOCALIP - - [$LOGDATE$] "GET /?cmd=batch HTTP/1.1" 200 - x-hgarg-1:cmds=statlfile+sha%3D972a1a11f19934401291cc99117ec614933374ce%3Bstatlfile+sha%3Dc801c9cfe94400963fcb683246217d5db77f9a9a x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ (glob)
$ hg -R batchverifyclone update
getting changed largefiles
2 largefiles updated, 0 removed
@@ -390,7 +390,7 @@
searching 3 changesets for largefiles
verified existence of 3 revisions of 3 largefiles
$ tail -1 access.log
- $LOCALIP - - [*] "GET /?cmd=batch HTTP/1.1" 200 - x-hgarg-1:cmds=statlfile+sha%3Dc8559c3c9cfb42131794b7d8009230403b9b454c x-hgproto-1:0.1 0.2 comp=*zlib,none,bzip2 (glob)
+ $LOCALIP - - [$LOGDATE$] "GET /?cmd=batch HTTP/1.1" 200 - x-hgarg-1:cmds=statlfile+sha%3Dc8559c3c9cfb42131794b7d8009230403b9b454c x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ (glob)
$ killdaemons.py
--- a/tests/test-largefiles.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-largefiles.t Mon Jan 22 17:53:02 2018 -0500
@@ -343,8 +343,8 @@
$ echo large6 > sub2/large6
$ echo large7 > sub2/large7
$ hg add --large sub2
- adding sub2/large6 as a largefile (glob)
- adding sub2/large7 as a largefile (glob)
+ adding sub2/large6 as a largefile
+ adding sub2/large7 as a largefile
$ hg st
M large3
A large5
@@ -661,7 +661,7 @@
Test that outgoing --large works (with revsets too)
$ hg outgoing --rev '.^' --large
- comparing with $TESTTMP/a (glob)
+ comparing with $TESTTMP/a
searching for changes
changeset: 8:c02fd3b77ec4
user: test
@@ -1098,7 +1098,7 @@
$ rm "${USERCACHE}"/*
$ cd a-backup
$ hg pull --all-largefiles --config paths.default-push=bogus/path
- pulling from $TESTTMP/a (glob)
+ pulling from $TESTTMP/a
searching for changes
adding changesets
adding manifests
@@ -1113,7 +1113,7 @@
$ hg rollback
repository tip rolled back to revision 1 (undo pull)
$ hg pull -v --lfrev 'heads(pulled())+min(pulled())'
- pulling from $TESTTMP/a (glob)
+ pulling from $TESTTMP/a
searching for changes
all local heads known remotely
6 changesets found
@@ -1199,7 +1199,7 @@
$ [ ! -f .hg/largefiles/e166e74c7303192238d60af5a9c4ce9bef0b7928 ]
$ hg pull --rebase --all-largefiles --config paths.default-push=bogus/path --config paths.default=../b
- pulling from $TESTTMP/b (glob)
+ pulling from $TESTTMP/b
searching for changes
adding changesets
adding manifests
@@ -1210,7 +1210,7 @@
Invoking status precommit hook
M sub/normal4
M sub2/large6
- saved backup bundle to $TESTTMP/d/.hg/strip-backup/f574fb32bb45-dd1d9f80-rebase.hg (glob)
+ saved backup bundle to $TESTTMP/d/.hg/strip-backup/f574fb32bb45-dd1d9f80-rebase.hg
0 largefiles cached
$ [ -f .hg/largefiles/e166e74c7303192238d60af5a9c4ce9bef0b7928 ]
$ hg log --template '{rev}:{node|short} {desc|firstline}\n'
@@ -1270,7 +1270,7 @@
Invoking status precommit hook
M sub/normal4
M sub2/large6
- saved backup bundle to $TESTTMP/e/.hg/strip-backup/f574fb32bb45-dd1d9f80-rebase.hg (glob)
+ saved backup bundle to $TESTTMP/e/.hg/strip-backup/f574fb32bb45-dd1d9f80-rebase.hg
$ hg log --template '{rev}:{node|short} {desc|firstline}\n'
9:598410d3eb9a modify normal file largefile in repo d
8:a381d2c8c80e modify normal file and largefile in repo b
@@ -1500,8 +1500,8 @@
# XXX we don't really want to report that we're reverting the standin;
# that's just an implementation detail. But I don't see an obvious fix. ;-(
$ hg revert sub
- reverting .hglf/sub/large4 (glob)
- reverting sub/normal4 (glob)
+ reverting .hglf/sub/large4
+ reverting sub/normal4
$ hg status
M normal3
A sub2/large8
@@ -1513,8 +1513,8 @@
$ cat sub/large4
large4-modified
$ hg revert -a --no-backup
- undeleting .hglf/sub2/large6 (glob)
- forgetting .hglf/sub2/large8 (glob)
+ undeleting .hglf/sub2/large6
+ forgetting .hglf/sub2/large8
reverting normal3
$ hg status
? sub/large4.orig
@@ -1528,12 +1528,12 @@
revert some files to an older revision
$ hg revert --no-backup -r 8 sub2
- reverting .hglf/sub2/large6 (glob)
+ reverting .hglf/sub2/large6
$ cat sub2/large6
large6
$ hg revert --no-backup -C -r '.^' sub2
$ hg revert --no-backup sub2
- reverting .hglf/sub2/large6 (glob)
+ reverting .hglf/sub2/large6
$ hg status
"verify --large" actually verifies largefiles
@@ -1542,7 +1542,7 @@
$ pwd
$TESTTMP/e
$ hg paths
- default = $TESTTMP/d (glob)
+ default = $TESTTMP/d
$ hg verify --large
checking changesets
@@ -1565,14 +1565,14 @@
checking files
10 files, 10 changesets, 28 total revisions
searching 1 changesets for largefiles
- changeset 9:598410d3eb9a: sub/large4 references missing $TESTTMP/d/.hg/largefiles/e166e74c7303192238d60af5a9c4ce9bef0b7928 (glob)
+ changeset 9:598410d3eb9a: sub/large4 references missing $TESTTMP/d/.hg/largefiles/e166e74c7303192238d60af5a9c4ce9bef0b7928
verified existence of 3 revisions of 3 largefiles
[1]
- introduce corruption and make sure that it is caught when checking content:
$ echo '5 cents' > $TESTTMP/d/.hg/largefiles/e166e74c7303192238d60af5a9c4ce9bef0b7928
$ hg verify -q --large --lfc
- changeset 9:598410d3eb9a: sub/large4 references corrupted $TESTTMP/d/.hg/largefiles/e166e74c7303192238d60af5a9c4ce9bef0b7928 (glob)
+ changeset 9:598410d3eb9a: sub/large4 references corrupted $TESTTMP/d/.hg/largefiles/e166e74c7303192238d60af5a9c4ce9bef0b7928
[1]
- cleanup
@@ -1582,16 +1582,16 @@
- verifying all revisions will fail because we didn't clone all largefiles to d:
$ echo 'T-shirt' > $TESTTMP/d/.hg/largefiles/eb7338044dc27f9bc59b8dd5a246b065ead7a9c4
$ hg verify -q --lfa --lfc
- changeset 0:30d30fe6a5be: large1 references missing $TESTTMP/d/.hg/largefiles/4669e532d5b2c093a78eca010077e708a071bb64 (glob)
- changeset 0:30d30fe6a5be: sub/large2 references missing $TESTTMP/d/.hg/largefiles/1deebade43c8c498a3c8daddac0244dc55d1331d (glob)
- changeset 1:ce8896473775: large1 references missing $TESTTMP/d/.hg/largefiles/5f78770c0e77ba4287ad6ef3071c9bf9c379742f (glob)
- changeset 1:ce8896473775: sub/large2 references corrupted $TESTTMP/d/.hg/largefiles/eb7338044dc27f9bc59b8dd5a246b065ead7a9c4 (glob)
- changeset 3:9e8fbc4bce62: large1 references corrupted $TESTTMP/d/.hg/largefiles/eb7338044dc27f9bc59b8dd5a246b065ead7a9c4 (glob)
- changeset 4:74c02385b94c: large3 references corrupted $TESTTMP/d/.hg/largefiles/eb7338044dc27f9bc59b8dd5a246b065ead7a9c4 (glob)
- changeset 4:74c02385b94c: sub/large4 references corrupted $TESTTMP/d/.hg/largefiles/eb7338044dc27f9bc59b8dd5a246b065ead7a9c4 (glob)
- changeset 5:9d5af5072dbd: large3 references missing $TESTTMP/d/.hg/largefiles/baaf12afde9d8d67f25dab6dced0d2bf77dba47c (glob)
- changeset 5:9d5af5072dbd: sub/large4 references missing $TESTTMP/d/.hg/largefiles/aeb2210d19f02886dde00dac279729a48471e2f9 (glob)
- changeset 6:4355d653f84f: large3 references missing $TESTTMP/d/.hg/largefiles/7838695e10da2bb75ac1156565f40a2595fa2fa0 (glob)
+ changeset 0:30d30fe6a5be: large1 references missing $TESTTMP/d/.hg/largefiles/4669e532d5b2c093a78eca010077e708a071bb64
+ changeset 0:30d30fe6a5be: sub/large2 references missing $TESTTMP/d/.hg/largefiles/1deebade43c8c498a3c8daddac0244dc55d1331d
+ changeset 1:ce8896473775: large1 references missing $TESTTMP/d/.hg/largefiles/5f78770c0e77ba4287ad6ef3071c9bf9c379742f
+ changeset 1:ce8896473775: sub/large2 references corrupted $TESTTMP/d/.hg/largefiles/eb7338044dc27f9bc59b8dd5a246b065ead7a9c4
+ changeset 3:9e8fbc4bce62: large1 references corrupted $TESTTMP/d/.hg/largefiles/eb7338044dc27f9bc59b8dd5a246b065ead7a9c4
+ changeset 4:74c02385b94c: large3 references corrupted $TESTTMP/d/.hg/largefiles/eb7338044dc27f9bc59b8dd5a246b065ead7a9c4
+ changeset 4:74c02385b94c: sub/large4 references corrupted $TESTTMP/d/.hg/largefiles/eb7338044dc27f9bc59b8dd5a246b065ead7a9c4
+ changeset 5:9d5af5072dbd: large3 references missing $TESTTMP/d/.hg/largefiles/baaf12afde9d8d67f25dab6dced0d2bf77dba47c
+ changeset 5:9d5af5072dbd: sub/large4 references missing $TESTTMP/d/.hg/largefiles/aeb2210d19f02886dde00dac279729a48471e2f9
+ changeset 6:4355d653f84f: large3 references missing $TESTTMP/d/.hg/largefiles/7838695e10da2bb75ac1156565f40a2595fa2fa0
[1]
- cleanup
@@ -1655,7 +1655,7 @@
Pulling 0 revisions with --all-largefiles should not fetch for all revisions
$ hg pull --all-largefiles
- pulling from $TESTTMP/d (glob)
+ pulling from $TESTTMP/d
searching for changes
no changes found
@@ -1752,8 +1752,8 @@
- revert should be able to revert files introduced in a pending merge
$ hg revert --all -r .
- removing .hglf/large (glob)
- undeleting .hglf/sub2/large6 (glob)
+ removing .hglf/large
+ undeleting .hglf/sub2/large6
Test that a normal file and a largefile with the same name and path cannot
coexist.
@@ -1761,7 +1761,7 @@
$ rm sub2/large7
$ echo "largeasnormal" > sub2/large7
$ hg add sub2/large7
- sub2/large7 already a largefile (glob)
+ sub2/large7 already a largefile
Test that transplanting a largefile change works correctly.
@@ -1832,7 +1832,7 @@
$ hg cat .hglf/sub/large4
e166e74c7303192238d60af5a9c4ce9bef0b7928
$ hg cat .hglf/normal3
- .hglf/normal3: no such file in rev 598410d3eb9a (glob)
+ .hglf/normal3: no such file in rev 598410d3eb9a
[1]
Test that renaming a largefile results in correct output for status
--- a/tests/test-lfconvert.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-lfconvert.t Mon Jan 22 17:53:02 2018 -0500
@@ -131,7 +131,7 @@
warning: stuff/maybelarge.dat looks like a binary file.
warning: conflicts while merging stuff/maybelarge.dat! (edit, then use 'hg resolve --mark')
0 files updated, 1 files merged, 0 files removed, 1 files unresolved
- use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon
+ use 'hg resolve' to retry unresolved file merges or 'hg merge --abort' to abandon
[1]
$ hg cat -r . sub/maybelarge.dat > stuff/maybelarge.dat
$ hg resolve -m stuff/maybelarge.dat
@@ -215,7 +215,7 @@
$ hg share -q -U bigfile-repo shared
$ printf 'bogus' > shared/.hg/sharedpath
$ hg lfconvert shared foo
- abort: .hg/sharedpath points to nonexistent directory $TESTTMP/bogus! (glob)
+ abort: .hg/sharedpath points to nonexistent directory $TESTTMP/bogus!
[255]
$ hg lfconvert bigfile-repo largefiles-repo
initializing destination largefiles-repo
@@ -340,12 +340,12 @@
checking files
9 files, 8 changesets, 13 total revisions
searching 7 changesets for largefiles
- changeset 0:d4892ec57ce2: large references missing $TESTTMP/largefiles-repo-hg/.hg/largefiles/2e000fa7e85759c7f4c254d4d9c33ef481e459a7 (glob)
- changeset 1:334e5237836d: sub/maybelarge.dat references missing $TESTTMP/largefiles-repo-hg/.hg/largefiles/34e163be8e43c5631d8b92e9c43ab0bf0fa62b9c (glob)
- changeset 2:261ad3f3f037: stuff/maybelarge.dat references missing $TESTTMP/largefiles-repo-hg/.hg/largefiles/34e163be8e43c5631d8b92e9c43ab0bf0fa62b9c (glob)
- changeset 3:55759520c76f: sub/maybelarge.dat references missing $TESTTMP/largefiles-repo-hg/.hg/largefiles/76236b6a2c6102826c61af4297dd738fb3b1de38 (glob)
- changeset 5:9cc5aa7204f0: stuff/maybelarge.dat references missing $TESTTMP/largefiles-repo-hg/.hg/largefiles/76236b6a2c6102826c61af4297dd738fb3b1de38 (glob)
- changeset 6:17126745edfd: anotherlarge references missing $TESTTMP/largefiles-repo-hg/.hg/largefiles/3b71f43ff30f4b15b5cd85dd9e95ebc7e84eb5a3 (glob)
+ changeset 0:d4892ec57ce2: large references missing $TESTTMP/largefiles-repo-hg/.hg/largefiles/2e000fa7e85759c7f4c254d4d9c33ef481e459a7
+ changeset 1:334e5237836d: sub/maybelarge.dat references missing $TESTTMP/largefiles-repo-hg/.hg/largefiles/34e163be8e43c5631d8b92e9c43ab0bf0fa62b9c
+ changeset 2:261ad3f3f037: stuff/maybelarge.dat references missing $TESTTMP/largefiles-repo-hg/.hg/largefiles/34e163be8e43c5631d8b92e9c43ab0bf0fa62b9c
+ changeset 3:55759520c76f: sub/maybelarge.dat references missing $TESTTMP/largefiles-repo-hg/.hg/largefiles/76236b6a2c6102826c61af4297dd738fb3b1de38
+ changeset 5:9cc5aa7204f0: stuff/maybelarge.dat references missing $TESTTMP/largefiles-repo-hg/.hg/largefiles/76236b6a2c6102826c61af4297dd738fb3b1de38
+ changeset 6:17126745edfd: anotherlarge references missing $TESTTMP/largefiles-repo-hg/.hg/largefiles/3b71f43ff30f4b15b5cd85dd9e95ebc7e84eb5a3
verified existence of 6 revisions of 4 largefiles
[1]
$ hg -R largefiles-repo-hg showconfig paths
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-lfs-largefiles.t Mon Jan 22 17:53:02 2018 -0500
@@ -0,0 +1,354 @@
+This tests the interaction between the largefiles and lfs extensions, and
+conversion from largefiles -> lfs.
+
+ $ cat >> $HGRCPATH << EOF
+ > [extensions]
+ > largefiles =
+ >
+ > [lfs]
+ > # standin files are 41 bytes. Stay bigger for clarity.
+ > threshold = 42
+ > EOF
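+
+(Why 41: a largefiles standin holds the sha1 of the tracked file as 40 hex
+digits plus a trailing newline. A quick check of that arithmetic, using a
+placeholder hash:)
+
+ >>> standin = '0' * 40 + '\n'
+ >>> print(len(standin))
+ 41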
+
+Set up a repo with a normal file and a largefile, above and below the lfs
+threshold, to test lfconvert. *.txt files start life as normal files; *.bin
+files start as lfs/largefiles.
+
+ $ hg init largefiles
+ $ cd largefiles
+ $ echo 'normal' > normal.txt
+ $ echo 'normal above lfs threshold 0000000000000000000000000' > lfs.txt
+ $ hg ci -Am 'normal.txt'
+ adding lfs.txt
+ adding normal.txt
+ $ echo 'largefile' > large.bin
+ $ echo 'largefile above lfs threshold 0000000000000000000000' > lfs.bin
+ $ hg add --large large.bin lfs.bin
+ $ hg ci -m 'add largefiles'
+
+ $ cat >> $HGRCPATH << EOF
+ > [extensions]
+ > lfs =
+ > EOF
+
+Add an lfs file and a normal file that collide with files on the other branch.
+large.bin is added as a normal file, and is named as such only to clash with the
+largefile on the other branch.
+
+ $ hg up -q '.^'
+ $ echo 'below lfs threshold' > large.bin
+ $ echo 'lfs above the lfs threshold for length 0000000000000' > lfs.bin
+ $ hg ci -Am 'add with lfs extension'
+ adding large.bin
+ adding lfs.bin
+ created new head
+
+ $ hg log -G
+ @ changeset: 2:e989d0fa3764
+ | tag: tip
+ | parent: 0:29361292f54d
+ | user: test
+ | date: Thu Jan 01 00:00:00 1970 +0000
+ | summary: add with lfs extension
+ |
+ | o changeset: 1:6513aaab9ca0
+ |/ user: test
+ | date: Thu Jan 01 00:00:00 1970 +0000
+ | summary: add largefiles
+ |
+ o changeset: 0:29361292f54d
+ user: test
+ date: Thu Jan 01 00:00:00 1970 +0000
+ summary: normal.txt
+
+--------------------------------------------------------------------------------
+Merge largefiles into lfs branch
+
+The largefiles extension will prompt whether to use the normal file or the
+largefile when merging into the lfs files. `hg manifest` will show standins if
+present. They aren't, because the largefiles merge doesn't merge content. If it
+did, selecting (n)ormal would convert to lfs on commit, if appropriate.
+
+BUG: Largefiles isn't running the merge tool, as happens when two lfs files are
+merged. This is probably by design, but it should probably at least prompt
+whether content should be taken from (l)ocal or (o)ther as well.
+
+ $ hg --config ui.interactive=True merge 6513aaab9ca0 <<EOF
+ > n
+ > n
+ > EOF
+ remote turned local normal file large.bin into a largefile
+ use (l)argefile or keep (n)ormal file? n
+ remote turned local normal file lfs.bin into a largefile
+ use (l)argefile or keep (n)ormal file? n
+ 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ (branch merge, don't forget to commit)
+ $ hg ci -m 'merge lfs with largefiles -> normal'
+ $ hg manifest
+ large.bin
+ lfs.bin
+ lfs.txt
+ normal.txt
+
+The merged lfs.bin resolved to lfs because the (n)ormal option was picked. The
+lfs.txt file is unchanged by the merge, because it was added before lfs was
+enabled, and the content didn't change.
+ $ hg debugdata lfs.bin 0
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:81c7492b2c05e130431f65a87651b54a30c5da72c99ce35a1e9b9872a807312b
+ size 53
+ x-is-binary 0
+ $ hg debugdata lfs.txt 0
+ normal above lfs threshold 0000000000000000000000000
+
+Another filelog entry is NOT made by the merge, so nothing is committed as lfs.
+ $ hg log -r . -T '{join(lfs_files, ", ")}\n'
+
+
+Replay the last merge, but pick (l)arge this time. The manifest will show any
+standins.
+
+ $ hg up -Cq e989d0fa3764
+
+ $ hg --config ui.interactive=True merge 6513aaab9ca0 <<EOF
+ > l
+ > l
+ > EOF
+ remote turned local normal file large.bin into a largefile
+ use (l)argefile or keep (n)ormal file? l
+ remote turned local normal file lfs.bin into a largefile
+ use (l)argefile or keep (n)ormal file? l
+ getting changed largefiles
+ 2 largefiles updated, 0 removed
+ 2 files updated, 0 files merged, 2 files removed, 0 files unresolved
+ (branch merge, don't forget to commit)
+ $ hg ci -m 'merge lfs with largefiles -> large'
+ created new head
+ $ hg manifest
+ .hglf/large.bin
+ .hglf/lfs.bin
+ lfs.txt
+ normal.txt
+
+--------------------------------------------------------------------------------
+Merge lfs into largefiles branch
+
+ $ hg up -Cq 6513aaab9ca0
+ $ hg --config ui.interactive=True merge e989d0fa3764 <<EOF
+ > n
+ > n
+ > EOF
+ remote turned local largefile large.bin into a normal file
+ keep (l)argefile or use (n)ormal file? n
+ remote turned local largefile lfs.bin into a normal file
+ keep (l)argefile or use (n)ormal file? n
+ getting changed largefiles
+ 0 largefiles updated, 0 removed
+ 2 files updated, 0 files merged, 2 files removed, 0 files unresolved
+ (branch merge, don't forget to commit)
+ $ hg ci -m 'merge largefiles with lfs -> normal'
+ created new head
+ $ hg manifest
+ large.bin
+ lfs.bin
+ lfs.txt
+ normal.txt
+
+The merged lfs.bin got converted to lfs because the (n)ormal option was picked.
+The lfs.txt file is unchanged by the merge, because it was added before lfs was
+enabled.
+ $ hg debugdata lfs.bin 0
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:81c7492b2c05e130431f65a87651b54a30c5da72c99ce35a1e9b9872a807312b
+ size 53
+ x-is-binary 0
+ $ hg debugdata lfs.txt 0
+ normal above lfs threshold 0000000000000000000000000
+
+Another filelog entry is NOT made by the merge, so nothing is committed as lfs.
+ $ hg log -r . -T '{join(lfs_files, ", ")}\n'
+
+
+Replay the last merge, but pick (l)arge this time. The manifest will show the
+standins.
+
+ $ hg up -Cq 6513aaab9ca0
+
+ $ hg --config ui.interactive=True merge e989d0fa3764 <<EOF
+ > l
+ > l
+ > EOF
+ remote turned local largefile large.bin into a normal file
+ keep (l)argefile or use (n)ormal file? l
+ remote turned local largefile lfs.bin into a normal file
+ keep (l)argefile or use (n)ormal file? l
+ 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ (branch merge, don't forget to commit)
+ $ hg ci -m 'merge largefiles with lfs -> large'
+ created new head
+ $ hg manifest
+ .hglf/large.bin
+ .hglf/lfs.bin
+ lfs.txt
+ normal.txt
+
+--------------------------------------------------------------------------------
+
+When both largefiles and lfs are configured to add by size, the tie goes to
+largefiles, since it hooks cmdutil.add() while lfs hooks the filelog write at
+commit time. By the time the commit occurs, the file actually tracked is the
+41-byte standin, which stays below the lfs threshold (assuming the threshold
+is > 41, so the standins don't become lfs objects).
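+
+(A configuration that arms both extensions by size would look roughly like the
+following; the file it is written to here is a throwaway, purely for
+illustration:)
+
+ $ cat > $TESTTMP/tie-example.rc << EOF
+ > [largefiles]
+ > # files of at least this many megabytes are added as largefiles
+ > minsize = 1
+ > [lfs]
+ > # filelog payloads larger than this many bytes become lfs blobs
+ > threshold = 42
+ > EOF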
+
+ $ $PYTHON -c 'import sys ; sys.stdout.write("y\n" * 1048576)' > large_by_size.bin
+ $ hg --config largefiles.minsize=1 ci -Am 'large by size'
+ adding large_by_size.bin as a largefile
+ $ hg manifest
+ .hglf/large.bin
+ .hglf/large_by_size.bin
+ .hglf/lfs.bin
+ lfs.txt
+ normal.txt
+
+ $ hg rm large_by_size.bin
+ $ hg ci -m 'remove large_by_size.bin'
+
+Largefiles doesn't do anything special with diff, so it falls back to diffing
+the standins. Extdiff is also a standin-based comparison. Diff and extdiff both
+work on the original file for lfs objects.
+
+Largefile -> lfs transition
+ $ hg diff -r 1 -r 3
+ diff -r 6513aaab9ca0 -r dcc5ce63e252 .hglf/large.bin
+ --- a/.hglf/large.bin Thu Jan 01 00:00:00 1970 +0000
+ +++ /dev/null Thu Jan 01 00:00:00 1970 +0000
+ @@ -1,1 +0,0 @@
+ -cef9a458373df9b0743a0d3c14d0c66fb19b8629
+ diff -r 6513aaab9ca0 -r dcc5ce63e252 .hglf/lfs.bin
+ --- a/.hglf/lfs.bin Thu Jan 01 00:00:00 1970 +0000
+ +++ /dev/null Thu Jan 01 00:00:00 1970 +0000
+ @@ -1,1 +0,0 @@
+ -557fb6309cef935e1ac2c8296508379e4b15a6e6
+ diff -r 6513aaab9ca0 -r dcc5ce63e252 large.bin
+ --- /dev/null Thu Jan 01 00:00:00 1970 +0000
+ +++ b/large.bin Thu Jan 01 00:00:00 1970 +0000
+ @@ -0,0 +1,1 @@
+ +below lfs threshold
+ diff -r 6513aaab9ca0 -r dcc5ce63e252 lfs.bin
+ --- /dev/null Thu Jan 01 00:00:00 1970 +0000
+ +++ b/lfs.bin Thu Jan 01 00:00:00 1970 +0000
+ @@ -0,0 +1,1 @@
+ +lfs above the lfs threshold for length 0000000000000
+
+lfs -> largefiles transition
+ $ hg diff -r 2 -r 6
+ diff -r e989d0fa3764 -r 95e1e80325c8 .hglf/large.bin
+ --- /dev/null Thu Jan 01 00:00:00 1970 +0000
+ +++ b/.hglf/large.bin Thu Jan 01 00:00:00 1970 +0000
+ @@ -0,0 +1,1 @@
+ +cef9a458373df9b0743a0d3c14d0c66fb19b8629
+ diff -r e989d0fa3764 -r 95e1e80325c8 .hglf/lfs.bin
+ --- /dev/null Thu Jan 01 00:00:00 1970 +0000
+ +++ b/.hglf/lfs.bin Thu Jan 01 00:00:00 1970 +0000
+ @@ -0,0 +1,1 @@
+ +557fb6309cef935e1ac2c8296508379e4b15a6e6
+ diff -r e989d0fa3764 -r 95e1e80325c8 large.bin
+ --- a/large.bin Thu Jan 01 00:00:00 1970 +0000
+ +++ /dev/null Thu Jan 01 00:00:00 1970 +0000
+ @@ -1,1 +0,0 @@
+ -below lfs threshold
+ diff -r e989d0fa3764 -r 95e1e80325c8 lfs.bin
+ --- a/lfs.bin Thu Jan 01 00:00:00 1970 +0000
+ +++ /dev/null Thu Jan 01 00:00:00 1970 +0000
+ @@ -1,1 +0,0 @@
+ -lfs above the lfs threshold for length 0000000000000
+
+A largefiles repo can be converted to lfs. The lfconvert command uses the
+convert extension under the hood with --to-normal, so the --config based
+parameters are available, but not --authormap, --branchmap, etc.
+
+ $ cd ..
+ $ hg lfconvert --to-normal largefiles nolargefiles 2>&1
+ initializing destination nolargefiles
+ 0 additional largefiles cached
+ scanning source...
+ sorting...
+ converting...
+ 8 normal.txt
+ 7 add largefiles
+ 6 add with lfs extension
+ 5 merge lfs with largefiles -> normal
+ 4 merge lfs with largefiles -> large
+ 3 merge largefiles with lfs -> normal
+ 2 merge largefiles with lfs -> large
+ 1 large by size
+ 0 remove large_by_size.bin
+ $ cd nolargefiles
+
+The requirement is added to the destination repo, and the extension is enabled
+locally.
+
+ $ cat .hg/requires
+ dotencode
+ fncache
+ generaldelta
+ lfs
+ revlogv1
+ store
+ $ hg config --debug extensions | grep lfs
+ $TESTTMP/nolargefiles/.hg/hgrc:*: extensions.lfs= (glob)
+
+ $ hg log -r 'all()' -G -T '{rev} {join(lfs_files, ", ")} ({desc})\n'
+ o 8 (remove large_by_size.bin)
+ |
+ o 7 large_by_size.bin (large by size)
+ |
+ o 6 (merge largefiles with lfs -> large)
+ |\
+ +---o 5 (merge largefiles with lfs -> normal)
+ | |/
+ +---o 4 lfs.bin (merge lfs with largefiles -> large)
+ | |/
+ +---o 3 (merge lfs with largefiles -> normal)
+ | |/
+ | o 2 lfs.bin (add with lfs extension)
+ | |
+ o | 1 lfs.bin (add largefiles)
+ |/
+ o 0 lfs.txt (normal.txt)
+
+ $ hg debugdata lfs.bin 0
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:2172a5bd492dd41ec533b9bb695f7691b6351719407ac797f0ccad5348c81e62
+ size 53
+ x-is-binary 0
+ $ hg debugdata lfs.bin 1
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:81c7492b2c05e130431f65a87651b54a30c5da72c99ce35a1e9b9872a807312b
+ size 53
+ x-is-binary 0
+ $ hg debugdata lfs.bin 2
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:2172a5bd492dd41ec533b9bb695f7691b6351719407ac797f0ccad5348c81e62
+ size 53
+ x-is-binary 0
+ $ hg debugdata lfs.bin 3
+ abort: invalid revision identifier 3
+ [255]
+
+There are no diffs when comparing a merge against the p1 whose changes it kept.
+A diff from lfs to largefiles no longer operates on standin files.
+
+ $ hg diff -r 2:3
+ $ hg diff -r 2:6
+ diff -r e989d0fa3764 -r 752e3a0d8488 large.bin
+ --- a/large.bin Thu Jan 01 00:00:00 1970 +0000
+ +++ b/large.bin Thu Jan 01 00:00:00 1970 +0000
+ @@ -1,1 +1,1 @@
+ -below lfs threshold
+ +largefile
+ diff -r e989d0fa3764 -r 752e3a0d8488 lfs.bin
+ --- a/lfs.bin Thu Jan 01 00:00:00 1970 +0000
+ +++ b/lfs.bin Thu Jan 01 00:00:00 1970 +0000
+ @@ -1,1 +1,1 @@
+ -lfs above the lfs threshold for length 0000000000000
+ +largefile above lfs threshold 0000000000000000000000
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-lfs-pointer.py Mon Jan 22 17:53:02 2018 -0500
@@ -0,0 +1,41 @@
+from __future__ import absolute_import, print_function
+
+import os
+import sys
+
+# make it runnable using python directly without run-tests.py
+sys.path[0:0] = [os.path.join(os.path.dirname(__file__), '..')]
+
+from hgext.lfs import pointer
+
+def tryparse(text):
+ r = {}
+ try:
+ r = pointer.deserialize(text)
+ print('ok')
+ except Exception as ex:
+ print(ex)
+ if r:
+ text2 = r.serialize()
+ if text2 != text:
+ print('reconstructed text differs')
+ return r
+
+t = ('version https://git-lfs.github.com/spec/v1\n'
+ 'oid sha256:4d7a214614ab2935c943f9e0ff69d22eadbb8f32b1'
+ '258daaa5e2ca24d17e2393\n'
+ 'size 12345\n'
+ 'x-foo extra-information\n')
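+
+# 't' above is a well-formed pointer: the "version" line comes first, the
+# mandatory "oid" (with its "sha256:" prefix) and "size" keys follow, and
+# unknown "x-*" keys such as "x-foo" are tolerated. Each tryparse() call below
+# breaks one of those rules (or none) and prints 'ok', the resulting error, or
+# a note if the re-serialized text differs.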
+
+tryparse('')
+tryparse(t)
+tryparse(t.replace('git-lfs', 'unknown'))
+tryparse(t.replace('v1\n', 'v1\n\n'))
+tryparse(t.replace('sha256', 'ahs256'))
+tryparse(t.replace('sha256:', ''))
+tryparse(t.replace('12345', '0x12345'))
+tryparse(t.replace('extra-information', 'extra\0information'))
+tryparse(t.replace('extra-information', 'extra\ninformation'))
+tryparse(t.replace('x-foo', 'x_foo'))
+tryparse(t.replace('oid', 'blobid'))
+tryparse(t.replace('size', 'size-bytes').replace('oid', 'object-id'))
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-lfs-pointer.py.out Mon Jan 22 17:53:02 2018 -0500
@@ -0,0 +1,12 @@
+missed keys: oid, size
+ok
+unexpected value: version='https://unknown.github.com/spec/v1'
+cannot parse git-lfs text: 'version https://git-lfs.github.com/spec/v1\n\noid sha256:4d7a214614ab2935c943f9e0ff69d22eadbb8f32b1258daaa5e2ca24d17e2393\nsize 12345\nx-foo extra-information\n'
+unexpected value: oid='ahs256:4d7a214614ab2935c943f9e0ff69d22eadbb8f32b1258daaa5e2ca24d17e2393'
+unexpected value: oid='4d7a214614ab2935c943f9e0ff69d22eadbb8f32b1258daaa5e2ca24d17e2393'
+unexpected value: size='0x12345'
+ok
+cannot parse git-lfs text: 'version https://git-lfs.github.com/spec/v1\noid sha256:4d7a214614ab2935c943f9e0ff69d22eadbb8f32b1258daaa5e2ca24d17e2393\nsize 12345\nx-foo extra\ninformation\n'
+unexpected key: x_foo
+missed keys: oid
+missed keys: oid, size
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-lfs-serve.t Mon Jan 22 17:53:02 2018 -0500
@@ -0,0 +1,283 @@
+#testcases lfsremote-on lfsremote-off
+#require serve
+
+This test splits `hg serve` with and without using the extension into separate
+test cases. The tests are broken down as follows, where "LFS"/"No-LFS"
+indicates whether or not there are commits that use an LFS file, and "D"/"E"
+indicates whether or not the extension is loaded. The "X" cases are not tested
+individually, because the lfs requirement causes the process to bail early if
+the extension is disabled.
+
+.                               Server
+.
+.                       No-LFS          LFS
+.                  +-------+-------+-------+-------+
+.                  |   D   |   E   |   D   |   E   |
+.             +---++=======+=======+=======+=======+
+.  C          | D ||  N/A  |  #1   |   X   |  #4   |
+.  l  No-LFS  +---++-------+-------+-------+-------+
+.  i          | E ||  #2   |  #2   |   X   |  #5   |
+.  e          +---++-------+-------+-------+-------+
+.  n          | D ||   X   |   X   |   X   |   X   |
+.  t  LFS     +---++-------+-------+-------+-------+
+.             | E ||  #3   |  #3   |   X   |  #6   |
+.             +---++-------+-------+-------+-------+
+
+ $ hg init server
+ $ SERVER_REQUIRES="$TESTTMP/server/.hg/requires"
+
+Skip the experimental.changegroup3=True config. Failure to agree on this comes
+first, and causes a "ValueError: no common changegroup version" or "abort:
+HTTP Error 500: Internal Server Error", if the extension is only loaded on one
+side. If that *is* enabled, the subsequent failure is "abort: missing processor
+for flag '0x2000'!" if the extension is only loaded on one side (possibly also
+masked by the Internal Server Error message).
+ $ cat >> $HGRCPATH <<EOF
+ > [lfs]
+ > url=file:$TESTTMP/dummy-remote/
+ > threshold=10
+ > [web]
+ > allow_push=*
+ > push_ssl=False
+ > EOF
+
+#if lfsremote-on
+ $ hg --config extensions.lfs= -R server \
+ > serve -p $HGPORT -d --pid-file=hg.pid --errorlog=$TESTTMP/errors.log
+#else
+ $ hg --config extensions.lfs=! -R server \
+ > serve -p $HGPORT -d --pid-file=hg.pid --errorlog=$TESTTMP/errors.log
+#endif
+
+ $ cat hg.pid >> $DAEMON_PIDS
+ $ hg clone -q http://localhost:$HGPORT client
+ $ grep 'lfs' client/.hg/requires $SERVER_REQUIRES
+ [1]
+
+--------------------------------------------------------------------------------
+Case #1: client with non-lfs content and the extension disabled; server with
+non-lfs content, and the extension enabled.
+
+ $ cd client
+ $ echo 'non-lfs' > nonlfs.txt
+ $ hg ci -Aqm 'non-lfs'
+ $ grep 'lfs' .hg/requires $SERVER_REQUIRES
+ [1]
+
+#if lfsremote-on
+
+ $ hg push -q
+ $ grep 'lfs' .hg/requires $SERVER_REQUIRES
+ [1]
+
+ $ hg clone -q http://localhost:$HGPORT $TESTTMP/client1_clone
+ $ grep 'lfs' $TESTTMP/client1_clone/.hg/requires $SERVER_REQUIRES
+ [1]
+
+ $ hg init $TESTTMP/client1_pull
+ $ hg -R $TESTTMP/client1_pull pull -q http://localhost:$HGPORT
+ $ grep 'lfs' $TESTTMP/client1_pull/.hg/requires $SERVER_REQUIRES
+ [1]
+
+ $ hg identify http://localhost:$HGPORT
+ d437e1d24fbd
+
+#endif
+
+--------------------------------------------------------------------------------
+Case #2: client with non-lfs content and the extension enabled; server with
+non-lfs content, and the extension state controlled by #testcases.
+
+ $ cat >> $HGRCPATH <<EOF
+ > [extensions]
+ > lfs =
+ > EOF
+ $ echo 'non-lfs' > nonlfs2.txt
+ $ hg ci -Aqm 'non-lfs file with lfs client'
+
+Since no lfs content has been added yet, the push is allowed, even when the
+extension is not enabled remotely.
+
+ $ hg push -q
+ $ grep 'lfs' .hg/requires $SERVER_REQUIRES
+ [1]
+
+ $ hg clone -q http://localhost:$HGPORT $TESTTMP/client2_clone
+ $ grep 'lfs' $TESTTMP/client2_clone/.hg/requires $SERVER_REQUIRES
+ [1]
+
+ $ hg init $TESTTMP/client2_pull
+ $ hg -R $TESTTMP/client2_pull pull -q http://localhost:$HGPORT
+ $ grep 'lfs' $TESTTMP/client2_pull/.hg/requires $SERVER_REQUIRES
+ [1]
+
+ $ hg identify http://localhost:$HGPORT
+ 1477875038c6
+
+--------------------------------------------------------------------------------
+Case #3: client with lfs content and the extension enabled; server with
+non-lfs content, and the extension state controlled by #testcases. The server
+should have an 'lfs' requirement after it picks up its first commit with a blob.
+
+ $ echo 'this is a big lfs file' > lfs.bin
+ $ hg ci -Aqm 'lfs'
+ $ grep 'lfs' .hg/requires $SERVER_REQUIRES
+ .hg/requires:lfs
+
+#if lfsremote-off
+ $ hg push -q
+ abort: required features are not supported in the destination: lfs
+ (enable the lfs extension on the server)
+ [255]
+#else
+ $ hg push -q
+#endif
+ $ grep 'lfs' .hg/requires $SERVER_REQUIRES
+ .hg/requires:lfs
+ $TESTTMP/server/.hg/requires:lfs (lfsremote-on !)
+
+ $ hg clone -q http://localhost:$HGPORT $TESTTMP/client3_clone
+ $ grep 'lfs' $TESTTMP/client3_clone/.hg/requires $SERVER_REQUIRES || true
+ $TESTTMP/client3_clone/.hg/requires:lfs (lfsremote-on !)
+ $TESTTMP/server/.hg/requires:lfs (lfsremote-on !)
+
+ $ hg init $TESTTMP/client3_pull
+ $ hg -R $TESTTMP/client3_pull pull -q http://localhost:$HGPORT
+ $ grep 'lfs' $TESTTMP/client3_pull/.hg/requires $SERVER_REQUIRES || true
+ $TESTTMP/client3_pull/.hg/requires:lfs (lfsremote-on !)
+ $TESTTMP/server/.hg/requires:lfs (lfsremote-on !)
+
+The difference here is that the push failed above when the extension isn't
+enabled on the server.
+ $ hg identify http://localhost:$HGPORT
+ 8374dc4052cb (lfsremote-on !)
+ 1477875038c6 (lfsremote-off !)
+
+Don't bother testing the lfsremote-off cases - the server won't be able
+to launch if there's lfs content and the extension is disabled.
+
+#if lfsremote-on
+
+--------------------------------------------------------------------------------
+Case #4: client with non-lfs content and the extension disabled; server with
+lfs content, and the extension enabled.
+
+ $ cat >> $HGRCPATH <<EOF
+ > [extensions]
+ > lfs = !
+ > EOF
+
+ $ hg init $TESTTMP/client4
+ $ cd $TESTTMP/client4
+ $ cat >> .hg/hgrc <<EOF
+ > [paths]
+ > default = http://localhost:$HGPORT
+ > EOF
+ $ echo 'non-lfs' > nonlfs2.txt
+ $ hg ci -Aqm 'non-lfs'
+ $ grep 'lfs' .hg/requires $SERVER_REQUIRES
+ $TESTTMP/server/.hg/requires:lfs
+
+ $ hg push -q --force
+ warning: repository is unrelated
+ $ grep 'lfs' .hg/requires $SERVER_REQUIRES
+ $TESTTMP/server/.hg/requires:lfs
+
+TODO: fail more gracefully.
+
+ $ hg clone -q http://localhost:$HGPORT $TESTTMP/client4_clone
+ abort: HTTP Error 500: Internal Server Error
+ [255]
+ $ grep 'lfs' $TESTTMP/client4_clone/.hg/requires $SERVER_REQUIRES
+ grep: $TESTTMP/client4_clone/.hg/requires: $ENOENT$
+ $TESTTMP/server/.hg/requires:lfs
+ [2]
+
+TODO: fail more gracefully.
+
+ $ hg init $TESTTMP/client4_pull
+ $ hg -R $TESTTMP/client4_pull pull -q http://localhost:$HGPORT
+ abort: HTTP Error 500: Internal Server Error
+ [255]
+ $ grep 'lfs' $TESTTMP/client4_pull/.hg/requires $SERVER_REQUIRES
+ $TESTTMP/server/.hg/requires:lfs
+
+ $ hg identify http://localhost:$HGPORT
+ 03b080fa9d93
+
+--------------------------------------------------------------------------------
+Case #5: client with non-lfs content and the extension enabled; server with
+lfs content, and the extension enabled.
+
+ $ cat >> $HGRCPATH <<EOF
+ > [extensions]
+ > lfs =
+ > EOF
+ $ echo 'non-lfs' > nonlfs3.txt
+ $ hg ci -Aqm 'non-lfs file with lfs client'
+
+ $ hg push -q
+ $ grep 'lfs' .hg/requires $SERVER_REQUIRES
+ $TESTTMP/server/.hg/requires:lfs
+
+ $ hg clone -q http://localhost:$HGPORT $TESTTMP/client5_clone
+ $ grep 'lfs' $TESTTMP/client5_clone/.hg/requires $SERVER_REQUIRES
+ $TESTTMP/client5_clone/.hg/requires:lfs
+ $TESTTMP/server/.hg/requires:lfs
+
+ $ hg init $TESTTMP/client5_pull
+ $ hg -R $TESTTMP/client5_pull pull -q http://localhost:$HGPORT
+ $ grep 'lfs' $TESTTMP/client5_pull/.hg/requires $SERVER_REQUIRES
+ $TESTTMP/client5_pull/.hg/requires:lfs
+ $TESTTMP/server/.hg/requires:lfs
+
+ $ hg identify http://localhost:$HGPORT
+ c729025cc5e3
+
+--------------------------------------------------------------------------------
+Case #6: client with lfs content and the extension enabled; server with
+lfs content, and the extension enabled.
+
+ $ echo 'this is another lfs file' > lfs2.txt
+ $ hg ci -Aqm 'lfs file with lfs client'
+
+ $ hg push -q
+ $ grep 'lfs' .hg/requires $SERVER_REQUIRES
+ .hg/requires:lfs
+ $TESTTMP/server/.hg/requires:lfs
+
+ $ hg clone -q http://localhost:$HGPORT $TESTTMP/client6_clone
+ $ grep 'lfs' $TESTTMP/client6_clone/.hg/requires $SERVER_REQUIRES
+ $TESTTMP/client6_clone/.hg/requires:lfs
+ $TESTTMP/server/.hg/requires:lfs
+
+ $ hg init $TESTTMP/client6_pull
+ $ hg -R $TESTTMP/client6_pull pull -q http://localhost:$HGPORT
+ $ grep 'lfs' $TESTTMP/client6_pull/.hg/requires $SERVER_REQUIRES
+ $TESTTMP/client6_pull/.hg/requires:lfs
+ $TESTTMP/server/.hg/requires:lfs
+
+ $ hg identify http://localhost:$HGPORT
+ d3b84d50eacb
+
+--------------------------------------------------------------------------------
+Misc: process dies early if a requirement exists and the extension is disabled
+
+ $ hg --config extensions.lfs=! summary
+ abort: repository requires features unknown to this Mercurial: lfs!
+ (see https://mercurial-scm.org/wiki/MissingRequirement for more information)
+ [255]
+
+#endif
+
+ $ $PYTHON $TESTDIR/killdaemons.py $DAEMON_PIDS
+
+#if lfsremote-on
+ $ cat $TESTTMP/errors.log | grep '^[A-Z]'
+ Traceback (most recent call last):
+ ValueError: no common changegroup version
+ Traceback (most recent call last):
+ ValueError: no common changegroup version
+#else
+ $ cat $TESTTMP/errors.log
+#endif
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-lfs-test-server.t Mon Jan 22 17:53:02 2018 -0500
@@ -0,0 +1,206 @@
+#require lfs-test-server
+
+ $ LFS_LISTEN="tcp://:$HGPORT"
+ $ LFS_HOST="localhost:$HGPORT"
+ $ LFS_PUBLIC=1
+ $ export LFS_LISTEN LFS_HOST LFS_PUBLIC
+#if no-windows
+ $ lfs-test-server &> lfs-server.log &
+ $ echo $! >> $DAEMON_PIDS
+#else
+ $ cat >> $TESTTMP/spawn.py <<EOF
+ > import os
+ > import subprocess
+ > import sys
+ >
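+ > # Find lfs-test-server.exe on PATH, start it with stdout/stderr redirected
+ > # to lfs-server.log, and print its pid so killdaemons.py can stop it later.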
+ > for path in os.environ["PATH"].split(os.pathsep):
+ > exe = os.path.join(path, 'lfs-test-server.exe')
+ > if os.path.exists(exe):
+ > with open('lfs-server.log', 'wb') as out:
+ > p = subprocess.Popen(exe, stdout=out, stderr=out)
+ > sys.stdout.write('%s\n' % p.pid)
+ > sys.exit(0)
+ > sys.exit(1)
+ > EOF
+ $ $PYTHON $TESTTMP/spawn.py >> $DAEMON_PIDS
+#endif
+
+ $ cat >> $HGRCPATH <<EOF
+ > [extensions]
+ > lfs=
+ > [lfs]
+ > url=http://foo:bar@$LFS_HOST/
+ > track=all()
+ > EOF
+
+ $ hg init repo1
+ $ cd repo1
+ $ echo THIS-IS-LFS > a
+ $ hg commit -m a -A a
+
+A push can be serviced directly from the usercache if the blob isn't in the
+local store.
+
+ $ hg init ../repo2
+ $ mv .hg/store/lfs .hg/store/lfs_
+ $ hg push ../repo2 -v
+ pushing to ../repo2
+ searching for changes
+ lfs: uploading 31cf46fbc4ecd458a0943c5b4881f1f5a6dd36c53d6167d5b69ac45149b38e5b (12 bytes)
+ lfs: processed: 31cf46fbc4ecd458a0943c5b4881f1f5a6dd36c53d6167d5b69ac45149b38e5b
+ 1 changesets found
+ uncompressed size of bundle content:
+ * (changelog) (glob)
+ * (manifests) (glob)
+ * a (glob)
+ adding changesets
+ adding manifests
+ adding file changes
+ added 1 changesets with 1 changes to 1 files
+ calling hook pretxnchangegroup.lfs: hgext.lfs.checkrequireslfs
+ $ mv .hg/store/lfs_ .hg/store/lfs
+
+Clear the cache to force a download
+ $ rm -rf `hg config lfs.usercache`
+ $ cd ../repo2
+ $ hg update tip -v
+ resolving manifests
+ getting a
+ lfs: downloading 31cf46fbc4ecd458a0943c5b4881f1f5a6dd36c53d6167d5b69ac45149b38e5b (12 bytes)
+ lfs: adding 31cf46fbc4ecd458a0943c5b4881f1f5a6dd36c53d6167d5b69ac45149b38e5b to the usercache
+ lfs: processed: 31cf46fbc4ecd458a0943c5b4881f1f5a6dd36c53d6167d5b69ac45149b38e5b
+ lfs: found 31cf46fbc4ecd458a0943c5b4881f1f5a6dd36c53d6167d5b69ac45149b38e5b in the local lfs store
+ 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+
+When the server has some blobs already
+
+ $ hg mv a b
+ $ echo ANOTHER-LARGE-FILE > c
+ $ echo ANOTHER-LARGE-FILE2 > d
+ $ hg commit -m b-and-c -A b c d
+ $ hg push ../repo1 -v | grep -v '^ '
+ pushing to ../repo1
+ searching for changes
+ lfs: need to transfer 2 objects (39 bytes)
+ lfs: uploading 37a65ab78d5ecda767e8622c248b5dbff1e68b1678ab0e730d5eb8601ec8ad19 (20 bytes)
+ lfs: processed: 37a65ab78d5ecda767e8622c248b5dbff1e68b1678ab0e730d5eb8601ec8ad19
+ lfs: uploading d11e1a642b60813aee592094109b406089b8dff4cb157157f753418ec7857998 (19 bytes)
+ lfs: processed: d11e1a642b60813aee592094109b406089b8dff4cb157157f753418ec7857998
+ 1 changesets found
+ uncompressed size of bundle content:
+ adding changesets
+ adding manifests
+ adding file changes
+ added 1 changesets with 3 changes to 3 files
+
+Clear the cache to force a download
+ $ rm -rf `hg config lfs.usercache`
+ $ hg --repo ../repo1 update tip -v
+ resolving manifests
+ getting b
+ lfs: found 31cf46fbc4ecd458a0943c5b4881f1f5a6dd36c53d6167d5b69ac45149b38e5b in the local lfs store
+ getting c
+ lfs: downloading d11e1a642b60813aee592094109b406089b8dff4cb157157f753418ec7857998 (19 bytes)
+ lfs: adding d11e1a642b60813aee592094109b406089b8dff4cb157157f753418ec7857998 to the usercache
+ lfs: processed: d11e1a642b60813aee592094109b406089b8dff4cb157157f753418ec7857998
+ lfs: found d11e1a642b60813aee592094109b406089b8dff4cb157157f753418ec7857998 in the local lfs store
+ getting d
+ lfs: downloading 37a65ab78d5ecda767e8622c248b5dbff1e68b1678ab0e730d5eb8601ec8ad19 (20 bytes)
+ lfs: adding 37a65ab78d5ecda767e8622c248b5dbff1e68b1678ab0e730d5eb8601ec8ad19 to the usercache
+ lfs: processed: 37a65ab78d5ecda767e8622c248b5dbff1e68b1678ab0e730d5eb8601ec8ad19
+ lfs: found 37a65ab78d5ecda767e8622c248b5dbff1e68b1678ab0e730d5eb8601ec8ad19 in the local lfs store
+ 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
+
+Test a corrupt file download, but clear the cache first to force a download.
+
+ $ rm -rf `hg config lfs.usercache`
+ $ cp $TESTTMP/lfs-content/d1/1e/1a642b60813aee592094109b406089b8dff4cb157157f753418ec7857998 blob
+ $ echo 'damage' > $TESTTMP/lfs-content/d1/1e/1a642b60813aee592094109b406089b8dff4cb157157f753418ec7857998
+ $ rm ../repo1/.hg/store/lfs/objects/d1/1e1a642b60813aee592094109b406089b8dff4cb157157f753418ec7857998
+ $ rm ../repo1/*
+
+ $ hg --repo ../repo1 update -C tip -v
+ resolving manifests
+ getting a
+ lfs: found 31cf46fbc4ecd458a0943c5b4881f1f5a6dd36c53d6167d5b69ac45149b38e5b in the local lfs store
+ getting b
+ lfs: found 31cf46fbc4ecd458a0943c5b4881f1f5a6dd36c53d6167d5b69ac45149b38e5b in the local lfs store
+ getting c
+ lfs: downloading d11e1a642b60813aee592094109b406089b8dff4cb157157f753418ec7857998 (19 bytes)
+ abort: corrupt remote lfs object: d11e1a642b60813aee592094109b406089b8dff4cb157157f753418ec7857998
+ [255]
+
+The corrupted blob is not added to the usercache or local store
+
+ $ test -f ../repo1/.hg/store/lfs/objects/d1/1e1a642b60813aee592094109b406089b8dff4cb157157f753418ec7857998
+ [1]
+ $ test -f `hg config lfs.usercache`/d1/1e1a642b60813aee592094109b406089b8dff4cb157157f753418ec7857998
+ [1]
+ $ cp blob $TESTTMP/lfs-content/d1/1e/1a642b60813aee592094109b406089b8dff4cb157157f753418ec7857998
+
+Test a corrupted file upload
+
+ $ echo 'another lfs blob' > b
+ $ hg ci -m 'another blob'
+ $ echo 'damage' > .hg/store/lfs/objects/e6/59058e26b07b39d2a9c7145b3f99b41f797b6621c8076600e9cb7ee88291f0
+ $ hg push -v ../repo1
+ pushing to ../repo1
+ searching for changes
+ lfs: uploading e659058e26b07b39d2a9c7145b3f99b41f797b6621c8076600e9cb7ee88291f0 (17 bytes)
+ abort: detected corrupt lfs object: e659058e26b07b39d2a9c7145b3f99b41f797b6621c8076600e9cb7ee88291f0
+ (run hg verify)
+ [255]
+
+Check error message when the remote missed a blob:
+
+ $ echo FFFFF > b
+ $ hg commit -m b -A b
+ $ echo FFFFF >> b
+ $ hg commit -m b b
+ $ rm -rf .hg/store/lfs
+ $ rm -rf `hg config lfs.usercache`
+ $ hg update -C '.^'
+ abort: LFS server error. Remote object for "b" not found:(.*)! (re)
+ [255]
+
+Check error message when object does not exist:
+
+ $ cd $TESTTMP
+ $ hg init test && cd test
+ $ echo "[extensions]" >> .hg/hgrc
+ $ echo "lfs=" >> .hg/hgrc
+ $ echo "[lfs]" >> .hg/hgrc
+ $ echo "threshold=1" >> .hg/hgrc
+ $ echo a > a
+ $ hg add a
+ $ hg commit -m 'test'
+ $ echo aaaaa > a
+ $ hg commit -m 'largefile'
+ $ hg debugdata .hg/store/data/a.i 1 # verify this is not the file content, but includes "oid", the LFS "pointer".
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:bdc26931acfb734b142a8d675f205becf27560dc461f501822de13274fe6fc8a
+ size 6
+ x-is-binary 0
+ $ cd ..
+ $ rm -rf `hg config lfs.usercache`
+
+(Restart the server in a different location so it no longer has the content)
+
+ $ $PYTHON $RUNTESTDIR/killdaemons.py $DAEMON_PIDS
+ $ rm $DAEMON_PIDS
+ $ mkdir $TESTTMP/lfs-server2
+ $ cd $TESTTMP/lfs-server2
+#if no-windows
+ $ lfs-test-server &> lfs-server.log &
+ $ echo $! >> $DAEMON_PIDS
+#else
+ $ $PYTHON $TESTTMP/spawn.py >> $DAEMON_PIDS
+#endif
+
+ $ cd $TESTTMP
+ $ hg clone test test2
+ updating to branch default
+ abort: LFS server error. Remote object for "a" not found:(.*)! (re)
+ [255]
+
+ $ $PYTHON $RUNTESTDIR/killdaemons.py $DAEMON_PIDS
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-lfs.t Mon Jan 22 17:53:02 2018 -0500
@@ -0,0 +1,1065 @@
+# Initial setup
+
+ $ cat >> $HGRCPATH << EOF
+ > [extensions]
+ > lfs=
+ > [lfs]
+ > # Test deprecated config
+ > threshold=1000B
+ > EOF
+
+ $ LONG=AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCC
+
+# Prepare server and enable extension
+ $ hg init server
+ $ hg clone -q server client
+ $ cd client
+
+# Commit small file
+ $ echo s > smallfile
+ $ echo '**.py = LF' > .hgeol
+ $ hg --config lfs.track='size(">1000B") | "path:.hgeol"' commit -Aqm "add small file"
+ $ hg debugdata .hgeol 0
+ **.py = LF
+
+# Commit large file
+ $ echo $LONG > largefile
+ $ grep lfs .hg/requires
+ [1]
+ $ hg commit --traceback -Aqm "add large file"
+ $ grep lfs .hg/requires
+ lfs
+
+# Ensure metadata is stored
+ $ hg debugdata largefile 0
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f11e77c257047a398492d8d6cb9f6acf3aa7c4384bb23080b43546053e183e4b
+ size 1501
+ x-is-binary 0
+
+# Check the blobstore is populated
+ $ find .hg/store/lfs/objects | sort
+ .hg/store/lfs/objects
+ .hg/store/lfs/objects/f1
+ .hg/store/lfs/objects/f1/1e77c257047a398492d8d6cb9f6acf3aa7c4384bb23080b43546053e183e4b
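+
+# (As the path above suggests, the first two hex digits of the sha256 oid name
+# a subdirectory and the remainder names the blob; a quick sketch:)
+ >>> oid = 'f11e77c257047a398492d8d6cb9f6acf3aa7c4384bb23080b43546053e183e4b'
+ >>> print('.hg/store/lfs/objects/%s/%s' % (oid[:2], oid[2:]))
+ .hg/store/lfs/objects/f1/1e77c257047a398492d8d6cb9f6acf3aa7c4384bb23080b43546053e183e4b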
+
+# Check the blob stored contains the actual contents of the file
+ $ cat .hg/store/lfs/objects/f1/1e77c257047a398492d8d6cb9f6acf3aa7c4384bb23080b43546053e183e4b
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCC
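+
+# (The oid in the pointer above is just the sha256 of these blob contents, so
+# recomputing it leads back to the same storage path:)
+ >>> import hashlib
+ >>> path = '.hg/store/lfs/objects/f1/1e77c257047a398492d8d6cb9f6acf3aa7c4384bb23080b43546053e183e4b'
+ >>> with open(path, 'rb') as f:
+ ...     blob = f.read()
+ >>> print(hashlib.sha256(blob).hexdigest()[:2])
+ f1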
+
+# Push changes to the server
+
+ $ hg push
+ pushing to $TESTTMP/server
+ searching for changes
+ abort: lfs.url needs to be configured
+ [255]
+
+ $ cat >> $HGRCPATH << EOF
+ > [lfs]
+ > url=file:$TESTTMP/dummy-remote/
+ > EOF
+
+Pushing to a local non-lfs repo with the extension enabled will add the
+lfs requirement
+
+ $ grep lfs $TESTTMP/server/.hg/requires
+ [1]
+ $ hg push -v | egrep -v '^(uncompressed| )'
+ pushing to $TESTTMP/server
+ searching for changes
+ lfs: found f11e77c257047a398492d8d6cb9f6acf3aa7c4384bb23080b43546053e183e4b in the local lfs store
+ 2 changesets found
+ adding changesets
+ adding manifests
+ adding file changes
+ added 2 changesets with 3 changes to 3 files
+ calling hook pretxnchangegroup.lfs: hgext.lfs.checkrequireslfs
+ $ grep lfs $TESTTMP/server/.hg/requires
+ lfs
+
+# Unknown URL scheme
+
+ $ hg push --config lfs.url=ftp://foobar
+ abort: lfs: unknown url scheme: ftp
+ [255]
+
+ $ cd ../
+
+# Initialize new client (not cloning) and setup extension
+ $ hg init client2
+ $ cd client2
+ $ cat >> .hg/hgrc <<EOF
+ > [paths]
+ > default = $TESTTMP/server
+ > EOF
+
+# Pull from server
+
+Pulling a local lfs repo into a local non-lfs repo with the extension
+enabled adds the lfs requirement
+
+ $ grep lfs .hg/requires $TESTTMP/server/.hg/requires
+ $TESTTMP/server/.hg/requires:lfs
+ $ hg pull default
+ pulling from $TESTTMP/server
+ requesting all changes
+ adding changesets
+ adding manifests
+ adding file changes
+ added 2 changesets with 3 changes to 3 files
+ new changesets 0ead593177f7:b88141481348
+ (run 'hg update' to get a working copy)
+ $ grep lfs .hg/requires $TESTTMP/server/.hg/requires
+ .hg/requires:lfs
+ $TESTTMP/server/.hg/requires:lfs
+
+# Check the blobstore is not yet populated
+ $ [ -d .hg/store/lfs/objects ]
+ [1]
+
+# Update to the last revision containing the large file
+ $ hg update
+ 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
+
+# Check the blobstore has been populated on update
+ $ find .hg/store/lfs/objects | sort
+ .hg/store/lfs/objects
+ .hg/store/lfs/objects/f1
+ .hg/store/lfs/objects/f1/1e77c257047a398492d8d6cb9f6acf3aa7c4384bb23080b43546053e183e4b
+
+# Check the contents of the file are fetched from blobstore when requested
+ $ hg cat -r . largefile
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCC
+
+# Check the file has been copied in the working copy
+ $ cat largefile
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCC
+
+ $ cd ..
+
+# Check rename, and switch between large and small files
+
+ $ hg init repo3
+ $ cd repo3
+ $ cat >> .hg/hgrc << EOF
+ > [lfs]
+ > track=size(">10B")
+ > EOF
+
+ $ echo LONGER-THAN-TEN-BYTES-WILL-TRIGGER-LFS > large
+ $ echo SHORTER > small
+ $ hg add . -q
+ $ hg commit -m 'commit with lfs content'
+
+ $ hg mv large l
+ $ hg mv small s
+ $ hg commit -m 'renames'
+
+ $ echo SHORT > l
+ $ echo BECOME-LARGER-FROM-SHORTER > s
+ $ hg commit -m 'large to small, small to large'
+
+ $ echo 1 >> l
+ $ echo 2 >> s
+ $ hg commit -m 'random modifications'
+
+ $ echo RESTORE-TO-BE-LARGE > l
+ $ echo SHORTER > s
+ $ hg commit -m 'switch large and small again'
+
+# Test lfs_files template
+
+ $ hg log -r 'all()' -T '{rev} {join(lfs_files, ", ")}\n'
+ 0 large
+ 1 l
+ 2 s
+ 3 s
+ 4 l
+
+# Push and pull the above repo
+
+ $ hg --cwd .. init repo4
+ $ hg push ../repo4
+ pushing to ../repo4
+ searching for changes
+ adding changesets
+ adding manifests
+ adding file changes
+ added 5 changesets with 10 changes to 4 files
+
+ $ hg --cwd .. init repo5
+ $ hg --cwd ../repo5 pull ../repo3
+ pulling from ../repo3
+ requesting all changes
+ adding changesets
+ adding manifests
+ adding file changes
+ added 5 changesets with 10 changes to 4 files
+ new changesets fd47a419c4f7:5adf850972b9
+ (run 'hg update' to get a working copy)
+
+ $ cd ..
+
+# Test clone
+
+ $ hg init repo6
+ $ cd repo6
+ $ cat >> .hg/hgrc << EOF
+ > [lfs]
+ > track=size(">30B")
+ > EOF
+
+ $ echo LARGE-BECAUSE-IT-IS-MORE-THAN-30-BYTES > large
+ $ echo SMALL > small
+ $ hg commit -Aqm 'create a lfs file' large small
+ $ hg debuglfsupload -r 'all()' -v
+ lfs: found 8e92251415339ae9b148c8da89ed5ec665905166a1ab11b09dca8fad83344738 in the local lfs store
+
+ $ cd ..
+
+ $ hg clone repo6 repo7
+ updating to branch default
+ 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ $ cd repo7
+ $ hg config extensions --debug | grep lfs
+ $TESTTMP/repo7/.hg/hgrc:*: extensions.lfs= (glob)
+ $ cat large
+ LARGE-BECAUSE-IT-IS-MORE-THAN-30-BYTES
+ $ cat small
+ SMALL
+
+ $ cd ..
+
+ $ hg --config extensions.share= share repo7 sharedrepo
+ updating working directory
+ 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ $ hg -R sharedrepo config extensions --debug | grep lfs
+ $TESTTMP/sharedrepo/.hg/hgrc:*: extensions.lfs= (glob)
+
+# Test rename and status
+
+ $ hg init repo8
+ $ cd repo8
+ $ cat >> .hg/hgrc << EOF
+ > [lfs]
+ > track=size(">10B")
+ > EOF
+
+ $ echo THIS-IS-LFS-BECAUSE-10-BYTES > a1
+ $ echo SMALL > a2
+ $ hg commit -m a -A a1 a2
+ $ hg status
+ $ hg mv a1 b1
+ $ hg mv a2 a1
+ $ hg mv b1 a2
+ $ hg commit -m b
+ $ hg status
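+
+# Both writes below start with "\1\n", the byte sequence that normally
+# introduces filelog metadata, so the "meta" commit below exercises the
+# escaping path for an lfs file (a2) and a small non-lfs file (a1).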
+ >>> with open('a2', 'wb') as f:
+ ... f.write(b'\1\nSTART-WITH-HG-FILELOG-METADATA')
+ >>> with open('a1', 'wb') as f:
+ ... f.write(b'\1\nMETA\n')
+ $ hg commit -m meta
+ $ hg status
+ $ hg log -T '{rev}: {file_copies} | {file_dels} | {file_adds}\n'
+ 2: | |
+ 1: a1 (a2)a2 (a1) | |
+ 0: | | a1 a2
+
+ $ for n in a1 a2; do
+ > for r in 0 1 2; do
+ > printf '\n%s @ %s\n' $n $r
+ > hg debugdata $n $r
+ > done
+ > done
+
+ a1 @ 0
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:5bb8341bee63b3649f222b2215bde37322bea075a30575aa685d8f8d21c77024
+ size 29
+ x-is-binary 0
+
+ a1 @ 1
+ \x01 (esc)
+ copy: a2
+ copyrev: 50470ad23cf937b1f4b9f80bfe54df38e65b50d9
+ \x01 (esc)
+ SMALL
+
+ a1 @ 2
+ \x01 (esc)
+ \x01 (esc)
+ \x01 (esc)
+ META
+
+ a2 @ 0
+ SMALL
+
+ a2 @ 1
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:5bb8341bee63b3649f222b2215bde37322bea075a30575aa685d8f8d21c77024
+ size 29
+ x-hg-copy a1
+ x-hg-copyrev be23af27908a582af43e5cda209a5a9b319de8d4
+ x-is-binary 0
+
+ a2 @ 2
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:876dadc86a8542f9798048f2c47f51dbf8e4359aed883e8ec80c5db825f0d943
+ size 32
+ x-is-binary 0
+
+# Verify commit hashes include rename metadata
+
+ $ hg log -T '{rev}:{node|short} {desc}\n'
+ 2:0fae949de7fa meta
+ 1:9cd6bdffdac0 b
+ 0:7f96794915f7 a
+
+ $ cd ..
+
+# Test bundle
+
+ $ hg init repo9
+ $ cd repo9
+ $ cat >> .hg/hgrc << EOF
+ > [lfs]
+ > track=size(">10B")
+ > [diff]
+ > git=1
+ > EOF
+
+ $ for i in 0 single two three 4; do
+ > echo 'THIS-IS-LFS-'$i > a
+ > hg commit -m a-$i -A a
+ > done
+
+ $ hg update 2 -q
+ $ echo 'THIS-IS-LFS-2-CHILD' > a
+ $ hg commit -m branching -q
+
+ $ hg bundle --base 1 bundle.hg -v
+ lfs: found 5ab7a3739a5feec94a562d070a14f36dba7cad17e5484a4a89eea8e5f3166888 in the local lfs store
+ lfs: found a9c7d1cd6ce2b9bbdf46ed9a862845228717b921c089d0d42e3bcaed29eb612e in the local lfs store
+ lfs: found f693890c49c409ec33673b71e53f297681f76c1166daf33b2ad7ebf8b1d3237e in the local lfs store
+ lfs: found fda198fea753eb66a252e9856915e1f5cddbe41723bd4b695ece2604ad3c9f75 in the local lfs store
+ 4 changesets found
+ uncompressed size of bundle content:
+ * (changelog) (glob)
+ * (manifests) (glob)
+ * a (glob)
+ $ hg --config extensions.strip= strip -r 2 --no-backup --force -q
+ $ hg -R bundle.hg log -p -T '{rev} {desc}\n' a
+ 5 branching
+ diff --git a/a b/a
+ --- a/a
+ +++ b/a
+ @@ -1,1 +1,1 @@
+ -THIS-IS-LFS-two
+ +THIS-IS-LFS-2-CHILD
+
+ 4 a-4
+ diff --git a/a b/a
+ --- a/a
+ +++ b/a
+ @@ -1,1 +1,1 @@
+ -THIS-IS-LFS-three
+ +THIS-IS-LFS-4
+
+ 3 a-three
+ diff --git a/a b/a
+ --- a/a
+ +++ b/a
+ @@ -1,1 +1,1 @@
+ -THIS-IS-LFS-two
+ +THIS-IS-LFS-three
+
+ 2 a-two
+ diff --git a/a b/a
+ --- a/a
+ +++ b/a
+ @@ -1,1 +1,1 @@
+ -THIS-IS-LFS-single
+ +THIS-IS-LFS-two
+
+ 1 a-single
+ diff --git a/a b/a
+ --- a/a
+ +++ b/a
+ @@ -1,1 +1,1 @@
+ -THIS-IS-LFS-0
+ +THIS-IS-LFS-single
+
+ 0 a-0
+ diff --git a/a b/a
+ new file mode 100644
+ --- /dev/null
+ +++ b/a
+ @@ -0,0 +1,1 @@
+ +THIS-IS-LFS-0
+
+ $ hg bundle -R bundle.hg --base 1 bundle-again.hg -q
+ $ hg -R bundle-again.hg log -p -T '{rev} {desc}\n' a
+ 5 branching
+ diff --git a/a b/a
+ --- a/a
+ +++ b/a
+ @@ -1,1 +1,1 @@
+ -THIS-IS-LFS-two
+ +THIS-IS-LFS-2-CHILD
+
+ 4 a-4
+ diff --git a/a b/a
+ --- a/a
+ +++ b/a
+ @@ -1,1 +1,1 @@
+ -THIS-IS-LFS-three
+ +THIS-IS-LFS-4
+
+ 3 a-three
+ diff --git a/a b/a
+ --- a/a
+ +++ b/a
+ @@ -1,1 +1,1 @@
+ -THIS-IS-LFS-two
+ +THIS-IS-LFS-three
+
+ 2 a-two
+ diff --git a/a b/a
+ --- a/a
+ +++ b/a
+ @@ -1,1 +1,1 @@
+ -THIS-IS-LFS-single
+ +THIS-IS-LFS-two
+
+ 1 a-single
+ diff --git a/a b/a
+ --- a/a
+ +++ b/a
+ @@ -1,1 +1,1 @@
+ -THIS-IS-LFS-0
+ +THIS-IS-LFS-single
+
+ 0 a-0
+ diff --git a/a b/a
+ new file mode 100644
+ --- /dev/null
+ +++ b/a
+ @@ -0,0 +1,1 @@
+ +THIS-IS-LFS-0
+
+ $ cd ..
+
+# Test isbinary
+
+ $ hg init repo10
+ $ cd repo10
+ $ cat >> .hg/hgrc << EOF
+ > [extensions]
+ > lfs=
+ > [lfs]
+ > track=all()
+ > EOF
+ $ $PYTHON <<'EOF'
+ > def write(path, content):
+ > with open(path, 'wb') as f:
+ > f.write(content)
+ > write('a', b'\0\0')
+ > write('b', b'\1\n')
+ > write('c', b'\1\n\0')
+ > write('d', b'xx')
+ > EOF
+ $ hg add a b c d
+ $ hg diff --stat
+ a | Bin
+ b | 1 +
+ c | Bin
+ d | 1 +
+ 4 files changed, 2 insertions(+), 0 deletions(-)
+ $ hg commit -m binarytest
+ $ cat > $TESTTMP/dumpbinary.py << EOF
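+ > # tiny test extension: report isbinary() for each of files a-d at revision '.'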
+ > def reposetup(ui, repo):
+ > for n in 'abcd':
+ > ui.write(('%s: binary=%s\n') % (n, repo['.'][n].isbinary()))
+ > EOF
+ $ hg --config extensions.dumpbinary=$TESTTMP/dumpbinary.py id --trace
+ a: binary=True
+ b: binary=False
+ c: binary=True
+ d: binary=False
+ b55353847f02 tip
+
+ $ cd ..
+
+# Test fctx.cmp fastpath - diff without LFS blobs
+
+ $ hg init repo12
+ $ cd repo12
+ $ cat >> .hg/hgrc <<EOF
+ > [lfs]
+ > threshold=1
+ > EOF
+ $ cat > ../patch.diff <<EOF
+ > # HG changeset patch
+ > 2
+ >
+ > diff --git a/a b/a
+ > old mode 100644
+ > new mode 100755
+ > EOF
+
+ $ for i in 1 2 3; do
+ > cp ../repo10/a a
+ > if [ $i = 3 ]; then
+ > # make a content-only change
+ > hg import -q --bypass ../patch.diff
+ > hg update -q
+ > rm ../patch.diff
+ > else
+ > echo $i >> a
+ > hg commit -m $i -A a
+ > fi
+ > done
+ $ [ -d .hg/store/lfs/objects ]
+
+ $ cd ..
+
+ $ hg clone repo12 repo13 --noupdate
+ $ cd repo13
+ $ hg log --removed -p a -T '{desc}\n' --config diff.nobinary=1 --git
+ 2
+ diff --git a/a b/a
+ old mode 100644
+ new mode 100755
+
+ 2
+ diff --git a/a b/a
+ Binary file a has changed
+
+ 1
+ diff --git a/a b/a
+ new file mode 100644
+ Binary file a has changed
+
+ $ [ -d .hg/store/lfs/objects ]
+ [1]
+
+ $ cd ..
+
+# Test filter
+
+ $ hg init repo11
+ $ cd repo11
+ $ cat >> .hg/hgrc << EOF
+ > [lfs]
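+ > # track: *.a files over 5 bytes, *.b files of at most 5 bytes, *.c files
+ > # under d/ (except d/c.c), and anything over 10 bytes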
+ > track=(**.a & size(">5B")) | (**.b & !size(">5B"))
+ > | (**.c & "path:d" & !"path:d/c.c") | size(">10B")
+ > EOF
+
+ $ mkdir a
+ $ echo aaaaaa > a/1.a
+ $ echo a > a/2.a
+ $ echo aaaaaa > 1.b
+ $ echo a > 2.b
+ $ echo a > 1.c
+ $ mkdir d
+ $ echo a > d/c.c
+ $ echo a > d/d.c
+ $ echo aaaaaaaaaaaa > x
+ $ hg add . -q
+ $ hg commit -m files
+
+ $ for p in a/1.a a/2.a 1.b 2.b 1.c d/c.c d/d.c x; do
+ > if hg debugdata $p 0 2>&1 | grep git-lfs >/dev/null; then
+ > echo "${p}: is lfs"
+ > else
+ > echo "${p}: not lfs"
+ > fi
+ > done
+ a/1.a: is lfs
+ a/2.a: not lfs
+ 1.b: not lfs
+ 2.b: is lfs
+ 1.c: not lfs
+ d/c.c: not lfs
+ d/d.c: is lfs
+ x: is lfs
+
+ $ cd ..
+
+# Verify the repos
+
+ $ cat > $TESTTMP/dumpflog.py << EOF
+ > # print raw revision sizes, flags, and hashes for certain files
+ > import hashlib
+ > from mercurial import revlog
+ > from mercurial.node import short
+ > def hash(rawtext):
+ > h = hashlib.sha512()
+ > h.update(rawtext)
+ > return h.hexdigest()[:4]
+ > def reposetup(ui, repo):
+ > # these 2 files are interesting
+ > for name in ['l', 's']:
+ > fl = repo.file(name)
+ > if len(fl) == 0:
+ > continue
+ > sizes = [revlog.revlog.rawsize(fl, i) for i in fl]
+ > texts = [fl.revision(i, raw=True) for i in fl]
+ > flags = [int(fl.flags(i)) for i in fl]
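+ > # (flag 8192 == 0x2000 marks revisions stored externally as lfs pointers)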
+ > hashes = [hash(t) for t in texts]
+ > print(' %s: rawsizes=%r flags=%r hashes=%r'
+ > % (name, sizes, flags, hashes))
+ > EOF
+
+ $ for i in client client2 server repo3 repo4 repo5 repo6 repo7 repo8 repo9 \
+ > repo10; do
+ > echo 'repo:' $i
+ > hg --cwd $i verify --config extensions.dumpflog=$TESTTMP/dumpflog.py -q
+ > done
+ repo: client
+ repo: client2
+ repo: server
+ repo: repo3
+ l: rawsizes=[211, 6, 8, 141] flags=[8192, 0, 0, 8192] hashes=['d2b8', '948c', 'cc88', '724d']
+ s: rawsizes=[74, 141, 141, 8] flags=[0, 8192, 8192, 0] hashes=['3c80', 'fce0', '874a', '826b']
+ repo: repo4
+ l: rawsizes=[211, 6, 8, 141] flags=[8192, 0, 0, 8192] hashes=['d2b8', '948c', 'cc88', '724d']
+ s: rawsizes=[74, 141, 141, 8] flags=[0, 8192, 8192, 0] hashes=['3c80', 'fce0', '874a', '826b']
+ repo: repo5
+ l: rawsizes=[211, 6, 8, 141] flags=[8192, 0, 0, 8192] hashes=['d2b8', '948c', 'cc88', '724d']
+ s: rawsizes=[74, 141, 141, 8] flags=[0, 8192, 8192, 0] hashes=['3c80', 'fce0', '874a', '826b']
+ repo: repo6
+ repo: repo7
+ repo: repo8
+ repo: repo9
+ repo: repo10
+
+repo13 doesn't have any cached lfs files and its source never pushed its
+files. Therefore, the files don't exist in the remote store, and the files in
+the user cache are used instead.
+
+ $ test -d $TESTTMP/repo13/.hg/store/lfs/objects
+ [1]
+
+ $ hg --config extensions.share= share repo13 repo14
+ updating working directory
+ 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ $ hg -R repo14 -q verify
+
+ $ hg clone repo13 repo15
+ updating to branch default
+ 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ $ hg -R repo15 -q verify
+
+If the source repo doesn't have the blob (maybe it was pulled or cloned with
+--noupdate), the blob is still accessible via the global cache to send to the
+remote store.
+
+ $ rm -rf $TESTTMP/repo15/.hg/store/lfs
+ $ hg init repo16
+ $ hg -R repo15 push repo16
+ pushing to repo16
+ searching for changes
+ adding changesets
+ adding manifests
+ adding file changes
+ added 3 changesets with 2 changes to 1 files
+ $ hg -R repo15 -q verify
+
+Test damaged file scenarios. (This also damages the usercache because of the
+hardlinks.)
+
+ $ echo 'damage' >> repo5/.hg/store/lfs/objects/66/100b384bf761271b407d79fc30cdd0554f3b2c5d944836e936d584b88ce88e
+
+Repo with damaged lfs objects in any revision will fail verification.
+
+ $ hg -R repo5 verify
+ checking changesets
+ checking manifests
+ crosschecking files in changesets and manifests
+ checking files
+ l@1: unpacking 46a2f24864bc: integrity check failed on data/l.i:0
+ large@0: unpacking 2c531e0992ff: integrity check failed on data/large.i:0
+ 4 files, 5 changesets, 10 total revisions
+ 2 integrity errors encountered!
+ (first damaged changeset appears to be 0)
+ [1]
+
+Updates work after cloning a damaged repo, if the damaged lfs objects aren't in
+the update destination. Those objects won't be added to the new repo's store
+because they aren't accessed.
+
+ $ hg clone -v repo5 fromcorrupt
+ updating to branch default
+ resolving manifests
+ getting l
+ lfs: found 22f66a3fc0b9bf3f012c814303995ec07099b3a9ce02a7af84b5970811074a3b in the usercache
+ getting s
+ 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ $ test -f fromcorrupt/.hg/store/lfs/objects/66/100b384bf761271b407d79fc30cdd0554f3b2c5d944836e936d584b88ce88e
+ [1]
+
+Verify will copy/link all lfs objects into the local store that aren't already
+present. Bypass the corrupted usercache to show that verify works when fed by
+the (uncorrupted) remote store.
+
+ $ hg -R fromcorrupt --config lfs.usercache=emptycache verify -v
+ repository uses revlog format 1
+ checking changesets
+ checking manifests
+ crosschecking files in changesets and manifests
+ checking files
+ lfs: adding 66100b384bf761271b407d79fc30cdd0554f3b2c5d944836e936d584b88ce88e to the usercache
+ lfs: found 66100b384bf761271b407d79fc30cdd0554f3b2c5d944836e936d584b88ce88e in the local lfs store
+ lfs: found 22f66a3fc0b9bf3f012c814303995ec07099b3a9ce02a7af84b5970811074a3b in the local lfs store
+ lfs: found 66100b384bf761271b407d79fc30cdd0554f3b2c5d944836e936d584b88ce88e in the local lfs store
+ lfs: adding 89b6070915a3d573ff3599d1cda305bc5e38549b15c4847ab034169da66e1ca8 to the usercache
+ lfs: found 89b6070915a3d573ff3599d1cda305bc5e38549b15c4847ab034169da66e1ca8 in the local lfs store
+ lfs: adding b1a6ea88da0017a0e77db139a54618986e9a2489bee24af9fe596de9daac498c to the usercache
+ lfs: found b1a6ea88da0017a0e77db139a54618986e9a2489bee24af9fe596de9daac498c in the local lfs store
+ 4 files, 5 changesets, 10 total revisions
+
+Verify will not copy/link a corrupted file from the usercache into the local
+store and poison it. (The verify with a good remote now works.)
+
+ $ rm -r fromcorrupt/.hg/store/lfs/objects/66/100b384bf761271b407d79fc30cdd0554f3b2c5d944836e936d584b88ce88e
+ $ hg -R fromcorrupt verify -v
+ repository uses revlog format 1
+ checking changesets
+ checking manifests
+ crosschecking files in changesets and manifests
+ checking files
+ l@1: unpacking 46a2f24864bc: integrity check failed on data/l.i:0
+ lfs: found 22f66a3fc0b9bf3f012c814303995ec07099b3a9ce02a7af84b5970811074a3b in the local lfs store
+ large@0: unpacking 2c531e0992ff: integrity check failed on data/large.i:0
+ lfs: found 89b6070915a3d573ff3599d1cda305bc5e38549b15c4847ab034169da66e1ca8 in the local lfs store
+ lfs: found b1a6ea88da0017a0e77db139a54618986e9a2489bee24af9fe596de9daac498c in the local lfs store
+ 4 files, 5 changesets, 10 total revisions
+ 2 integrity errors encountered!
+ (first damaged changeset appears to be 0)
+ [1]
+ $ hg -R fromcorrupt --config lfs.usercache=emptycache verify -v
+ repository uses revlog format 1
+ checking changesets
+ checking manifests
+ crosschecking files in changesets and manifests
+ checking files
+ lfs: found 66100b384bf761271b407d79fc30cdd0554f3b2c5d944836e936d584b88ce88e in the usercache
+ lfs: found 22f66a3fc0b9bf3f012c814303995ec07099b3a9ce02a7af84b5970811074a3b in the local lfs store
+ lfs: found 66100b384bf761271b407d79fc30cdd0554f3b2c5d944836e936d584b88ce88e in the local lfs store
+ lfs: found 89b6070915a3d573ff3599d1cda305bc5e38549b15c4847ab034169da66e1ca8 in the local lfs store
+ lfs: found b1a6ea88da0017a0e77db139a54618986e9a2489bee24af9fe596de9daac498c in the local lfs store
+ 4 files, 5 changesets, 10 total revisions
+
+Damaging a file required by the update destination fails the update.
+
+ $ echo 'damage' >> $TESTTMP/dummy-remote/22/f66a3fc0b9bf3f012c814303995ec07099b3a9ce02a7af84b5970811074a3b
+ $ hg --config lfs.usercache=emptycache clone -v repo5 fromcorrupt2
+ updating to branch default
+ resolving manifests
+ getting l
+ abort: corrupt remote lfs object: 22f66a3fc0b9bf3f012c814303995ec07099b3a9ce02a7af84b5970811074a3b
+ [255]
+
+A corrupted lfs blob is not transferred from a file://remotestore to the
+usercache or local store.
+
+ $ test -f emptycache/22/f66a3fc0b9bf3f012c814303995ec07099b3a9ce02a7af84b5970811074a3b
+ [1]
+ $ test -f fromcorrupt2/.hg/store/lfs/objects/22/f66a3fc0b9bf3f012c814303995ec07099b3a9ce02a7af84b5970811074a3b
+ [1]
+
+ $ hg -R fromcorrupt2 verify
+ checking changesets
+ checking manifests
+ crosschecking files in changesets and manifests
+ checking files
+ l@1: unpacking 46a2f24864bc: integrity check failed on data/l.i:0
+ large@0: unpacking 2c531e0992ff: integrity check failed on data/large.i:0
+ 4 files, 5 changesets, 10 total revisions
+ 2 integrity errors encountered!
+ (first damaged changeset appears to be 0)
+ [1]
+
+Corrupt local files are not sent upstream. (The alternate dummy remote
+avoids the corrupt lfs object in the original remote.)
+
+ $ mkdir $TESTTMP/dummy-remote2
+ $ hg init dest
+ $ hg -R fromcorrupt2 --config lfs.url=file:///$TESTTMP/dummy-remote2 push -v dest
+ pushing to dest
+ searching for changes
+ lfs: found 22f66a3fc0b9bf3f012c814303995ec07099b3a9ce02a7af84b5970811074a3b in the local lfs store
+ lfs: found 89b6070915a3d573ff3599d1cda305bc5e38549b15c4847ab034169da66e1ca8 in the local lfs store
+ lfs: found b1a6ea88da0017a0e77db139a54618986e9a2489bee24af9fe596de9daac498c in the local lfs store
+ abort: detected corrupt lfs object: 66100b384bf761271b407d79fc30cdd0554f3b2c5d944836e936d584b88ce88e
+ (run hg verify)
+ [255]
+
+ $ hg -R fromcorrupt2 --config lfs.url=file:///$TESTTMP/dummy-remote2 verify -v
+ repository uses revlog format 1
+ checking changesets
+ checking manifests
+ crosschecking files in changesets and manifests
+ checking files
+ l@1: unpacking 46a2f24864bc: integrity check failed on data/l.i:0
+ lfs: found 22f66a3fc0b9bf3f012c814303995ec07099b3a9ce02a7af84b5970811074a3b in the local lfs store
+ large@0: unpacking 2c531e0992ff: integrity check failed on data/large.i:0
+ lfs: found 89b6070915a3d573ff3599d1cda305bc5e38549b15c4847ab034169da66e1ca8 in the local lfs store
+ lfs: found b1a6ea88da0017a0e77db139a54618986e9a2489bee24af9fe596de9daac498c in the local lfs store
+ 4 files, 5 changesets, 10 total revisions
+ 2 integrity errors encountered!
+ (first damaged changeset appears to be 0)
+ [1]
+
+ $ cat $TESTTMP/dummy-remote2/22/f66a3fc0b9bf3f012c814303995ec07099b3a9ce02a7af84b5970811074a3b | $TESTDIR/f --sha256
+ sha256=22f66a3fc0b9bf3f012c814303995ec07099b3a9ce02a7af84b5970811074a3b
+ $ cat fromcorrupt2/.hg/store/lfs/objects/22/f66a3fc0b9bf3f012c814303995ec07099b3a9ce02a7af84b5970811074a3b | $TESTDIR/f --sha256
+ sha256=22f66a3fc0b9bf3f012c814303995ec07099b3a9ce02a7af84b5970811074a3b
+ $ test -f $TESTTMP/dummy-remote2/66/100b384bf761271b407d79fc30cdd0554f3b2c5d944836e936d584b88ce88e
+ [1]
+
+Accessing a corrupt file will complain
+
+ $ hg --cwd fromcorrupt2 cat -r 0 large
+ abort: integrity check failed on data/large.i:0!
+ [255]
+
+lfs -> normal -> lfs round trip conversions are possible. The 'none()'
+predicate on the command line will override whatever is configured globally and
+locally, and ensure everything converts to a regular file. For lfs -> normal,
+there's no 'lfs' destination repo requirement. For normal -> lfs, there is.
+
+ $ hg --config extensions.convert= --config 'lfs.track=none()' \
+ > convert repo8 convert_normal
+ initializing destination convert_normal repository
+ scanning source...
+ sorting...
+ converting...
+ 2 a
+ 1 b
+ 0 meta
+ $ grep 'lfs' convert_normal/.hg/requires
+ [1]
+ $ hg --cwd convert_normal cat a1 -r 0 -T '{rawdata}'
+ THIS-IS-LFS-BECAUSE-10-BYTES
+
+ $ hg --config extensions.convert= --config lfs.threshold=10B \
+ > convert convert_normal convert_lfs
+ initializing destination convert_lfs repository
+ scanning source...
+ sorting...
+ converting...
+ 2 a
+ 1 b
+ 0 meta
+
+ $ hg --cwd convert_lfs cat -r 0 a1 -T '{rawdata}'
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:5bb8341bee63b3649f222b2215bde37322bea075a30575aa685d8f8d21c77024
+ size 29
+ x-is-binary 0
+ $ hg --cwd convert_lfs debugdata a1 0
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:5bb8341bee63b3649f222b2215bde37322bea075a30575aa685d8f8d21c77024
+ size 29
+ x-is-binary 0
+ $ hg --cwd convert_lfs log -r 0 -T "{lfs_files % '{pointer % '{key}={value}\n'}'}"
+ version=https://git-lfs.github.com/spec/v1
+ oid=sha256:5bb8341bee63b3649f222b2215bde37322bea075a30575aa685d8f8d21c77024
+ size=29
+ x-is-binary=0
+ $ hg --cwd convert_lfs log -r 0 \
+ > -T '{lfs_files % "{get(pointer, "oid")}\n"}{lfs_files % "{pointer.oid}\n"}'
+ sha256:5bb8341bee63b3649f222b2215bde37322bea075a30575aa685d8f8d21c77024
+ sha256:5bb8341bee63b3649f222b2215bde37322bea075a30575aa685d8f8d21c77024
+ $ hg --cwd convert_lfs log -r 0 -T '{lfs_files % "{pointer}\n"}'
+ version=https://git-lfs.github.com/spec/v1 oid=sha256:5bb8341bee63b3649f222b2215bde37322bea075a30575aa685d8f8d21c77024 size=29 x-is-binary=0
+ $ hg --cwd convert_lfs \
+ > log -r 'all()' -T '{rev}: {lfs_files % "{file}: {oid}\n"}'
+ 0: a1: 5bb8341bee63b3649f222b2215bde37322bea075a30575aa685d8f8d21c77024
+ 1: a2: 5bb8341bee63b3649f222b2215bde37322bea075a30575aa685d8f8d21c77024
+ 2: a2: 876dadc86a8542f9798048f2c47f51dbf8e4359aed883e8ec80c5db825f0d943
+
+ $ grep 'lfs' convert_lfs/.hg/requires
+ lfs
+
+The hashes in all stages of the conversion are unchanged.
+
+ $ hg -R repo8 log -T '{node|short}\n'
+ 0fae949de7fa
+ 9cd6bdffdac0
+ 7f96794915f7
+ $ hg -R convert_normal log -T '{node|short}\n'
+ 0fae949de7fa
+ 9cd6bdffdac0
+ 7f96794915f7
+ $ hg -R convert_lfs log -T '{node|short}\n'
+ 0fae949de7fa
+ 9cd6bdffdac0
+ 7f96794915f7
+
+This convert is trickier, because it contains deleted files (via `hg mv`)
+
+ $ hg --config extensions.convert= --config lfs.threshold=1000M \
+ > convert repo3 convert_normal2
+ initializing destination convert_normal2 repository
+ scanning source...
+ sorting...
+ converting...
+ 4 commit with lfs content
+ 3 renames
+ 2 large to small, small to large
+ 1 random modifications
+ 0 switch large and small again
+ $ grep 'lfs' convert_normal2/.hg/requires
+ [1]
+ $ hg --cwd convert_normal2 debugdata large 0
+ LONGER-THAN-TEN-BYTES-WILL-TRIGGER-LFS
+
+ $ hg --config extensions.convert= --config lfs.threshold=10B \
+ > convert convert_normal2 convert_lfs2
+ initializing destination convert_lfs2 repository
+ scanning source...
+ sorting...
+ converting...
+ 4 commit with lfs content
+ 3 renames
+ 2 large to small, small to large
+ 1 random modifications
+ 0 switch large and small again
+ $ grep 'lfs' convert_lfs2/.hg/requires
+ lfs
+ $ hg --cwd convert_lfs2 debugdata large 0
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:66100b384bf761271b407d79fc30cdd0554f3b2c5d944836e936d584b88ce88e
+ size 39
+ x-is-binary 0
+
+ $ hg -R convert_lfs2 config --debug extensions | grep lfs
+ $TESTTMP/convert_lfs2/.hg/hgrc:*: extensions.lfs= (glob)
+
+Committing deleted files works:
+
+ $ hg init $TESTTMP/repo-del
+ $ cd $TESTTMP/repo-del
+ $ echo 1 > A
+ $ hg commit -m 'add A' -A A
+ $ hg rm A
+ $ hg commit -m 'rm A'
+
+Bad .hglfs files will block the commit with a useful message
+
+ $ cat > .hglfs << EOF
+ > [track]
+ > **.test = size(">5B")
+ > bad file ... no commit
+ > EOF
+
+ $ echo x > file.txt
+ $ hg ci -Aqm 'should fail'
+ hg: parse error at .hglfs:3: bad file ... no commit
+ [255]
+
+ $ cat > .hglfs << EOF
+ > [track]
+ > **.test = size(">5B")
+ > ** = nonexistent()
+ > EOF
+
+ $ hg ci -Aqm 'should fail'
+ abort: parse error in .hglfs: unknown identifier: nonexistent
+ [255]
+
+'**' works out to mean all files.
+
+ $ cat > .hglfs << EOF
+ > [track]
+ > **.test = size(">5B")
+ > **.exclude = none()
+ > ** = size(">10B")
+ > EOF
+
+The LFS policy takes effect as the .hglfs file is committed
+
+ $ echo 'largefile' > lfs.test
+ $ echo '012345678901234567890' > nolfs.exclude
+ $ echo '01234567890123456' > lfs.catchall
+ $ hg ci -Aqm 'added .hglfs'
+ $ hg log -r . -T '{rev}: {lfs_files % "{file}: {oid}\n"}\n'
+ 2: lfs.catchall: d4ec46c2869ba22eceb42a729377432052d9dd75d82fc40390ebaadecee87ee9
+ lfs.test: 5489e6ced8c36a7b267292bde9fd5242a5f80a7482e8f23fa0477393dfaa4d6c
+
+The existing .hglfs file is used even when it is not in the 'A' or 'M' states
+
+ $ echo 'largefile2' > lfs.test
+ $ echo '012345678901234567890a' > nolfs.exclude
+ $ echo '01234567890123456a' > lfs.catchall
+ $ hg ci -qm 'unmodified .hglfs'
+ $ hg log -r . -T '{rev}: {lfs_files % "{file}: {oid}\n"}\n'
+ 3: lfs.catchall: 31f43b9c62b540126b0ad5884dc013d21a61c9329b77de1fceeae2fc58511573
+ lfs.test: 8acd23467967bc7b8cc5a280056589b0ba0b17ff21dbd88a7b6474d6290378a6
+
+Excluding the .hglfs file from the commit postpones the policy change
+
+ $ hg rm .hglfs
+ $ echo 'largefile3' > lfs.test
+ $ echo '012345678901234567890abc' > nolfs.exclude
+ $ echo '01234567890123456abc' > lfs.catchall
+ $ hg ci -qm 'file test' -X .hglfs
+ $ hg log -r . -T '{rev}: {lfs_files % "{file}: {oid}\n"}\n'
+ 4: lfs.catchall: 6747cfb1b83965b4a884e7a6061813ae31e4122028bc6a88d2ac5e5f9e05c5af
+ lfs.test: 3f40b70c2294e91e0fa789ebcf73c5a1d1c7aef270f83e477e40cb0513237e8c
+
+The policy change takes effect when the .hglfs is committed
+
+ $ echo 'largefile4' > lfs.test
+ $ echo '012345678901234567890abcdef' > nolfs.exclude
+ $ echo '01234567890123456abcdef' > lfs.catchall
+ $ hg ci -qm 'file test'
+ $ hg log -r . -T '{rev}: {lfs_files % "{file}: {oid}\n"}\n'
+ 5:
+
+ $ cd ..
+
+Unbundling adds a requirement to a non-lfs repo, if necessary.
+
+ $ hg bundle -R $TESTTMP/repo-del -qr 0 --base null nolfs.hg
+ $ hg bundle -R convert_lfs2 -qr tip --base null lfs.hg
+ $ hg init unbundle
+ $ hg pull -R unbundle -q nolfs.hg
+ $ grep lfs unbundle/.hg/requires
+ [1]
+ $ hg pull -R unbundle -q lfs.hg
+ $ grep lfs unbundle/.hg/requires
+ lfs
+
+ $ hg init no_lfs
+ $ cat >> no_lfs/.hg/hgrc <<EOF
+ > [experimental]
+ > changegroup3 = True
+ > [extensions]
+ > lfs=!
+ > EOF
+ $ cp -R no_lfs no_lfs2
+
+Pushing from a local lfs repo to a local repo without an lfs requirement and
+with lfs disabled, fails.
+
+ $ hg push -R convert_lfs2 no_lfs
+ pushing to no_lfs
+ abort: required features are not supported in the destination: lfs
+ [255]
+ $ grep lfs no_lfs/.hg/requires
+ [1]
+
+Pulling from a local lfs repo to a local repo without an lfs requirement and
+with lfs disabled, fails.
+
+ $ hg pull -R no_lfs2 convert_lfs2
+ pulling from convert_lfs2
+ abort: required features are not supported in the destination: lfs
+ [255]
+ $ grep lfs no_lfs2/.hg/requires
+ [1]
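
The lfs.track expression exercised in the "Test filter" block above is a
fileset-style predicate over a file's path and size. As a rough illustration
(an assumption for this note, not part of the patch itself), the same
classification can be sketched with the minifileset module whose grammar is
covered by tests/test-minifileset.py later in this series; compile() returns a
callable taking (path, size in bytes):

  # Sketch only: evaluate the lfs.track expression from the "Test filter"
  # block with minifileset. Sizes include the trailing newline written by
  # echo, matching the repo11 files above.
  from mercurial import minifileset

  spec = ('(**.a & size(">5B")) | (**.b & !size(">5B"))'
          ' | (**.c & "path:d" & !"path:d/c.c") | size(">10B")')
  islfs = minifileset.compile(spec)

  assert islfs('a/1.a', 7)        # *.a over 5 bytes -> stored as lfs
  assert not islfs('a/2.a', 2)    # small *.a -> normal revlog storage
  assert not islfs('1.b', 7)      # *.b over 5 bytes -> normal
  assert islfs('2.b', 2)          # small *.b -> lfs
  assert islfs('d/d.c', 2)        # *.c under d/, except the excluded d/c.c
  assert not islfs('d/c.c', 2)
  assert islfs('x', 13)           # anything over 10 bytes -> lfs

The shell loop in that block checks the same classification indirectly, by
grepping each filelog's raw data for the git-lfs pointer header.
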
--- a/tests/test-locate.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-locate.t Mon Jan 22 17:53:02 2018 -0500
@@ -90,65 +90,79 @@
$ hg rm t/b
$ hg locate 't/**'
- t/b (glob)
- t/e.h (glob)
- t/x (glob)
+ t/b
+ t/e.h
+ t/x
$ hg files
b
- dir.h/foo (glob)
+ dir.h/foo
t.h
- t/e.h (glob)
- t/x (glob)
+ t/e.h
+ t/x
$ hg files b
b
+-X with explicit path:
+
+ $ hg files b -X b
+ [1]
+
$ mkdir otherdir
$ cd otherdir
$ hg files path:
- ../b (glob)
- ../dir.h/foo (glob)
- ../t.h (glob)
- ../t/e.h (glob)
- ../t/x (glob)
+ ../b
+ ../dir.h/foo
+ ../t.h
+ ../t/e.h
+ ../t/x
$ hg files path:.
- ../b (glob)
- ../dir.h/foo (glob)
- ../t.h (glob)
- ../t/e.h (glob)
- ../t/x (glob)
+ ../b
+ ../dir.h/foo
+ ../t.h
+ ../t/e.h
+ ../t/x
$ hg locate b
- ../b (glob)
- ../t/b (glob)
+ ../b
+ ../t/b
$ hg locate '*.h'
- ../t.h (glob)
- ../t/e.h (glob)
+ ../t.h
+ ../t/e.h
$ hg locate path:t/x
- ../t/x (glob)
+ ../t/x
$ hg locate 're:.*\.h$'
- ../t.h (glob)
- ../t/e.h (glob)
+ ../t.h
+ ../t/e.h
$ hg locate -r 0 b
- ../b (glob)
- ../t/b (glob)
+ ../b
+ ../t/b
$ hg locate -r 0 '*.h'
- ../t.h (glob)
- ../t/e.h (glob)
+ ../t.h
+ ../t/e.h
$ hg locate -r 0 path:t/x
- ../t/x (glob)
+ ../t/x
$ hg locate -r 0 're:.*\.h$'
- ../t.h (glob)
- ../t/e.h (glob)
+ ../t.h
+ ../t/e.h
$ hg files
- ../b (glob)
- ../dir.h/foo (glob)
- ../t.h (glob)
- ../t/e.h (glob)
- ../t/x (glob)
+ ../b
+ ../dir.h/foo
+ ../t.h
+ ../t/e.h
+ ../t/x
$ hg files .
[1]
+Convert native path separator to slash (issue5572)
+
+ $ hg files -T '{path|slashpath}\n'
+ ../b
+ ../dir.h/foo
+ ../t.h
+ ../t/e.h
+ ../t/x
+
$ cd ../..
--- a/tests/test-lock-badness.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-lock-badness.t Mon Jan 22 17:53:02 2018 -0500
@@ -57,14 +57,77 @@
$ echo b > b/b
$ hg -R b ci -A -m b --config hooks.precommit="python:`pwd`/hooks.py:sleepone" > stdout &
$ hg -R b up -q --config hooks.pre-update="python:`pwd`/hooks.py:sleephalf" \
- > > preup 2>&1
+ > > preup-stdout 2>preup-stderr
$ wait
- $ cat preup
+ $ cat preup-stdout
+ $ cat preup-stderr
waiting for lock on working directory of b held by process '*' on host '*' (glob)
got lock after * seconds (glob)
$ cat stdout
adding b
+One process waiting on another, warning after a long time.
+
+ $ echo b > b/c
+ $ hg -R b ci -A -m b --config hooks.precommit="python:`pwd`/hooks.py:sleepone" > stdout &
+ $ hg -R b up -q --config hooks.pre-update="python:`pwd`/hooks.py:sleephalf" \
+ > --config ui.timeout.warn=250 \
+ > > preup-stdout 2>preup-stderr
+ $ wait
+ $ cat preup-stdout
+ $ cat preup-stderr
+ $ cat stdout
+ adding c
+
+One process waiting on another, warning disabled.
+
+ $ echo b > b/d
+ $ hg -R b ci -A -m b --config hooks.precommit="python:`pwd`/hooks.py:sleepone" > stdout &
+ $ hg -R b up -q --config hooks.pre-update="python:`pwd`/hooks.py:sleephalf" \
+ > --config ui.timeout.warn=-1 \
+ > > preup-stdout 2>preup-stderr
+ $ wait
+ $ cat preup-stdout
+ $ cat preup-stderr
+ $ cat stdout
+ adding d
+
+check we still print debug output
+
+One process waiting on another, warning after a long time (debug output on)
+
+ $ echo b > b/e
+ $ hg -R b ci -A -m b --config hooks.precommit="python:`pwd`/hooks.py:sleepone" > stdout &
+ $ hg -R b up --config hooks.pre-update="python:`pwd`/hooks.py:sleephalf" \
+ > --config ui.timeout.warn=250 --debug\
+ > > preup-stdout 2>preup-stderr
+ $ wait
+ $ cat preup-stdout
+ calling hook pre-update: hghook_pre-update.sleephalf
+ waiting for lock on working directory of b held by process '*' on host '*' (glob)
+ got lock after * seconds (glob)
+ 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ $ cat preup-stderr
+ $ cat stdout
+ adding e
+
+One process waiting on another, warning disabled (debug output on)
+
+ $ echo b > b/f
+ $ hg -R b ci -A -m b --config hooks.precommit="python:`pwd`/hooks.py:sleepone" > stdout &
+ $ hg -R b up --config hooks.pre-update="python:`pwd`/hooks.py:sleephalf" \
+ > --config ui.timeout.warn=-1 --debug\
+ > > preup-stdout 2>preup-stderr
+ $ wait
+ $ cat preup-stdout
+ calling hook pre-update: hghook_pre-update.sleephalf
+ waiting for lock on working directory of b held by process '*' on host '*' (glob)
+ got lock after * seconds (glob)
+ 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ $ cat preup-stderr
+ $ cat stdout
+ adding f
+
Pushing to a local read-only repo that can't be locked
$ chmod 100 a/.hg/store
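
The new test-lock-badness.t cases above exercise the ui.timeout.warn knob: a
process waiting on another's lock prints the "waiting for lock ... held by
process" message only once that many seconds have passed, a negative value
disables the warning, and --debug prints it regardless. A standalone sketch of
the delayed-warning wait loop (an illustration under those assumptions, not
Mercurial's actual lock code, and ignoring the --debug path) could look like:

  # Hypothetical sketch: poll for a lock and warn only after a delay,
  # mirroring the behaviour the tests above observe for ui.timeout.warn.
  import time

  def waitforlock(tryacquire, warnafter=250, timeout=600):
      """Poll tryacquire() until it succeeds.

      Warn once after 'warnafter' seconds (negative disables the warning);
      give up after 'timeout' seconds."""
      start = time.time()
      warned = False
      while not tryacquire():
          elapsed = time.time() - start
          if warnafter >= 0 and not warned and elapsed >= warnafter:
              print('waiting for lock held by another process')
              warned = True
          if elapsed >= timeout:
              raise RuntimeError('timed out waiting for lock')
          time.sleep(0.1)
      if warned:
          print('got lock after %d seconds' % (time.time() - start))
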
--- a/tests/test-log.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-log.t Mon Jan 22 17:53:02 2018 -0500
@@ -102,6 +102,10 @@
summary: c
+-X, with explicit path
+
+ $ hg log a -X a
+
-f, non-existent directory
$ hg log -f dir
@@ -461,6 +465,104 @@
2
0
+follow files from the specified revisions (issue4959)
+
+ $ hg log -G -T '{rev} {files},{file_copies % " {source}->{name}"}\n'
+ @ 4 dir/b e, dir/b->e
+ |
+ o 3 a b d g, a->b f->g
+ |
+ o 2 b dir/b f g, b->dir/b
+ |
+ o 1 b g, a->b f->g
+ |
+ o 0 a f,
+
+
+ $ hg log -T '{rev}\n' -fr 4 e
+ 4
+ 2
+ 1
+ 0
+ $ hg log -T '{rev}\n' -fr 2 g
+ 2
+ 1
+ 0
+ $ hg log -T '{rev}\n' -fr '2+3' g
+ 3
+ 2
+ 1
+ 0
+
+follow files from the specified revisions with glob patterns (issue5053)
+(BROKEN: should follow copies from e@4)
+
+ $ hg log -T '{rev}\n' -fr4 e -X '[abcdfg]'
+ 4
+ 2 (false !)
+ 1 (false !)
+ 0 (false !)
+
+follow files from the specified revisions with missing patterns
+(BROKEN: should follow copies from e@4)
+
+ $ hg log -T '{rev}\n' -fr4 e x
+ 4
+ 2 (false !)
+ 1 (false !)
+ 0 (false !)
+
+follow files from the specified revisions across copies with -p/--patch
+
+ $ hg log -T '== rev: {rev},{file_copies % " {source}->{name}"} ==\n' -fpr 4 e g
+ == rev: 4, dir/b->e ==
+ diff -r 2ca5ba701980 -r 7e4639b4691b e
+ --- /dev/null Thu Jan 01 00:00:00 1970 +0000
+ +++ b/e Thu Jan 01 00:00:05 1970 +0000
+ @@ -0,0 +1,1 @@
+ +a
+
+ == rev: 3, a->b f->g ==
+ diff -r f8954cd4dc1f -r 2ca5ba701980 g
+ --- a/g Thu Jan 01 00:00:03 1970 +0000
+ +++ b/g Thu Jan 01 00:00:04 1970 +0000
+ @@ -1,2 +1,2 @@
+ f
+ -g
+ +f
+
+ == rev: 2, b->dir/b ==
+ diff -r d89b0a12d229 -r f8954cd4dc1f dir/b
+ --- /dev/null Thu Jan 01 00:00:00 1970 +0000
+ +++ b/dir/b Thu Jan 01 00:00:03 1970 +0000
+ @@ -0,0 +1,1 @@
+ +a
+ diff -r d89b0a12d229 -r f8954cd4dc1f f
+ --- a/f Thu Jan 01 00:00:02 1970 +0000
+ +++ b/f Thu Jan 01 00:00:03 1970 +0000
+ @@ -1,1 +1,2 @@
+ f
+ +f
+
+ == rev: 1, a->b f->g ==
+ diff -r 9161b9aeaf16 -r d89b0a12d229 b
+ --- /dev/null Thu Jan 01 00:00:00 1970 +0000
+ +++ b/b Thu Jan 01 00:00:02 1970 +0000
+ @@ -0,0 +1,1 @@
+ +a
+
+ == rev: 0, ==
+ diff -r 000000000000 -r 9161b9aeaf16 a
+ --- /dev/null Thu Jan 01 00:00:00 1970 +0000
+ +++ b/a Thu Jan 01 00:00:01 1970 +0000
+ @@ -0,0 +1,1 @@
+ +a
+ diff -r 000000000000 -r 9161b9aeaf16 f
+ --- /dev/null Thu Jan 01 00:00:00 1970 +0000
+ +++ b/f Thu Jan 01 00:00:01 1970 +0000
+ @@ -0,0 +1,1 @@
+ +f
+
log copies with --copies
@@ -711,6 +813,15 @@
date: Thu Jan 01 00:00:01 1970 +0000
summary: base
+
+log -fr with aliases: 'A' should be expanded, but 'reverse()' should have no
+effect
+
+ $ hg log --config 'revsetalias.reverse(x)=x' --config 'revsetalias.A=1+4' -qfrA
+ 4:ddb82e70d1a1
+ 1:3d5bf5654eda
+ 0:67e992f2c4f3
+
log -r "follow('set:grep(b2)')"
$ hg log -r "follow('set:grep(b2)')"
@@ -732,6 +843,35 @@
date: Thu Jan 01 00:00:01 1970 +0000
summary: b2
+
+follow files starting from multiple revisions:
+
+ $ hg log -T '{rev}: {files}\n' -r "follow('glob:b?', startrev=2+3+4)"
+ 3: b1
+ 4: b2
+
+follow files starting from empty revision:
+
+ $ hg log -T '{rev}: {files}\n' -r "follow('glob:*', startrev=.-.)"
+
+follow starting from revisions:
+
+ $ hg log -Gq -r "follow(startrev=2+4)"
+ o 4:ddb82e70d1a1
+ |
+ | o 2:60c670bf5b30
+ | |
+ | o 1:3d5bf5654eda
+ |/
+ @ 0:67e992f2c4f3
+
+
+follow the current revision:
+
+ $ hg log -Gq -r "follow()"
+ @ 0:67e992f2c4f3
+
+
$ hg up -qC 4
log -f -r null
@@ -1009,6 +1149,77 @@
$ cd ..
+Multiple copy sources of a file:
+
+ $ hg init follow-multi
+ $ cd follow-multi
+ $ echo 0 >> a
+ $ hg ci -qAm 'a'
+ $ hg cp a b
+ $ hg ci -m 'a->b'
+ $ echo 2 >> a
+ $ hg ci -m 'a'
+ $ echo 3 >> b
+ $ hg ci -m 'b'
+ $ echo 4 >> a
+ $ echo 4 >> b
+ $ hg ci -m 'a,b'
+ $ echo 5 >> a
+ $ hg ci -m 'a0'
+ $ echo 6 >> b
+ $ hg ci -m 'b0'
+ $ hg up -q 4
+ $ echo 7 >> b
+ $ hg ci -m 'b1'
+ created new head
+ $ echo 8 >> a
+ $ hg ci -m 'a1'
+ $ hg rm a
+ $ hg mv b a
+ $ hg ci -m 'b1->a1'
+ $ hg merge -qt :local
+ $ hg ci -m '(a0,b1->a1)->a'
+
+ $ hg log -GT '{rev}: {desc}\n'
+ @ 10: (a0,b1->a1)->a
+ |\
+ | o 9: b1->a1
+ | |
+ | o 8: a1
+ | |
+ | o 7: b1
+ | |
+ o | 6: b0
+ | |
+ o | 5: a0
+ |/
+ o 4: a,b
+ |
+ o 3: b
+ |
+ o 2: a
+ |
+ o 1: a->b
+ |
+ o 0: a
+
+
+ since file 'a' has multiple copy sources at revision 4, ancestors can't
+ be indexed solely by fctx.linkrev().
+
+ $ hg log -T '{rev}: {desc}\n' -f a
+ 10: (a0,b1->a1)->a
+ 9: b1->a1
+ 7: b1
+ 5: a0
+ 4: a,b
+ 3: b
+ 2: a
+ 1: a->b
+ 0: a
+
+ $ cd ..
+
Test that log should respect the order of -rREV even if multiple OR conditions
are specified (issue5100):
@@ -1152,6 +1363,16 @@
$ hg log -u "user3"
+"-u USER" shouldn't be overridden by "user(USER)" alias
+
+ $ hg log --config 'revsetalias.user(x)=branch(x)' -u default
+ $ hg log --config 'revsetalias.user(x)=branch(x)' -u user1
+ changeset: 0:29a4c94f1924
+ user: User One <user1@example.org>
+ date: Thu Jan 01 00:00:00 1970 +0000
+ summary: a
+
+
$ cd ..
$ hg init branches
@@ -1474,7 +1695,7 @@
merging foo
warning: conflicts while merging foo! (edit, then use 'hg resolve --mark')
0 files updated, 0 files merged, 0 files removed, 1 files unresolved
- use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon
+ use 'hg resolve' to retry unresolved file merges or 'hg merge --abort' to abandon
[1]
$ echo 'merge 1' > foo
$ hg resolve -m foo
@@ -1485,7 +1706,7 @@
merging foo
warning: conflicts while merging foo! (edit, then use 'hg resolve --mark')
1 files updated, 0 files merged, 0 files removed, 1 files unresolved
- use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon
+ use 'hg resolve' to retry unresolved file merges or 'hg merge --abort' to abandon
[1]
$ echo 'merge 2' > foo
$ hg resolve -m foo
@@ -1602,11 +1823,6 @@
date: Thu Jan 01 00:00:00 1970 +0000
summary: add foo, related
- changeset: 2:c4c64aedf0f7
- user: test
- date: Thu Jan 01 00:00:00 1970 +0000
- summary: add unrelated old foo
-
$ cd ..
Issue2383: hg log showing _less_ differences than hg diff
@@ -1717,13 +1933,15 @@
1:a765632148dc55d38c35c4f247c618701886cb2f
0:9f758d63dcde62d547ebfb08e1e7ee96535f2b05
$ hg log -r a
- abort: hidden revision 'a'!
+ abort: hidden revision 'a' is pruned!
(use --hidden to access hidden revisions)
[255]
test that parent prevent a changeset to be hidden
$ hg up 1 -q --hidden
+ updating to a hidden changeset a765632148dc
+ (hidden revision 'a765632148dc' is pruned)
$ hg log --template='{rev}:{node}\n'
1:a765632148dc55d38c35c4f247c618701886cb2f
0:9f758d63dcde62d547ebfb08e1e7ee96535f2b05
@@ -1740,6 +1958,8 @@
bookmarks prevent a changeset being hidden
$ hg bookmark --hidden -r 1 X
+ bookmarking hidden changeset a765632148dc
+ (hidden revision 'a765632148dc' is pruned)
$ hg log --template '{rev}:{node}\n'
1:a765632148dc55d38c35c4f247c618701886cb2f
0:9f758d63dcde62d547ebfb08e1e7ee96535f2b05
@@ -1748,6 +1968,8 @@
divergent bookmarks are not hidden
$ hg bookmark --hidden -r 1 X@foo
+ bookmarking hidden changeset a765632148dc
+ (hidden revision 'a765632148dc' is pruned)
$ hg log --template '{rev}:{node}\n'
1:a765632148dc55d38c35c4f247c618701886cb2f
0:9f758d63dcde62d547ebfb08e1e7ee96535f2b05
@@ -1774,7 +1996,7 @@
2:94375ec45bddd2a824535fc04855bd058c926ec0
3:d7d28b288a6b83d5d2cf49f10c5974deed3a1d2e
$ hg log -T'{rev}:{node}\n' -r:0
- abort: hidden revision '0'!
+ abort: hidden revision '0' is pruned!
(use --hidden to access hidden revisions)
[255]
$ hg log -T'{rev}:{node}\n' -f
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-logexchange.t Mon Jan 22 17:53:02 2018 -0500
@@ -0,0 +1,108 @@
+Testing the functionality to pull remotenames
+=============================================
+
+ $ cat >> $HGRCPATH << EOF
+ > [alias]
+ > glog = log -G -T '{rev}:{node|short} {desc}'
+ > [experimental]
+ > remotenames = True
+ > EOF
+
+Making a server repo
+--------------------
+
+ $ hg init server
+ $ cd server
+ $ for ch in a b c d e f g h; do
+ > echo "foo" >> $ch
+ > hg ci -Aqm "Added "$ch
+ > done
+ $ hg glog
+ @ 7:ec2426147f0e Added h
+ |
+ o 6:87d6d6676308 Added g
+ |
+ o 5:825660c69f0c Added f
+ |
+ o 4:aa98ab95a928 Added e
+ |
+ o 3:62615734edd5 Added d
+ |
+ o 2:28ad74487de9 Added c
+ |
+ o 1:29becc82797a Added b
+ |
+ o 0:18d04c59bb5d Added a
+
+ $ hg bookmark -r 3 foo
+ $ hg bookmark -r 6 bar
+ $ hg up 4
+ 0 files updated, 0 files merged, 3 files removed, 0 files unresolved
+ $ hg branch wat
+ marked working directory as branch wat
+ (branches are permanent and global, did you want a bookmark?)
+ $ echo foo >> bar
+ $ hg ci -Aqm "added bar"
+
+Making a client repo
+--------------------
+
+ $ cd ..
+
+ $ hg clone server client
+ updating to branch default
+ 8 files updated, 0 files merged, 0 files removed, 0 files unresolved
+
+ $ cd client
+ $ cat .hg/logexchange/bookmarks
+ 0
+
+ 87d6d66763085b629e6d7ed56778c79827273022\x00file:$TESTTMP/server\x00bar (esc)
+ 62615734edd52f06b6fb9c2beb429e4fe30d57b8\x00file:$TESTTMP/server\x00foo (esc)
+
+ $ cat .hg/logexchange/branches
+ 0
+
+ ec2426147f0e39dbc9cef599b066be6035ce691d\x00file:$TESTTMP/server\x00default (esc)
+ 3e1487808078543b0af6d10dadf5d46943578db0\x00file:$TESTTMP/server\x00wat (esc)
+
+Making a new server
+-------------------
+
+ $ cd ..
+ $ hg init server2
+ $ cd server2
+ $ hg pull ../server/
+ pulling from ../server/
+ requesting all changes
+ adding changesets
+ adding manifests
+ adding file changes
+ added 9 changesets with 9 changes to 9 files (+1 heads)
+ adding remote bookmark bar
+ adding remote bookmark foo
+ new changesets 18d04c59bb5d:3e1487808078
+ (run 'hg heads' to see heads)
+
+Pulling from the new server
+---------------------------
+ $ cd ../client/
+ $ hg pull ../server2/
+ pulling from ../server2/
+ searching for changes
+ no changes found
+ $ cat .hg/logexchange/bookmarks
+ 0
+
+ 62615734edd52f06b6fb9c2beb429e4fe30d57b8\x00file:$TESTTMP/server\x00foo (esc)
+ 87d6d66763085b629e6d7ed56778c79827273022\x00file:$TESTTMP/server\x00bar (esc)
+ 87d6d66763085b629e6d7ed56778c79827273022\x00file:$TESTTMP/server2\x00bar (esc)
+ 62615734edd52f06b6fb9c2beb429e4fe30d57b8\x00file:$TESTTMP/server2\x00foo (esc)
+
+ $ cat .hg/logexchange/branches
+ 0
+
+ 3e1487808078543b0af6d10dadf5d46943578db0\x00file:$TESTTMP/server\x00wat (esc)
+ ec2426147f0e39dbc9cef599b066be6035ce691d\x00file:$TESTTMP/server\x00default (esc)
+ ec2426147f0e39dbc9cef599b066be6035ce691d\x00file:$TESTTMP/server2\x00default (esc)
+ 3e1487808078543b0af6d10dadf5d46943578db0\x00file:$TESTTMP/server2\x00wat (esc)
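
The .hg/logexchange/bookmarks and .hg/logexchange/branches files dumped above
share one on-disk layout: a version line ("0"), a blank separator line, then
one record per line with NUL-separated fields of node, remote URL, and name.
A minimal reader for that layout (a sketch under that assumption, not the
extension's own code) might be:

  # Hypothetical helper: parse the logexchange file format shown above.
  # Each record line is "<node>\0<remote-url>\0<name>".
  def readremotenames(data):
      lines = data.split('\n')
      if not lines or lines[0] != '0':
          raise ValueError('unsupported logexchange version: %r' % lines[:1])
      entries = []
      for line in lines[2:]:      # skip the version line and blank separator
          if not line:
              continue
          node, remoteurl, name = line.split('\0')
          entries.append((name, remoteurl, node))
      return entries

  # Usage against the client repo created above:
  #   with open('.hg/logexchange/bookmarks', 'rb') as fp:
  #       for name, url, node in readremotenames(fp.read()):
  #           print('%s @ %s -> %s' % (name, url, node[:12]))
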
--- a/tests/test-manifest.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-manifest.t Mon Jan 22 17:53:02 2018 -0500
@@ -26,7 +26,7 @@
$ hg files -vr .
2 a
- 2 x b/a (glob)
+ 2 x b/a
1 l l
$ hg files -r . -X b
a
--- a/tests/test-merge-changedelete.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-merge-changedelete.t Mon Jan 22 17:53:02 2018 -0500
@@ -61,7 +61,7 @@
merging file3
warning: conflicts while merging file3! (edit, then use 'hg resolve --mark')
0 files updated, 0 files merged, 0 files removed, 3 files unresolved
- use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon
+ use 'hg resolve' to retry unresolved file merges or 'hg merge --abort' to abandon
[1]
$ status
@@ -128,7 +128,7 @@
merging file3
warning: conflicts while merging file3! (edit, then use 'hg resolve --mark')
0 files updated, 2 files merged, 0 files removed, 1 files unresolved
- use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon
+ use 'hg resolve' to retry unresolved file merges or 'hg merge --abort' to abandon
[1]
$ status
@@ -205,7 +205,7 @@
merging file3
warning: conflicts while merging file3! (edit, then use 'hg resolve --mark')
0 files updated, 1 files merged, 1 files removed, 1 files unresolved
- use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon
+ use 'hg resolve' to retry unresolved file merges or 'hg merge --abort' to abandon
[1]
$ status
@@ -269,7 +269,7 @@
merging file3
warning: conflicts while merging file3! (edit, then use 'hg resolve --mark')
0 files updated, 0 files merged, 1 files removed, 2 files unresolved
- use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon
+ use 'hg resolve' to retry unresolved file merges or 'hg merge --abort' to abandon
[1]
$ status
@@ -422,7 +422,7 @@
$ hg merge --tool :fail
0 files updated, 0 files merged, 0 files removed, 3 files unresolved
- use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon
+ use 'hg resolve' to retry unresolved file merges or 'hg merge --abort' to abandon
[1]
$ status 2>&1 | tee $TESTTMP/fail.status
--- status ---
@@ -479,7 +479,7 @@
use (c)hanged version, leave (d)eleted, or leave (u)nresolved?
keep (l)ocal [working copy], take (o)ther [merge rev], or leave (u)nresolved for file3?
0 files updated, 0 files merged, 0 files removed, 3 files unresolved
- use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon
+ use 'hg resolve' to retry unresolved file merges or 'hg merge --abort' to abandon
[1]
$ status 2>&1 | tee $TESTTMP/prompt.status
--- status ---
@@ -538,7 +538,7 @@
use (c)hanged version, leave (d)eleted, or leave (u)nresolved? u
keep (l)ocal [working copy], take (o)ther [merge rev], or leave (u)nresolved for file3? u
0 files updated, 0 files merged, 0 files removed, 3 files unresolved
- use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon
+ use 'hg resolve' to retry unresolved file merges or 'hg merge --abort' to abandon
[1]
$ status
--- status ---
@@ -596,7 +596,7 @@
merging file3
warning: conflicts while merging file3! (edit, then use 'hg resolve --mark')
0 files updated, 0 files merged, 0 files removed, 3 files unresolved
- use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon
+ use 'hg resolve' to retry unresolved file merges or 'hg merge --abort' to abandon
[1]
$ status
--- status ---
--- a/tests/test-merge-criss-cross.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-merge-criss-cross.t Mon Jan 22 17:53:02 2018 -0500
@@ -111,16 +111,16 @@
picked tool ':dump' for f2 (binary False symlink False changedelete False)
my f2@6373bbfdae1d+ other f2@e673248094b1 ancestor f2@0f6b37dbe527
3 files updated, 0 files merged, 0 files removed, 1 files unresolved
- use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon
+ use 'hg resolve' to retry unresolved file merges or 'hg merge --abort' to abandon
[1]
$ f --dump --recurse *
d2: directory with 2 files
- d2/f3: (glob)
+ d2/f3:
>>>
0 base
<<<
- d2/f4: (glob)
+ d2/f4:
>>>
0 base
<<<
@@ -158,7 +158,7 @@
getting d1/f3 to d2/f3
merging f2
3 files updated, 0 files merged, 0 files removed, 1 files unresolved
- use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon
+ use 'hg resolve' to retry unresolved file merges or 'hg merge --abort' to abandon
[1]
Redo merge with merge.preferancestor="*" to enable bid merge
@@ -222,11 +222,11 @@
$ f --dump --recurse *
d2: directory with 2 files
- d2/f3: (glob)
+ d2/f3:
>>>
0 base
<<<
- d2/f4: (glob)
+ d2/f4:
>>>
0 base
<<<
@@ -308,11 +308,11 @@
$ f --dump --recurse *
d2: directory with 2 files
- d2/f3: (glob)
+ d2/f3:
>>>
0 base
<<<
- d2/f4: (glob)
+ d2/f4:
>>>
0 base
<<<
--- a/tests/test-merge-force.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-merge-force.t Mon Jan 22 17:53:02 2018 -0500
@@ -216,7 +216,7 @@
warning: conflicts while merging missing_content2_missing_content4-tracked! (edit, then use 'hg resolve --mark')
warning: conflicts while merging missing_content2_missing_content4-untracked! (edit, then use 'hg resolve --mark')
18 files updated, 3 files merged, 8 files removed, 35 files unresolved
- use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon
+ use 'hg resolve' to retry unresolved file merges or 'hg merge --abort' to abandon
Check which files need to be resolved (should correspond to the output above).
This should be the files for which the base (1st filename segment), the remote
--- a/tests/test-merge-internal-tools-pattern.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-merge-internal-tools-pattern.t Mon Jan 22 17:53:02 2018 -0500
@@ -43,7 +43,7 @@
$ hg merge
0 files updated, 0 files merged, 0 files removed, 1 files unresolved
- use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon
+ use 'hg resolve' to retry unresolved file merges or 'hg merge --abort' to abandon
[1]
$ cat f
--- a/tests/test-merge-remove.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-merge-remove.t Mon Jan 22 17:53:02 2018 -0500
@@ -106,7 +106,7 @@
use (c)hanged version, leave (d)eleted, or leave (u)nresolved? u
merging foo1 and foo to foo1
0 files updated, 1 files merged, 0 files removed, 1 files unresolved
- use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon
+ use 'hg resolve' to retry unresolved file merges or 'hg merge --abort' to abandon
[1]
$ cat bar
bleh
--- a/tests/test-merge-subrepos.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-merge-subrepos.t Mon Jan 22 17:53:02 2018 -0500
@@ -122,7 +122,7 @@
$ hg files -R subrepo
[1]
$ hg files -R subrepo -r '.'
- subrepo/b (glob)
+ subrepo/b
$ hg bookmark -r tip @other
$ echo xyz > subrepo/c
--- a/tests/test-merge-tools.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-merge-tools.t Mon Jan 22 17:53:02 2018 -0500
@@ -71,7 +71,7 @@
merging f
warning: conflicts while merging f! (edit, then use 'hg resolve --mark')
0 files updated, 0 files merged, 0 files removed, 1 files unresolved
- use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon
+ use 'hg resolve' to retry unresolved file merges or 'hg merge --abort' to abandon
[1]
$ aftermerge
# cat f
@@ -98,7 +98,7 @@
merging f
merging f failed!
0 files updated, 0 files merged, 0 files removed, 1 files unresolved
- use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon
+ use 'hg resolve' to retry unresolved file merges or 'hg merge --abort' to abandon
[1]
$ aftermerge
# cat f
@@ -120,7 +120,7 @@
merging f
warning: conflicts while merging f! (edit, then use 'hg resolve --mark')
0 files updated, 0 files merged, 0 files removed, 1 files unresolved
- use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon
+ use 'hg resolve' to retry unresolved file merges or 'hg merge --abort' to abandon
[1]
$ rm false
@@ -134,7 +134,7 @@
merging f
warning: conflicts while merging f! (edit, then use 'hg resolve --mark')
0 files updated, 0 files merged, 0 files removed, 1 files unresolved
- use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon
+ use 'hg resolve' to retry unresolved file merges or 'hg merge --abort' to abandon
[1]
$ rmdir false
@@ -170,7 +170,7 @@
merging f
merging f failed!
0 files updated, 0 files merged, 0 files removed, 1 files unresolved
- use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon
+ use 'hg resolve' to retry unresolved file merges or 'hg merge --abort' to abandon
[1]
$ aftermerge
# cat f
@@ -193,7 +193,7 @@
merging f
merging f failed!
0 files updated, 0 files merged, 0 files removed, 1 files unresolved
- use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon
+ use 'hg resolve' to retry unresolved file merges or 'hg merge --abort' to abandon
[1]
$ aftermerge
# cat f
@@ -215,7 +215,7 @@
merging f
merging f failed!
0 files updated, 0 files merged, 0 files removed, 1 files unresolved
- use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon
+ use 'hg resolve' to retry unresolved file merges or 'hg merge --abort' to abandon
[1]
$ aftermerge
# cat f
@@ -238,7 +238,7 @@
merging f
merging f failed!
0 files updated, 0 files merged, 0 files removed, 1 files unresolved
- use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon
+ use 'hg resolve' to retry unresolved file merges or 'hg merge --abort' to abandon
[1]
$ aftermerge
# cat f
@@ -261,7 +261,7 @@
merging f
merging f failed!
0 files updated, 0 files merged, 0 files removed, 1 files unresolved
- use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon
+ use 'hg resolve' to retry unresolved file merges or 'hg merge --abort' to abandon
[1]
$ aftermerge
# cat f
@@ -371,7 +371,7 @@
merging f
merging f failed!
0 files updated, 0 files merged, 0 files removed, 1 files unresolved
- use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon
+ use 'hg resolve' to retry unresolved file merges or 'hg merge --abort' to abandon
[1]
$ aftermerge
# cat f
@@ -397,7 +397,7 @@
couldn't find merge tool true (for pattern f)
merging f failed!
0 files updated, 0 files merged, 0 files removed, 1 files unresolved
- use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon
+ use 'hg resolve' to retry unresolved file merges or 'hg merge --abort' to abandon
[1]
$ aftermerge
# cat f
@@ -423,7 +423,7 @@
couldn't find merge tool true (for pattern f)
merging f failed!
0 files updated, 0 files merged, 0 files removed, 1 files unresolved
- use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon
+ use 'hg resolve' to retry unresolved file merges or 'hg merge --abort' to abandon
[1]
$ aftermerge
# cat f
@@ -449,7 +449,7 @@
merging f
merging f failed!
0 files updated, 0 files merged, 0 files removed, 1 files unresolved
- use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon
+ use 'hg resolve' to retry unresolved file merges or 'hg merge --abort' to abandon
[1]
$ aftermerge
# cat f
@@ -471,7 +471,7 @@
# hg update -C 1
$ hg merge -r 2 --config ui.merge=internal:fail
0 files updated, 0 files merged, 0 files removed, 1 files unresolved
- use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon
+ use 'hg resolve' to retry unresolved file merges or 'hg merge --abort' to abandon
[1]
$ aftermerge
# cat f
@@ -533,7 +533,7 @@
$ hg merge -r 2 --config ui.merge=internal:prompt
keep (l)ocal [working copy], take (o)ther [merge rev], or leave (u)nresolved for f? u
0 files updated, 0 files merged, 0 files removed, 1 files unresolved
- use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon
+ use 'hg resolve' to retry unresolved file merges or 'hg merge --abort' to abandon
[1]
$ aftermerge
# cat f
@@ -557,7 +557,7 @@
> EOF
keep (l)ocal [working copy], take (o)ther [merge rev], or leave (u)nresolved for f? u
0 files updated, 0 files merged, 0 files removed, 1 files unresolved
- use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon
+ use 'hg resolve' to retry unresolved file merges or 'hg merge --abort' to abandon
[1]
$ aftermerge
# cat f
@@ -579,7 +579,7 @@
$ hg merge -r 2 --config ui.merge=internal:prompt --config ui.interactive=true
keep (l)ocal [working copy], take (o)ther [merge rev], or leave (u)nresolved for f?
0 files updated, 0 files merged, 0 files removed, 1 files unresolved
- use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon
+ use 'hg resolve' to retry unresolved file merges or 'hg merge --abort' to abandon
[1]
$ aftermerge
# cat f
@@ -637,7 +637,7 @@
$ hg merge -r 2 --config ui.merge=internal:dump
merging f
0 files updated, 0 files merged, 0 files removed, 1 files unresolved
- use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon
+ use 'hg resolve' to retry unresolved file merges or 'hg merge --abort' to abandon
[1]
$ aftermerge
# cat f
@@ -707,7 +707,7 @@
$ hg merge -r 3 --config ui.merge=internal:forcedump
merging f
0 files updated, 0 files merged, 0 files removed, 1 files unresolved
- use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon
+ use 'hg resolve' to retry unresolved file merges or 'hg merge --abort' to abandon
[1]
$ aftermerge
# cat f
@@ -749,7 +749,7 @@
merging f
merging f failed!
0 files updated, 0 files merged, 0 files removed, 1 files unresolved
- use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon
+ use 'hg resolve' to retry unresolved file merges or 'hg merge --abort' to abandon
[1]
$ aftermerge
# cat f
@@ -775,7 +775,7 @@
merging f
merging f failed!
0 files updated, 0 files merged, 0 files removed, 1 files unresolved
- use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon
+ use 'hg resolve' to retry unresolved file merges or 'hg merge --abort' to abandon
[1]
$ aftermerge
# cat f
@@ -800,7 +800,7 @@
merging f
merging f failed!
0 files updated, 0 files merged, 0 files removed, 1 files unresolved
- use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon
+ use 'hg resolve' to retry unresolved file merges or 'hg merge --abort' to abandon
[1]
$ aftermerge
# cat f
@@ -1240,7 +1240,7 @@
was merge successful (yn)? n
merging f failed!
0 files updated, 0 files merged, 0 files removed, 1 files unresolved
- use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon
+ use 'hg resolve' to retry unresolved file merges or 'hg merge --abort' to abandon
[1]
$ aftermerge
# cat f
@@ -1265,7 +1265,7 @@
warning: internal :merge cannot merge symlinks for f
warning: conflicts while merging f! (edit, then use 'hg resolve --mark')
0 files updated, 0 files merged, 0 files removed, 1 files unresolved
- use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon
+ use 'hg resolve' to retry unresolved file merges or 'hg merge --abort' to abandon
[1]
#endif
--- a/tests/test-merge-types.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-merge-types.t Mon Jan 22 17:53:02 2018 -0500
@@ -42,7 +42,7 @@
warning: internal :merge cannot merge symlinks for a
warning: conflicts while merging a! (edit, then use 'hg resolve --mark')
0 files updated, 0 files merged, 0 files removed, 1 files unresolved
- use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon
+ use 'hg resolve' to retry unresolved file merges or 'hg merge --abort' to abandon
[1]
$ tellmeabout a
@@ -75,7 +75,7 @@
warning: internal :union cannot merge symlinks for a
warning: conflicts while merging a! (edit, then use 'hg resolve --mark')
0 files updated, 0 files merged, 0 files removed, 1 files unresolved
- use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon
+ use 'hg resolve' to retry unresolved file merges or 'hg merge --abort' to abandon
[1]
$ tellmeabout a
@@ -98,7 +98,7 @@
warning: internal :merge3 cannot merge symlinks for a
warning: conflicts while merging a! (edit, then use 'hg resolve --mark')
0 files updated, 0 files merged, 0 files removed, 1 files unresolved
- use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon
+ use 'hg resolve' to retry unresolved file merges or 'hg merge --abort' to abandon
[1]
$ tellmeabout a
@@ -120,7 +120,7 @@
my a@3574f3e69b1c+ other a@521a1e40188f ancestor a@c334dc3be0da
warning: internal :merge-local cannot merge symlinks for a
0 files updated, 0 files merged, 0 files removed, 1 files unresolved
- use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon
+ use 'hg resolve' to retry unresolved file merges or 'hg merge --abort' to abandon
[1]
$ tellmeabout a
@@ -142,7 +142,7 @@
my a@3574f3e69b1c+ other a@521a1e40188f ancestor a@c334dc3be0da
warning: internal :merge-other cannot merge symlinks for a
0 files updated, 0 files merged, 0 files removed, 1 files unresolved
- use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon
+ use 'hg resolve' to retry unresolved file merges or 'hg merge --abort' to abandon
[1]
$ tellmeabout a
@@ -211,7 +211,7 @@
warning: internal :merge cannot merge symlinks for f
warning: conflicts while merging f! (edit, then use 'hg resolve --mark')
0 files updated, 0 files merged, 0 files removed, 1 files unresolved
- use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon
+ use 'hg resolve' to retry unresolved file merges or 'hg merge --abort' to abandon
[1]
$ tellmeabout f
f is a symlink:
@@ -223,7 +223,7 @@
warning: internal :merge cannot merge symlinks for f
warning: conflicts while merging f! (edit, then use 'hg resolve --mark')
0 files updated, 0 files merged, 0 files removed, 1 files unresolved
- use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon
+ use 'hg resolve' to retry unresolved file merges or 'hg merge --abort' to abandon
[1]
$ tellmeabout f
f is a plain file with content:
@@ -250,7 +250,7 @@
warning: internal :merge cannot merge symlinks for f
warning: conflicts while merging f! (edit, then use 'hg resolve --mark')
0 files updated, 0 files merged, 0 files removed, 1 files unresolved
- use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon
+ use 'hg resolve' to retry unresolved file merges or 'hg merge --abort' to abandon
[1]
$ tellmeabout f
f is a symlink:
@@ -262,7 +262,7 @@
warning: internal :merge cannot merge symlinks for f
warning: conflicts while merging f! (edit, then use 'hg resolve --mark')
0 files updated, 0 files merged, 0 files removed, 1 files unresolved
- use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon
+ use 'hg resolve' to retry unresolved file merges or 'hg merge --abort' to abandon
[1]
$ tellmeabout f
f is a plain file with content:
@@ -354,7 +354,7 @@
warning: conflicts while merging b! (edit, then use 'hg resolve --mark')
warning: conflicts while merging bx! (edit, then use 'hg resolve --mark')
3 files updated, 0 files merged, 0 files removed, 6 files unresolved
- use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon
+ use 'hg resolve' to retry unresolved file merges or 'hg merge --abort' to abandon
[1]
$ hg resolve -l
U a
@@ -416,7 +416,7 @@
warning: conflicts while merging b! (edit, then use 'hg resolve --mark')
warning: conflicts while merging bx! (edit, then use 'hg resolve --mark')
3 files updated, 0 files merged, 0 files removed, 6 files unresolved
- use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon
+ use 'hg resolve' to retry unresolved file merges or 'hg merge --abort' to abandon
[1]
$ tellmeabout a
a is a plain file with content:
--- a/tests/test-merge7.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-merge7.t Mon Jan 22 17:53:02 2018 -0500
@@ -47,7 +47,7 @@
merging test.txt
warning: conflicts while merging test.txt! (edit, then use 'hg resolve --mark')
0 files updated, 0 files merged, 0 files removed, 1 files unresolved
- use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon
+ use 'hg resolve' to retry unresolved file merges or 'hg merge --abort' to abandon
[1]
resolve conflict
$ cat >test.txt <<"EOF"
@@ -96,7 +96,7 @@
my test.txt@50c3a7e29886+ other test.txt@40d11a4173a8 ancestor test.txt@96b70246a118
warning: conflicts while merging test.txt! (edit, then use 'hg resolve --mark')
0 files updated, 0 files merged, 0 files removed, 1 files unresolved
- use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon
+ use 'hg resolve' to retry unresolved file merges or 'hg merge --abort' to abandon
[1]
$ cat test.txt
--- a/tests/test-merge9.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-merge9.t Mon Jan 22 17:53:02 2018 -0500
@@ -30,7 +30,7 @@
merging foo and baz to baz
merging bar failed!
1 files updated, 1 files merged, 0 files removed, 1 files unresolved
- use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon
+ use 'hg resolve' to retry unresolved file merges or 'hg merge --abort' to abandon
[1]
$ hg resolve -l
U bar
@@ -44,7 +44,7 @@
merging baz and foo to baz
merging bar failed!
1 files updated, 1 files merged, 0 files removed, 1 files unresolved
- use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon
+ use 'hg resolve' to retry unresolved file merges or 'hg merge --abort' to abandon
[1]
show unresolved
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-minifileset.py Mon Jan 22 17:53:02 2018 -0500
@@ -0,0 +1,38 @@
+from __future__ import absolute_import
+from __future__ import print_function
+
+import os
+import sys
+
+# make it runnable directly without run-tests.py
+sys.path[0:0] = [os.path.join(os.path.dirname(__file__), '..')]
+
+from mercurial import minifileset
+
+def check(text, truecases, falsecases):
+ f = minifileset.compile(text)
+ for args in truecases:
+ if not f(*args):
+ print('unexpected: %r should include %r' % (text, args))
+ for args in falsecases:
+ if f(*args):
+ print('unexpected: %r should exclude %r' % (text, args))
+
+check('all()', [('a.php', 123), ('b.txt', 0)], [])
+check('none()', [], [('a.php', 123), ('b.txt', 0)])
+check('!!!!((!(!!all())))', [], [('a.php', 123), ('b.txt', 0)])
+
+check('"path:a" & (**.b | **.c)', [('a/b.b', 0), ('a/c.c', 0)], [('b/c.c', 0)])
+check('(path:a & **.b) | **.c',
+ [('a/b.b', 0), ('a/c.c', 0), ('b/c.c', 0)], [])
+
+check('**.bin - size("<20B")', [('b.bin', 21)], [('a.bin', 11), ('b.txt', 21)])
+
+check('!!**.bin or size(">20B") + "path:bin" or !size(">10")',
+ [('a.bin', 11), ('b.txt', 21), ('bin/abc', 11)],
+ [('a.notbin', 11), ('b.txt', 11), ('bin2/abc', 11)])
+
+check('(**.php and size(">10KB")) | **.zip | ("path:bin" & !"path:bin/README") '
+ ' | size(">1M")',
+ [('a.php', 15000), ('a.zip', 0), ('bin/a', 0), ('bin/README', 1e7)],
+ [('a.php', 5000), ('b.zip2', 0), ('t/bin/a', 0), ('bin/README', 1)])
--- a/tests/test-mq-header-date.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-mq-header-date.t Mon Jan 22 17:53:02 2018 -0500
@@ -11,11 +11,11 @@
> }
$ catlog() {
> catpatch $1
- > hg log --template "{rev}: {desc} - {author}\n"
+ > hg log --template "{rev}: {node|short} {desc} - {author}\n"
> }
$ catlogd() {
> catpatch $1
- > hg log --template "{rev}: {desc} - {author} - {date}\n"
+ > hg log --template "{rev}: {node|short} {desc} - {author} - {date}\n"
> }
$ drop() {
> hg qpop
@@ -189,7 +189,7 @@
> echo ==== "qpop -a / qpush -a"
> hg qpop -a
> hg qpush -a
- > hg log --template "{rev}: {desc} - {author} - {date}\n"
+ > hg log --template "{rev}: {node|short} {desc} - {author} - {date}\n"
> }
======= plain headers
@@ -202,7 +202,7 @@
==== qnew -d
Date: 3 0
- 0: [mq]: 1.patch - test - 3.00
+ 0: 758bd2596a39 [mq]: 1.patch - test - 3.00
==== qref
adding 1
Date: 3 0
@@ -212,7 +212,7 @@
+++ b/1
@@ -0,0 +1,1 @@
+1
- 0: [mq]: 1.patch - test - 3.00
+ 0: 8c640e9949a8 [mq]: 1.patch - test - 3.00
==== qref -d
Date: 4 0
@@ -221,7 +221,7 @@
+++ b/1
@@ -0,0 +1,1 @@
+1
- 0: [mq]: 1.patch - test - 4.00
+ 0: 4a67dfeea974 [mq]: 1.patch - test - 4.00
==== qnew
adding 2
diff -r ... 2
@@ -229,8 +229,8 @@
+++ b/2
@@ -0,0 +1,1 @@
+2
- 1: [mq]: 2.patch - test
- 0: [mq]: 1.patch - test
+ 1: fc7e8a2f6499 [mq]: 2.patch - test
+ 0: 4a67dfeea974 [mq]: 1.patch - test
==== qref -d
Date: 5 0
@@ -239,8 +239,8 @@
+++ b/2
@@ -0,0 +1,1 @@
+2
- 1: [mq]: 2.patch - test
- 0: [mq]: 1.patch - test
+ 1: 1d9a6a118fd1 [mq]: 2.patch - test
+ 0: 4a67dfeea974 [mq]: 1.patch - test
popping 2.patch
now at: 1.patch
==== qnew -d -m
@@ -248,8 +248,8 @@
Three
- 1: Three - test - 6.00
- 0: [mq]: 1.patch - test - 4.00
+ 1: 2a9ef0bdefba Three - test - 6.00
+ 0: 4a67dfeea974 [mq]: 1.patch - test - 4.00
==== qref
adding 3
Date: 6 0
@@ -261,8 +261,8 @@
+++ b/3
@@ -0,0 +1,1 @@
+3
- 1: Three - test - 6.00
- 0: [mq]: 1.patch - test - 4.00
+ 1: 7f19ad9eea7b Three - test - 6.00
+ 0: 4a67dfeea974 [mq]: 1.patch - test - 4.00
==== qref -m
Date: 6 0
@@ -273,8 +273,8 @@
+++ b/3
@@ -0,0 +1,1 @@
+3
- 1: Drei - test - 6.00
- 0: [mq]: 1.patch - test - 4.00
+ 1: 7ff7377793e3 Drei - test - 6.00
+ 0: 4a67dfeea974 [mq]: 1.patch - test - 4.00
==== qref -d
Date: 7 0
@@ -285,8 +285,8 @@
+++ b/3
@@ -0,0 +1,1 @@
+3
- 1: Drei - test - 7.00
- 0: [mq]: 1.patch - test - 4.00
+ 1: d89d3144f518 Drei - test - 7.00
+ 0: 4a67dfeea974 [mq]: 1.patch - test - 4.00
==== qref -d -m
Date: 8 0
@@ -297,8 +297,8 @@
+++ b/3
@@ -0,0 +1,1 @@
+3
- 1: Three (again) - test - 8.00
- 0: [mq]: 1.patch - test - 4.00
+ 1: b1b6b0fe0e6d Three (again) - test - 8.00
+ 0: 4a67dfeea974 [mq]: 1.patch - test - 4.00
==== qnew -m
adding 4
Four
@@ -308,9 +308,9 @@
+++ b/4
@@ -0,0 +1,1 @@
+4
- 2: Four - test
- 1: Three (again) - test
- 0: [mq]: 1.patch - test
+ 2: 74ded07d166b Four - test
+ 1: b1b6b0fe0e6d Three (again) - test
+ 0: 4a67dfeea974 [mq]: 1.patch - test
==== qref -d
Date: 9 0
@@ -321,9 +321,9 @@
+++ b/4
@@ -0,0 +1,1 @@
+4
- 2: Four - test
- 1: Three (again) - test
- 0: [mq]: 1.patch - test
+ 2: 1a651320cf8e Four - test
+ 1: b1b6b0fe0e6d Three (again) - test
+ 0: 4a67dfeea974 [mq]: 1.patch - test
popping 4.patch
now at: 3.patch
==== qnew with HG header
@@ -331,9 +331,9 @@
now at: 3.patch
# HG changeset patch
# Date 10 0
- 2: imported patch 5.patch - test - 10.00
- 1: Three (again) - test - 8.00
- 0: [mq]: 1.patch - test - 4.00
+ 2: d16a272220d2 imported patch 5.patch - test - 10.00
+ 1: b1b6b0fe0e6d Three (again) - test - 8.00
+ 0: 4a67dfeea974 [mq]: 1.patch - test - 4.00
==== hg qref
adding 5
# HG changeset patch
@@ -345,9 +345,9 @@
+++ b/5
@@ -0,0 +1,1 @@
+5
- 2: [mq]: 5.patch - test - 10.00
- 1: Three (again) - test - 8.00
- 0: [mq]: 1.patch - test - 4.00
+ 2: 5dbf69c07df9 [mq]: 5.patch - test - 10.00
+ 1: b1b6b0fe0e6d Three (again) - test - 8.00
+ 0: 4a67dfeea974 [mq]: 1.patch - test - 4.00
==== hg qref -d
# HG changeset patch
# Date 11 0
@@ -358,19 +358,19 @@
+++ b/5
@@ -0,0 +1,1 @@
+5
- 2: [mq]: 5.patch - test - 11.00
- 1: Three (again) - test - 8.00
- 0: [mq]: 1.patch - test - 4.00
+ 2: 049de6af0c1d [mq]: 5.patch - test - 11.00
+ 1: b1b6b0fe0e6d Three (again) - test - 8.00
+ 0: 4a67dfeea974 [mq]: 1.patch - test - 4.00
==== qnew with plain header
popping 6.patch
now at: 5.patch
now at: 6.patch
Date: 12 0
- 3: imported patch 6.patch - test
- 2: [mq]: 5.patch - test
- 1: Three (again) - test
- 0: [mq]: 1.patch - test
+ 3: 8ad9ebc22b96 imported patch 6.patch - test
+ 2: 049de6af0c1d [mq]: 5.patch - test
+ 1: b1b6b0fe0e6d Three (again) - test
+ 0: 4a67dfeea974 [mq]: 1.patch - test
==== hg qref
adding 6
Date: 12 0
@@ -380,10 +380,10 @@
+++ b/6
@@ -0,0 +1,1 @@
+6
- 3: [mq]: 6.patch - test - 12.00
- 2: [mq]: 5.patch - test - 11.00
- 1: Three (again) - test - 8.00
- 0: [mq]: 1.patch - test - 4.00
+ 3: 038c46b02a56 [mq]: 6.patch - test - 12.00
+ 2: 049de6af0c1d [mq]: 5.patch - test - 11.00
+ 1: b1b6b0fe0e6d Three (again) - test - 8.00
+ 0: 4a67dfeea974 [mq]: 1.patch - test - 4.00
==== hg qref -d
Date: 13 0
@@ -392,10 +392,10 @@
+++ b/6
@@ -0,0 +1,1 @@
+6
- 3: [mq]: 6.patch - test - 13.00
- 2: [mq]: 5.patch - test - 11.00
- 1: Three (again) - test - 8.00
- 0: [mq]: 1.patch - test - 4.00
+ 3: 2785642ea4b4 [mq]: 6.patch - test - 13.00
+ 2: 049de6af0c1d [mq]: 5.patch - test - 11.00
+ 1: b1b6b0fe0e6d Three (again) - test - 8.00
+ 0: 4a67dfeea974 [mq]: 1.patch - test - 4.00
popping 6.patch
now at: 5.patch
==== qnew -u
@@ -407,10 +407,10 @@
+++ b/6
@@ -0,0 +1,1 @@
+6
- 3: [mq]: 6.patch - jane
- 2: [mq]: 5.patch - test
- 1: Three (again) - test
- 0: [mq]: 1.patch - test
+ 3: a05a33f187ce [mq]: 6.patch - jane
+ 2: 049de6af0c1d [mq]: 5.patch - test
+ 1: b1b6b0fe0e6d Three (again) - test
+ 0: 4a67dfeea974 [mq]: 1.patch - test
==== qref -d
From: jane
Date: 12 0
@@ -420,10 +420,10 @@
+++ b/6
@@ -0,0 +1,1 @@
+6
- 3: [mq]: 6.patch - jane
- 2: [mq]: 5.patch - test
- 1: Three (again) - test
- 0: [mq]: 1.patch - test
+ 3: 5702c529dfe9 [mq]: 6.patch - jane
+ 2: 049de6af0c1d [mq]: 5.patch - test
+ 1: b1b6b0fe0e6d Three (again) - test
+ 0: 4a67dfeea974 [mq]: 1.patch - test
popping 6.patch
now at: 5.patch
==== qnew -d
@@ -435,10 +435,10 @@
+++ b/7
@@ -0,0 +1,1 @@
+7
- 3: [mq]: 7.patch - test
- 2: [mq]: 5.patch - test
- 1: Three (again) - test
- 0: [mq]: 1.patch - test
+ 3: 8dd1eb8d4132 [mq]: 7.patch - test
+ 2: 049de6af0c1d [mq]: 5.patch - test
+ 1: b1b6b0fe0e6d Three (again) - test
+ 0: 4a67dfeea974 [mq]: 1.patch - test
==== qref -u
From: john
Date: 13 0
@@ -448,10 +448,10 @@
+++ b/7
@@ -0,0 +1,1 @@
+7
- 3: [mq]: 7.patch - john - 13.00
- 2: [mq]: 5.patch - test - 11.00
- 1: Three (again) - test - 8.00
- 0: [mq]: 1.patch - test - 4.00
+ 3: 4f9d07369cc4 [mq]: 7.patch - john - 13.00
+ 2: 049de6af0c1d [mq]: 5.patch - test - 11.00
+ 1: b1b6b0fe0e6d Three (again) - test - 8.00
+ 0: 4a67dfeea974 [mq]: 1.patch - test - 4.00
==== qnew
adding 8
diff -r ... 8
@@ -459,11 +459,11 @@
+++ b/8
@@ -0,0 +1,1 @@
+8
- 4: [mq]: 8.patch - test
- 3: [mq]: 7.patch - john
- 2: [mq]: 5.patch - test
- 1: Three (again) - test
- 0: [mq]: 1.patch - test
+ 4: 868b62f09492 [mq]: 8.patch - test
+ 3: 4f9d07369cc4 [mq]: 7.patch - john
+ 2: 049de6af0c1d [mq]: 5.patch - test
+ 1: b1b6b0fe0e6d Three (again) - test
+ 0: 4a67dfeea974 [mq]: 1.patch - test
==== qref -u -d
From: john
Date: 14 0
@@ -473,11 +473,11 @@
+++ b/8
@@ -0,0 +1,1 @@
+8
- 4: [mq]: 8.patch - john
- 3: [mq]: 7.patch - john
- 2: [mq]: 5.patch - test
- 1: Three (again) - test
- 0: [mq]: 1.patch - test
+ 4: b1e878ae55b9 [mq]: 8.patch - john
+ 3: 4f9d07369cc4 [mq]: 7.patch - john
+ 2: 049de6af0c1d [mq]: 5.patch - test
+ 1: b1b6b0fe0e6d Three (again) - test
+ 0: 4a67dfeea974 [mq]: 1.patch - test
popping 8.patch
now at: 7.patch
==== qnew -m
@@ -489,11 +489,11 @@
+++ b/9
@@ -0,0 +1,1 @@
+9
- 4: Nine - test
- 3: [mq]: 7.patch - john
- 2: [mq]: 5.patch - test
- 1: Three (again) - test
- 0: [mq]: 1.patch - test
+ 4: 7251936ac2bf Nine - test
+ 3: 4f9d07369cc4 [mq]: 7.patch - john
+ 2: 049de6af0c1d [mq]: 5.patch - test
+ 1: b1b6b0fe0e6d Three (again) - test
+ 0: 4a67dfeea974 [mq]: 1.patch - test
==== qref -u -d
From: john
Date: 15 0
@@ -505,11 +505,11 @@
+++ b/9
@@ -0,0 +1,1 @@
+9
- 4: Nine - john
- 3: [mq]: 7.patch - john
- 2: [mq]: 5.patch - test
- 1: Three (again) - test
- 0: [mq]: 1.patch - test
+ 4: a0de5bf6e9f7 Nine - john
+ 3: 4f9d07369cc4 [mq]: 7.patch - john
+ 2: 049de6af0c1d [mq]: 5.patch - test
+ 1: b1b6b0fe0e6d Three (again) - test
+ 0: 4a67dfeea974 [mq]: 1.patch - test
popping 9.patch
now at: 7.patch
==== qpop -a / qpush -a
@@ -523,10 +523,10 @@
applying 5.patch
applying 7.patch
now at: 7.patch
- 3: imported patch 7.patch - john - 13.00
- 2: imported patch 5.patch - test - 11.00
- 1: Three (again) - test - 8.00
- 0: imported patch 1.patch - test - 4.00
+ 3: d26a5b7ffce1 imported patch 7.patch - john - 13.00
+ 2: dda6cf77060a imported patch 5.patch - test - 11.00
+ 1: 25e32d66c8c7 Three (again) - test - 8.00
+ 0: e5011c0211fe imported patch 1.patch - test - 4.00
$ rm -r sandbox
======= hg headers
@@ -540,7 +540,7 @@
# Date 3 0
# Parent
- 0: [mq]: 1.patch - test - 3.00
+ 0: 758bd2596a39 [mq]: 1.patch - test - 3.00
==== qref
adding 1
# HG changeset patch
@@ -552,7 +552,7 @@
+++ b/1
@@ -0,0 +1,1 @@
+1
- 0: [mq]: 1.patch - test - 3.00
+ 0: 8c640e9949a8 [mq]: 1.patch - test - 3.00
==== qref -d
# HG changeset patch
# Date 4 0
@@ -563,7 +563,7 @@
+++ b/1
@@ -0,0 +1,1 @@
+1
- 0: [mq]: 1.patch - test - 4.00
+ 0: 4a67dfeea974 [mq]: 1.patch - test - 4.00
==== qnew
adding 2
# HG changeset patch
@@ -574,8 +574,8 @@
+++ b/2
@@ -0,0 +1,1 @@
+2
- 1: [mq]: 2.patch - test
- 0: [mq]: 1.patch - test
+ 1: fc7e8a2f6499 [mq]: 2.patch - test
+ 0: 4a67dfeea974 [mq]: 1.patch - test
==== qref -d
# HG changeset patch
# Date 5 0
@@ -586,8 +586,8 @@
+++ b/2
@@ -0,0 +1,1 @@
+2
- 1: [mq]: 2.patch - test
- 0: [mq]: 1.patch - test
+ 1: 1d9a6a118fd1 [mq]: 2.patch - test
+ 0: 4a67dfeea974 [mq]: 1.patch - test
popping 2.patch
now at: 1.patch
==== qnew -d -m
@@ -596,8 +596,8 @@
# Parent
Three
- 1: Three - test - 6.00
- 0: [mq]: 1.patch - test - 4.00
+ 1: 2a9ef0bdefba Three - test - 6.00
+ 0: 4a67dfeea974 [mq]: 1.patch - test - 4.00
==== qref
adding 3
# HG changeset patch
@@ -610,8 +610,8 @@
+++ b/3
@@ -0,0 +1,1 @@
+3
- 1: Three - test - 6.00
- 0: [mq]: 1.patch - test - 4.00
+ 1: 7f19ad9eea7b Three - test - 6.00
+ 0: 4a67dfeea974 [mq]: 1.patch - test - 4.00
==== qref -m
# HG changeset patch
# Date 6 0
@@ -623,8 +623,8 @@
+++ b/3
@@ -0,0 +1,1 @@
+3
- 1: Drei - test - 6.00
- 0: [mq]: 1.patch - test - 4.00
+ 1: 7ff7377793e3 Drei - test - 6.00
+ 0: 4a67dfeea974 [mq]: 1.patch - test - 4.00
==== qref -d
# HG changeset patch
# Date 7 0
@@ -636,8 +636,8 @@
+++ b/3
@@ -0,0 +1,1 @@
+3
- 1: Drei - test - 7.00
- 0: [mq]: 1.patch - test - 4.00
+ 1: d89d3144f518 Drei - test - 7.00
+ 0: 4a67dfeea974 [mq]: 1.patch - test - 4.00
==== qref -d -m
# HG changeset patch
# Date 8 0
@@ -649,8 +649,8 @@
+++ b/3
@@ -0,0 +1,1 @@
+3
- 1: Three (again) - test - 8.00
- 0: [mq]: 1.patch - test - 4.00
+ 1: b1b6b0fe0e6d Three (again) - test - 8.00
+ 0: 4a67dfeea974 [mq]: 1.patch - test - 4.00
==== qnew -m
adding 4
# HG changeset patch
@@ -662,9 +662,9 @@
+++ b/4
@@ -0,0 +1,1 @@
+4
- 2: Four - test
- 1: Three (again) - test
- 0: [mq]: 1.patch - test
+ 2: 74ded07d166b Four - test
+ 1: b1b6b0fe0e6d Three (again) - test
+ 0: 4a67dfeea974 [mq]: 1.patch - test
==== qref -d
# HG changeset patch
# Date 9 0
@@ -676,9 +676,9 @@
+++ b/4
@@ -0,0 +1,1 @@
+4
- 2: Four - test
- 1: Three (again) - test
- 0: [mq]: 1.patch - test
+ 2: 1a651320cf8e Four - test
+ 1: b1b6b0fe0e6d Three (again) - test
+ 0: 4a67dfeea974 [mq]: 1.patch - test
popping 4.patch
now at: 3.patch
==== qnew with HG header
@@ -686,9 +686,9 @@
now at: 3.patch
# HG changeset patch
# Date 10 0
- 2: imported patch 5.patch - test - 10.00
- 1: Three (again) - test - 8.00
- 0: [mq]: 1.patch - test - 4.00
+ 2: d16a272220d2 imported patch 5.patch - test - 10.00
+ 1: b1b6b0fe0e6d Three (again) - test - 8.00
+ 0: 4a67dfeea974 [mq]: 1.patch - test - 4.00
==== hg qref
adding 5
# HG changeset patch
@@ -700,9 +700,9 @@
+++ b/5
@@ -0,0 +1,1 @@
+5
- 2: [mq]: 5.patch - test - 10.00
- 1: Three (again) - test - 8.00
- 0: [mq]: 1.patch - test - 4.00
+ 2: 5dbf69c07df9 [mq]: 5.patch - test - 10.00
+ 1: b1b6b0fe0e6d Three (again) - test - 8.00
+ 0: 4a67dfeea974 [mq]: 1.patch - test - 4.00
==== hg qref -d
# HG changeset patch
# Date 11 0
@@ -713,19 +713,19 @@
+++ b/5
@@ -0,0 +1,1 @@
+5
- 2: [mq]: 5.patch - test - 11.00
- 1: Three (again) - test - 8.00
- 0: [mq]: 1.patch - test - 4.00
+ 2: 049de6af0c1d [mq]: 5.patch - test - 11.00
+ 1: b1b6b0fe0e6d Three (again) - test - 8.00
+ 0: 4a67dfeea974 [mq]: 1.patch - test - 4.00
==== qnew with plain header
popping 6.patch
now at: 5.patch
now at: 6.patch
Date: 12 0
- 3: imported patch 6.patch - test
- 2: [mq]: 5.patch - test
- 1: Three (again) - test
- 0: [mq]: 1.patch - test
+ 3: 8ad9ebc22b96 imported patch 6.patch - test
+ 2: 049de6af0c1d [mq]: 5.patch - test
+ 1: b1b6b0fe0e6d Three (again) - test
+ 0: 4a67dfeea974 [mq]: 1.patch - test
==== hg qref
adding 6
Date: 12 0
@@ -735,10 +735,10 @@
+++ b/6
@@ -0,0 +1,1 @@
+6
- 3: [mq]: 6.patch - test - 12.00
- 2: [mq]: 5.patch - test - 11.00
- 1: Three (again) - test - 8.00
- 0: [mq]: 1.patch - test - 4.00
+ 3: 038c46b02a56 [mq]: 6.patch - test - 12.00
+ 2: 049de6af0c1d [mq]: 5.patch - test - 11.00
+ 1: b1b6b0fe0e6d Three (again) - test - 8.00
+ 0: 4a67dfeea974 [mq]: 1.patch - test - 4.00
==== hg qref -d
Date: 13 0
@@ -747,10 +747,10 @@
+++ b/6
@@ -0,0 +1,1 @@
+6
- 3: [mq]: 6.patch - test - 13.00
- 2: [mq]: 5.patch - test - 11.00
- 1: Three (again) - test - 8.00
- 0: [mq]: 1.patch - test - 4.00
+ 3: 2785642ea4b4 [mq]: 6.patch - test - 13.00
+ 2: 049de6af0c1d [mq]: 5.patch - test - 11.00
+ 1: b1b6b0fe0e6d Three (again) - test - 8.00
+ 0: 4a67dfeea974 [mq]: 1.patch - test - 4.00
popping 6.patch
now at: 5.patch
==== qnew -u
@@ -764,10 +764,10 @@
+++ b/6
@@ -0,0 +1,1 @@
+6
- 3: [mq]: 6.patch - jane
- 2: [mq]: 5.patch - test
- 1: Three (again) - test
- 0: [mq]: 1.patch - test
+ 3: a05a33f187ce [mq]: 6.patch - jane
+ 2: 049de6af0c1d [mq]: 5.patch - test
+ 1: b1b6b0fe0e6d Three (again) - test
+ 0: 4a67dfeea974 [mq]: 1.patch - test
==== qref -d
# HG changeset patch
# User jane
@@ -779,10 +779,10 @@
+++ b/6
@@ -0,0 +1,1 @@
+6
- 3: [mq]: 6.patch - jane
- 2: [mq]: 5.patch - test
- 1: Three (again) - test
- 0: [mq]: 1.patch - test
+ 3: 5702c529dfe9 [mq]: 6.patch - jane
+ 2: 049de6af0c1d [mq]: 5.patch - test
+ 1: b1b6b0fe0e6d Three (again) - test
+ 0: 4a67dfeea974 [mq]: 1.patch - test
popping 6.patch
now at: 5.patch
==== qnew -d
@@ -796,10 +796,10 @@
+++ b/7
@@ -0,0 +1,1 @@
+7
- 3: [mq]: 7.patch - test
- 2: [mq]: 5.patch - test
- 1: Three (again) - test
- 0: [mq]: 1.patch - test
+ 3: 8dd1eb8d4132 [mq]: 7.patch - test
+ 2: 049de6af0c1d [mq]: 5.patch - test
+ 1: b1b6b0fe0e6d Three (again) - test
+ 0: 4a67dfeea974 [mq]: 1.patch - test
==== qref -u
# HG changeset patch
# User john
@@ -811,10 +811,10 @@
+++ b/7
@@ -0,0 +1,1 @@
+7
- 3: [mq]: 7.patch - john - 13.00
- 2: [mq]: 5.patch - test - 11.00
- 1: Three (again) - test - 8.00
- 0: [mq]: 1.patch - test - 4.00
+ 3: 4f9d07369cc4 [mq]: 7.patch - john - 13.00
+ 2: 049de6af0c1d [mq]: 5.patch - test - 11.00
+ 1: b1b6b0fe0e6d Three (again) - test - 8.00
+ 0: 4a67dfeea974 [mq]: 1.patch - test - 4.00
==== qnew
adding 8
# HG changeset patch
@@ -825,11 +825,11 @@
+++ b/8
@@ -0,0 +1,1 @@
+8
- 4: [mq]: 8.patch - test
- 3: [mq]: 7.patch - john
- 2: [mq]: 5.patch - test
- 1: Three (again) - test
- 0: [mq]: 1.patch - test
+ 4: 868b62f09492 [mq]: 8.patch - test
+ 3: 4f9d07369cc4 [mq]: 7.patch - john
+ 2: 049de6af0c1d [mq]: 5.patch - test
+ 1: b1b6b0fe0e6d Three (again) - test
+ 0: 4a67dfeea974 [mq]: 1.patch - test
==== qref -u -d
# HG changeset patch
# User john
@@ -841,11 +841,11 @@
+++ b/8
@@ -0,0 +1,1 @@
+8
- 4: [mq]: 8.patch - john
- 3: [mq]: 7.patch - john
- 2: [mq]: 5.patch - test
- 1: Three (again) - test
- 0: [mq]: 1.patch - test
+ 4: b1e878ae55b9 [mq]: 8.patch - john
+ 3: 4f9d07369cc4 [mq]: 7.patch - john
+ 2: 049de6af0c1d [mq]: 5.patch - test
+ 1: b1b6b0fe0e6d Three (again) - test
+ 0: 4a67dfeea974 [mq]: 1.patch - test
popping 8.patch
now at: 7.patch
==== qnew -m
@@ -859,11 +859,11 @@
+++ b/9
@@ -0,0 +1,1 @@
+9
- 4: Nine - test
- 3: [mq]: 7.patch - john
- 2: [mq]: 5.patch - test
- 1: Three (again) - test
- 0: [mq]: 1.patch - test
+ 4: 7251936ac2bf Nine - test
+ 3: 4f9d07369cc4 [mq]: 7.patch - john
+ 2: 049de6af0c1d [mq]: 5.patch - test
+ 1: b1b6b0fe0e6d Three (again) - test
+ 0: 4a67dfeea974 [mq]: 1.patch - test
==== qref -u -d
# HG changeset patch
# User john
@@ -876,11 +876,11 @@
+++ b/9
@@ -0,0 +1,1 @@
+9
- 4: Nine - john
- 3: [mq]: 7.patch - john
- 2: [mq]: 5.patch - test
- 1: Three (again) - test
- 0: [mq]: 1.patch - test
+ 4: a0de5bf6e9f7 Nine - john
+ 3: 4f9d07369cc4 [mq]: 7.patch - john
+ 2: 049de6af0c1d [mq]: 5.patch - test
+ 1: b1b6b0fe0e6d Three (again) - test
+ 0: 4a67dfeea974 [mq]: 1.patch - test
popping 9.patch
now at: 7.patch
==== qpop -a / qpush -a
@@ -894,8 +894,8 @@
applying 5.patch
applying 7.patch
now at: 7.patch
- 3: imported patch 7.patch - john - 13.00
- 2: imported patch 5.patch - test - 11.00
- 1: Three (again) - test - 8.00
- 0: imported patch 1.patch - test - 4.00
+ 3: d26a5b7ffce1 imported patch 7.patch - john - 13.00
+ 2: dda6cf77060a imported patch 5.patch - test - 11.00
+ 1: 25e32d66c8c7 Three (again) - test - 8.00
+ 0: e5011c0211fe imported patch 1.patch - test - 4.00
$ rm -r sandbox
--- a/tests/test-mq-header-from.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-mq-header-from.t Mon Jan 22 17:53:02 2018 -0500
@@ -6,7 +6,7 @@
$ catlog() {
> cat .hg/patches/$1.patch | sed -e "s/^diff \-r [0-9a-f]* /diff -r ... /" \
> -e "s/^\(# Parent \).*/\1/"
- > hg log --template "{rev}: {desc} - {author}\n"
+ > hg log --template "{rev}: {node|short} {desc} - {author}\n"
> }
$ runtest() {
> echo ==== init
@@ -122,7 +122,7 @@
> echo ==== "qpop -a / qpush -a"
> hg qpop -a
> hg qpush -a
- > hg log --template "{rev}: {desc} - {author}\n"
+ > hg log --template "{rev}: {node|short} {desc} - {author}\n"
> }
======= plain headers
@@ -135,7 +135,7 @@
==== qnew -U
From: test
- 0: [mq]: 1.patch - test
+ 0: a054644889e5 [mq]: 1.patch - test
==== qref
adding 1
From: test
@@ -145,7 +145,7 @@
+++ b/1
@@ -0,0 +1,1 @@
+1
- 0: [mq]: 1.patch - test
+ 0: 2905f1e46ee0 [mq]: 1.patch - test
==== qref -u
From: mary
@@ -154,7 +154,7 @@
+++ b/1
@@ -0,0 +1,1 @@
+1
- 0: [mq]: 1.patch - mary
+ 0: 3682f830e656 [mq]: 1.patch - mary
==== qnew
adding 2
diff -r ... 2
@@ -162,8 +162,8 @@
+++ b/2
@@ -0,0 +1,1 @@
+2
- 1: [mq]: 2.patch - test
- 0: [mq]: 1.patch - mary
+ 1: 527f98a12a7a [mq]: 2.patch - test
+ 0: 3682f830e656 [mq]: 1.patch - mary
==== qref -u
From: jane
@@ -172,16 +172,16 @@
+++ b/2
@@ -0,0 +1,1 @@
+2
- 1: [mq]: 2.patch - jane
- 0: [mq]: 1.patch - mary
+ 1: a425cde5f493 [mq]: 2.patch - jane
+ 0: 3682f830e656 [mq]: 1.patch - mary
==== qnew -U -m
From: test
Three
- 2: Three - test
- 1: [mq]: 2.patch - jane
- 0: [mq]: 1.patch - mary
+ 2: 0ffa16a9088e Three - test
+ 1: a425cde5f493 [mq]: 2.patch - jane
+ 0: 3682f830e656 [mq]: 1.patch - mary
==== qref
adding 3
From: test
@@ -193,9 +193,9 @@
+++ b/3
@@ -0,0 +1,1 @@
+3
- 2: Three - test
- 1: [mq]: 2.patch - jane
- 0: [mq]: 1.patch - mary
+ 2: 83f1290c6086 Three - test
+ 1: a425cde5f493 [mq]: 2.patch - jane
+ 0: 3682f830e656 [mq]: 1.patch - mary
==== qref -m
From: test
@@ -206,9 +206,9 @@
+++ b/3
@@ -0,0 +1,1 @@
+3
- 2: Drei - test
- 1: [mq]: 2.patch - jane
- 0: [mq]: 1.patch - mary
+ 2: b0d856328d4d Drei - test
+ 1: a425cde5f493 [mq]: 2.patch - jane
+ 0: 3682f830e656 [mq]: 1.patch - mary
==== qref -u
From: mary
@@ -219,9 +219,9 @@
+++ b/3
@@ -0,0 +1,1 @@
+3
- 2: Drei - mary
- 1: [mq]: 2.patch - jane
- 0: [mq]: 1.patch - mary
+ 2: bb9d4b28e6a6 Drei - mary
+ 1: a425cde5f493 [mq]: 2.patch - jane
+ 0: 3682f830e656 [mq]: 1.patch - mary
==== qref -u -m
From: maria
@@ -232,9 +232,9 @@
+++ b/3
@@ -0,0 +1,1 @@
+3
- 2: Three (again) - maria
- 1: [mq]: 2.patch - jane
- 0: [mq]: 1.patch - mary
+ 2: a6686ee84fc3 Three (again) - maria
+ 1: a425cde5f493 [mq]: 2.patch - jane
+ 0: 3682f830e656 [mq]: 1.patch - mary
==== qnew -m
adding 4of
Four
@@ -244,10 +244,10 @@
+++ b/4of
@@ -0,0 +1,1 @@
+4 t
- 3: Four - test
- 2: Three (again) - maria
- 1: [mq]: 2.patch - jane
- 0: [mq]: 1.patch - mary
+ 3: b9f922d0da40 Four - test
+ 2: a6686ee84fc3 Three (again) - maria
+ 1: a425cde5f493 [mq]: 2.patch - jane
+ 0: 3682f830e656 [mq]: 1.patch - mary
==== qref -u
From: jane
@@ -258,21 +258,21 @@
+++ b/4of
@@ -0,0 +1,1 @@
+4 t
- 3: Four - jane
- 2: Three (again) - maria
- 1: [mq]: 2.patch - jane
- 0: [mq]: 1.patch - mary
+ 3: 0dfb3111e7ee Four - jane
+ 2: a6686ee84fc3 Three (again) - maria
+ 1: a425cde5f493 [mq]: 2.patch - jane
+ 0: 3682f830e656 [mq]: 1.patch - mary
==== qnew with HG header
popping 5.patch
now at: 4.patch
now at: 5.patch
# HG changeset patch
# User johndoe
- 4: imported patch 5.patch - johndoe
- 3: Four - jane
- 2: Three (again) - maria
- 1: [mq]: 2.patch - jane
- 0: [mq]: 1.patch - mary
+ 4: 72bc4a0467ef imported patch 5.patch - johndoe
+ 3: 0dfb3111e7ee Four - jane
+ 2: a6686ee84fc3 Three (again) - maria
+ 1: a425cde5f493 [mq]: 2.patch - jane
+ 0: 3682f830e656 [mq]: 1.patch - mary
==== hg qref
adding 5
# HG changeset patch
@@ -284,11 +284,11 @@
+++ b/5
@@ -0,0 +1,1 @@
+5
- 4: [mq]: 5.patch - johndoe
- 3: Four - jane
- 2: Three (again) - maria
- 1: [mq]: 2.patch - jane
- 0: [mq]: 1.patch - mary
+ 4: ff5c06112e5a [mq]: 5.patch - johndoe
+ 3: 0dfb3111e7ee Four - jane
+ 2: a6686ee84fc3 Three (again) - maria
+ 1: a425cde5f493 [mq]: 2.patch - jane
+ 0: 3682f830e656 [mq]: 1.patch - mary
==== hg qref -U
# HG changeset patch
# User test
@@ -299,11 +299,11 @@
+++ b/5
@@ -0,0 +1,1 @@
+5
- 4: [mq]: 5.patch - test
- 3: Four - jane
- 2: Three (again) - maria
- 1: [mq]: 2.patch - jane
- 0: [mq]: 1.patch - mary
+ 4: c947416c22b1 [mq]: 5.patch - test
+ 3: 0dfb3111e7ee Four - jane
+ 2: a6686ee84fc3 Three (again) - maria
+ 1: a425cde5f493 [mq]: 2.patch - jane
+ 0: 3682f830e656 [mq]: 1.patch - mary
==== hg qref -u
# HG changeset patch
# User johndeere
@@ -314,23 +314,23 @@
+++ b/5
@@ -0,0 +1,1 @@
+5
- 4: [mq]: 5.patch - johndeere
- 3: Four - jane
- 2: Three (again) - maria
- 1: [mq]: 2.patch - jane
- 0: [mq]: 1.patch - mary
+ 4: 1d898e201a22 [mq]: 5.patch - johndeere
+ 3: 0dfb3111e7ee Four - jane
+ 2: a6686ee84fc3 Three (again) - maria
+ 1: a425cde5f493 [mq]: 2.patch - jane
+ 0: 3682f830e656 [mq]: 1.patch - mary
==== qnew with plain header
popping 6.patch
now at: 5.patch
now at: 6.patch
From: test
- 5: imported patch 6.patch - test
- 4: [mq]: 5.patch - johndeere
- 3: Four - jane
- 2: Three (again) - maria
- 1: [mq]: 2.patch - jane
- 0: [mq]: 1.patch - mary
+ 5: 7825a18ec839 imported patch 6.patch - test
+ 4: 1d898e201a22 [mq]: 5.patch - johndeere
+ 3: 0dfb3111e7ee Four - jane
+ 2: a6686ee84fc3 Three (again) - maria
+ 1: a425cde5f493 [mq]: 2.patch - jane
+ 0: 3682f830e656 [mq]: 1.patch - mary
==== hg qref
adding 6
From: test
@@ -340,12 +340,12 @@
+++ b/6
@@ -0,0 +1,1 @@
+6
- 5: [mq]: 6.patch - test
- 4: [mq]: 5.patch - johndeere
- 3: Four - jane
- 2: Three (again) - maria
- 1: [mq]: 2.patch - jane
- 0: [mq]: 1.patch - mary
+ 5: 09d19592680d [mq]: 6.patch - test
+ 4: 1d898e201a22 [mq]: 5.patch - johndeere
+ 3: 0dfb3111e7ee Four - jane
+ 2: a6686ee84fc3 Three (again) - maria
+ 1: a425cde5f493 [mq]: 2.patch - jane
+ 0: 3682f830e656 [mq]: 1.patch - mary
==== hg qref -U
From: test
@@ -354,12 +354,12 @@
+++ b/6
@@ -0,0 +1,1 @@
+6
- 5: [mq]: 6.patch - test
- 4: [mq]: 5.patch - johndeere
- 3: Four - jane
- 2: Three (again) - maria
- 1: [mq]: 2.patch - jane
- 0: [mq]: 1.patch - mary
+ 5: 09d19592680d [mq]: 6.patch - test
+ 4: 1d898e201a22 [mq]: 5.patch - johndeere
+ 3: 0dfb3111e7ee Four - jane
+ 2: a6686ee84fc3 Three (again) - maria
+ 1: a425cde5f493 [mq]: 2.patch - jane
+ 0: 3682f830e656 [mq]: 1.patch - mary
==== hg qref -u
From: johndeere
@@ -368,12 +368,12 @@
+++ b/6
@@ -0,0 +1,1 @@
+6
- 5: [mq]: 6.patch - johndeere
- 4: [mq]: 5.patch - johndeere
- 3: Four - jane
- 2: Three (again) - maria
- 1: [mq]: 2.patch - jane
- 0: [mq]: 1.patch - mary
+ 5: 32641ee07196 [mq]: 6.patch - johndeere
+ 4: 1d898e201a22 [mq]: 5.patch - johndeere
+ 3: 0dfb3111e7ee Four - jane
+ 2: a6686ee84fc3 Three (again) - maria
+ 1: a425cde5f493 [mq]: 2.patch - jane
+ 0: 3682f830e656 [mq]: 1.patch - mary
==== qpop -a / qpush -a
popping 6.patch
popping 5.patch
@@ -389,12 +389,12 @@
applying 5.patch
applying 6.patch
now at: 6.patch
- 5: imported patch 6.patch - johndeere
- 4: imported patch 5.patch - johndeere
- 3: Four - jane
- 2: Three (again) - maria
- 1: imported patch 2.patch - jane
- 0: imported patch 1.patch - mary
+ 5: 1fb083c80457 imported patch 6.patch - johndeere
+ 4: 7e96e969691d imported patch 5.patch - johndeere
+ 3: c7147533d3cd Four - jane
+ 2: b383d04401ea Three (again) - maria
+ 1: fac2da4efc3c imported patch 2.patch - jane
+ 0: b6e237e8771b imported patch 1.patch - mary
$ rm -r sandbox
======= hg headers
@@ -408,7 +408,7 @@
# User test
# Parent
- 0: [mq]: 1.patch - test
+ 0: a054644889e5 [mq]: 1.patch - test
==== qref
adding 1
# HG changeset patch
@@ -420,7 +420,7 @@
+++ b/1
@@ -0,0 +1,1 @@
+1
- 0: [mq]: 1.patch - test
+ 0: 2905f1e46ee0 [mq]: 1.patch - test
==== qref -u
# HG changeset patch
# User mary
@@ -431,7 +431,7 @@
+++ b/1
@@ -0,0 +1,1 @@
+1
- 0: [mq]: 1.patch - mary
+ 0: 3682f830e656 [mq]: 1.patch - mary
==== qnew
adding 2
# HG changeset patch
@@ -442,8 +442,8 @@
+++ b/2
@@ -0,0 +1,1 @@
+2
- 1: [mq]: 2.patch - test
- 0: [mq]: 1.patch - mary
+ 1: 527f98a12a7a [mq]: 2.patch - test
+ 0: 3682f830e656 [mq]: 1.patch - mary
==== qref -u
# HG changeset patch
# User jane
@@ -454,17 +454,17 @@
+++ b/2
@@ -0,0 +1,1 @@
+2
- 1: [mq]: 2.patch - jane
- 0: [mq]: 1.patch - mary
+ 1: a425cde5f493 [mq]: 2.patch - jane
+ 0: 3682f830e656 [mq]: 1.patch - mary
==== qnew -U -m
# HG changeset patch
# User test
# Parent
Three
- 2: Three - test
- 1: [mq]: 2.patch - jane
- 0: [mq]: 1.patch - mary
+ 2: 0ffa16a9088e Three - test
+ 1: a425cde5f493 [mq]: 2.patch - jane
+ 0: 3682f830e656 [mq]: 1.patch - mary
==== qref
adding 3
# HG changeset patch
@@ -477,9 +477,9 @@
+++ b/3
@@ -0,0 +1,1 @@
+3
- 2: Three - test
- 1: [mq]: 2.patch - jane
- 0: [mq]: 1.patch - mary
+ 2: 83f1290c6086 Three - test
+ 1: a425cde5f493 [mq]: 2.patch - jane
+ 0: 3682f830e656 [mq]: 1.patch - mary
==== qref -m
# HG changeset patch
# User test
@@ -491,9 +491,9 @@
+++ b/3
@@ -0,0 +1,1 @@
+3
- 2: Drei - test
- 1: [mq]: 2.patch - jane
- 0: [mq]: 1.patch - mary
+ 2: b0d856328d4d Drei - test
+ 1: a425cde5f493 [mq]: 2.patch - jane
+ 0: 3682f830e656 [mq]: 1.patch - mary
==== qref -u
# HG changeset patch
# User mary
@@ -505,9 +505,9 @@
+++ b/3
@@ -0,0 +1,1 @@
+3
- 2: Drei - mary
- 1: [mq]: 2.patch - jane
- 0: [mq]: 1.patch - mary
+ 2: bb9d4b28e6a6 Drei - mary
+ 1: a425cde5f493 [mq]: 2.patch - jane
+ 0: 3682f830e656 [mq]: 1.patch - mary
==== qref -u -m
# HG changeset patch
# User maria
@@ -519,9 +519,9 @@
+++ b/3
@@ -0,0 +1,1 @@
+3
- 2: Three (again) - maria
- 1: [mq]: 2.patch - jane
- 0: [mq]: 1.patch - mary
+ 2: a6686ee84fc3 Three (again) - maria
+ 1: a425cde5f493 [mq]: 2.patch - jane
+ 0: 3682f830e656 [mq]: 1.patch - mary
==== qnew -m
adding 4of
# HG changeset patch
@@ -533,10 +533,10 @@
+++ b/4of
@@ -0,0 +1,1 @@
+4 t
- 3: Four - test
- 2: Three (again) - maria
- 1: [mq]: 2.patch - jane
- 0: [mq]: 1.patch - mary
+ 3: b9f922d0da40 Four - test
+ 2: a6686ee84fc3 Three (again) - maria
+ 1: a425cde5f493 [mq]: 2.patch - jane
+ 0: 3682f830e656 [mq]: 1.patch - mary
==== qref -u
# HG changeset patch
# User jane
@@ -548,21 +548,21 @@
+++ b/4of
@@ -0,0 +1,1 @@
+4 t
- 3: Four - jane
- 2: Three (again) - maria
- 1: [mq]: 2.patch - jane
- 0: [mq]: 1.patch - mary
+ 3: 0dfb3111e7ee Four - jane
+ 2: a6686ee84fc3 Three (again) - maria
+ 1: a425cde5f493 [mq]: 2.patch - jane
+ 0: 3682f830e656 [mq]: 1.patch - mary
==== qnew with HG header
popping 5.patch
now at: 4.patch
now at: 5.patch
# HG changeset patch
# User johndoe
- 4: imported patch 5.patch - johndoe
- 3: Four - jane
- 2: Three (again) - maria
- 1: [mq]: 2.patch - jane
- 0: [mq]: 1.patch - mary
+ 4: 72bc4a0467ef imported patch 5.patch - johndoe
+ 3: 0dfb3111e7ee Four - jane
+ 2: a6686ee84fc3 Three (again) - maria
+ 1: a425cde5f493 [mq]: 2.patch - jane
+ 0: 3682f830e656 [mq]: 1.patch - mary
==== hg qref
adding 5
# HG changeset patch
@@ -574,11 +574,11 @@
+++ b/5
@@ -0,0 +1,1 @@
+5
- 4: [mq]: 5.patch - johndoe
- 3: Four - jane
- 2: Three (again) - maria
- 1: [mq]: 2.patch - jane
- 0: [mq]: 1.patch - mary
+ 4: ff5c06112e5a [mq]: 5.patch - johndoe
+ 3: 0dfb3111e7ee Four - jane
+ 2: a6686ee84fc3 Three (again) - maria
+ 1: a425cde5f493 [mq]: 2.patch - jane
+ 0: 3682f830e656 [mq]: 1.patch - mary
==== hg qref -U
# HG changeset patch
# User test
@@ -589,11 +589,11 @@
+++ b/5
@@ -0,0 +1,1 @@
+5
- 4: [mq]: 5.patch - test
- 3: Four - jane
- 2: Three (again) - maria
- 1: [mq]: 2.patch - jane
- 0: [mq]: 1.patch - mary
+ 4: c947416c22b1 [mq]: 5.patch - test
+ 3: 0dfb3111e7ee Four - jane
+ 2: a6686ee84fc3 Three (again) - maria
+ 1: a425cde5f493 [mq]: 2.patch - jane
+ 0: 3682f830e656 [mq]: 1.patch - mary
==== hg qref -u
# HG changeset patch
# User johndeere
@@ -604,23 +604,23 @@
+++ b/5
@@ -0,0 +1,1 @@
+5
- 4: [mq]: 5.patch - johndeere
- 3: Four - jane
- 2: Three (again) - maria
- 1: [mq]: 2.patch - jane
- 0: [mq]: 1.patch - mary
+ 4: 1d898e201a22 [mq]: 5.patch - johndeere
+ 3: 0dfb3111e7ee Four - jane
+ 2: a6686ee84fc3 Three (again) - maria
+ 1: a425cde5f493 [mq]: 2.patch - jane
+ 0: 3682f830e656 [mq]: 1.patch - mary
==== qnew with plain header
popping 6.patch
now at: 5.patch
now at: 6.patch
From: test
- 5: imported patch 6.patch - test
- 4: [mq]: 5.patch - johndeere
- 3: Four - jane
- 2: Three (again) - maria
- 1: [mq]: 2.patch - jane
- 0: [mq]: 1.patch - mary
+ 5: 7825a18ec839 imported patch 6.patch - test
+ 4: 1d898e201a22 [mq]: 5.patch - johndeere
+ 3: 0dfb3111e7ee Four - jane
+ 2: a6686ee84fc3 Three (again) - maria
+ 1: a425cde5f493 [mq]: 2.patch - jane
+ 0: 3682f830e656 [mq]: 1.patch - mary
==== hg qref
adding 6
From: test
@@ -630,12 +630,12 @@
+++ b/6
@@ -0,0 +1,1 @@
+6
- 5: [mq]: 6.patch - test
- 4: [mq]: 5.patch - johndeere
- 3: Four - jane
- 2: Three (again) - maria
- 1: [mq]: 2.patch - jane
- 0: [mq]: 1.patch - mary
+ 5: 09d19592680d [mq]: 6.patch - test
+ 4: 1d898e201a22 [mq]: 5.patch - johndeere
+ 3: 0dfb3111e7ee Four - jane
+ 2: a6686ee84fc3 Three (again) - maria
+ 1: a425cde5f493 [mq]: 2.patch - jane
+ 0: 3682f830e656 [mq]: 1.patch - mary
==== hg qref -U
From: test
@@ -644,12 +644,12 @@
+++ b/6
@@ -0,0 +1,1 @@
+6
- 5: [mq]: 6.patch - test
- 4: [mq]: 5.patch - johndeere
- 3: Four - jane
- 2: Three (again) - maria
- 1: [mq]: 2.patch - jane
- 0: [mq]: 1.patch - mary
+ 5: 09d19592680d [mq]: 6.patch - test
+ 4: 1d898e201a22 [mq]: 5.patch - johndeere
+ 3: 0dfb3111e7ee Four - jane
+ 2: a6686ee84fc3 Three (again) - maria
+ 1: a425cde5f493 [mq]: 2.patch - jane
+ 0: 3682f830e656 [mq]: 1.patch - mary
==== hg qref -u
From: johndeere
@@ -658,12 +658,12 @@
+++ b/6
@@ -0,0 +1,1 @@
+6
- 5: [mq]: 6.patch - johndeere
- 4: [mq]: 5.patch - johndeere
- 3: Four - jane
- 2: Three (again) - maria
- 1: [mq]: 2.patch - jane
- 0: [mq]: 1.patch - mary
+ 5: 32641ee07196 [mq]: 6.patch - johndeere
+ 4: 1d898e201a22 [mq]: 5.patch - johndeere
+ 3: 0dfb3111e7ee Four - jane
+ 2: a6686ee84fc3 Three (again) - maria
+ 1: a425cde5f493 [mq]: 2.patch - jane
+ 0: 3682f830e656 [mq]: 1.patch - mary
==== qpop -a / qpush -a
popping 6.patch
popping 5.patch
@@ -679,12 +679,12 @@
applying 5.patch
applying 6.patch
now at: 6.patch
- 5: imported patch 6.patch - johndeere
- 4: imported patch 5.patch - johndeere
- 3: Four - jane
- 2: Three (again) - maria
- 1: imported patch 2.patch - jane
- 0: imported patch 1.patch - mary
+ 5: 1fb083c80457 imported patch 6.patch - johndeere
+ 4: 7e96e969691d imported patch 5.patch - johndeere
+ 3: c7147533d3cd Four - jane
+ 2: b383d04401ea Three (again) - maria
+ 1: fac2da4efc3c imported patch 2.patch - jane
+ 0: b6e237e8771b imported patch 1.patch - mary
$ rm -r sandbox
$ runtest
==== init
@@ -693,7 +693,7 @@
# User test
# Parent
- 0: [mq]: 1.patch - test
+ 0: a054644889e5 [mq]: 1.patch - test
==== qref
adding 1
# HG changeset patch
@@ -705,7 +705,7 @@
+++ b/1
@@ -0,0 +1,1 @@
+1
- 0: [mq]: 1.patch - test
+ 0: 2905f1e46ee0 [mq]: 1.patch - test
==== qref -u
# HG changeset patch
# User mary
@@ -716,7 +716,7 @@
+++ b/1
@@ -0,0 +1,1 @@
+1
- 0: [mq]: 1.patch - mary
+ 0: 3682f830e656 [mq]: 1.patch - mary
==== qnew
adding 2
# HG changeset patch
@@ -727,8 +727,8 @@
+++ b/2
@@ -0,0 +1,1 @@
+2
- 1: [mq]: 2.patch - test
- 0: [mq]: 1.patch - mary
+ 1: 527f98a12a7a [mq]: 2.patch - test
+ 0: 3682f830e656 [mq]: 1.patch - mary
==== qref -u
# HG changeset patch
# User jane
@@ -739,17 +739,17 @@
+++ b/2
@@ -0,0 +1,1 @@
+2
- 1: [mq]: 2.patch - jane
- 0: [mq]: 1.patch - mary
+ 1: a425cde5f493 [mq]: 2.patch - jane
+ 0: 3682f830e656 [mq]: 1.patch - mary
==== qnew -U -m
# HG changeset patch
# User test
# Parent
Three
- 2: Three - test
- 1: [mq]: 2.patch - jane
- 0: [mq]: 1.patch - mary
+ 2: 0ffa16a9088e Three - test
+ 1: a425cde5f493 [mq]: 2.patch - jane
+ 0: 3682f830e656 [mq]: 1.patch - mary
==== qref
adding 3
# HG changeset patch
@@ -762,9 +762,9 @@
+++ b/3
@@ -0,0 +1,1 @@
+3
- 2: Three - test
- 1: [mq]: 2.patch - jane
- 0: [mq]: 1.patch - mary
+ 2: 83f1290c6086 Three - test
+ 1: a425cde5f493 [mq]: 2.patch - jane
+ 0: 3682f830e656 [mq]: 1.patch - mary
==== qref -m
# HG changeset patch
# User test
@@ -776,9 +776,9 @@
+++ b/3
@@ -0,0 +1,1 @@
+3
- 2: Drei - test
- 1: [mq]: 2.patch - jane
- 0: [mq]: 1.patch - mary
+ 2: b0d856328d4d Drei - test
+ 1: a425cde5f493 [mq]: 2.patch - jane
+ 0: 3682f830e656 [mq]: 1.patch - mary
==== qref -u
# HG changeset patch
# User mary
@@ -790,9 +790,9 @@
+++ b/3
@@ -0,0 +1,1 @@
+3
- 2: Drei - mary
- 1: [mq]: 2.patch - jane
- 0: [mq]: 1.patch - mary
+ 2: bb9d4b28e6a6 Drei - mary
+ 1: a425cde5f493 [mq]: 2.patch - jane
+ 0: 3682f830e656 [mq]: 1.patch - mary
==== qref -u -m
# HG changeset patch
# User maria
@@ -804,9 +804,9 @@
+++ b/3
@@ -0,0 +1,1 @@
+3
- 2: Three (again) - maria
- 1: [mq]: 2.patch - jane
- 0: [mq]: 1.patch - mary
+ 2: a6686ee84fc3 Three (again) - maria
+ 1: a425cde5f493 [mq]: 2.patch - jane
+ 0: 3682f830e656 [mq]: 1.patch - mary
==== qnew -m
adding 4of
# HG changeset patch
@@ -818,10 +818,10 @@
+++ b/4of
@@ -0,0 +1,1 @@
+4 t
- 3: Four - test
- 2: Three (again) - maria
- 1: [mq]: 2.patch - jane
- 0: [mq]: 1.patch - mary
+ 3: b9f922d0da40 Four - test
+ 2: a6686ee84fc3 Three (again) - maria
+ 1: a425cde5f493 [mq]: 2.patch - jane
+ 0: 3682f830e656 [mq]: 1.patch - mary
==== qref -u
# HG changeset patch
# User jane
@@ -833,21 +833,21 @@
+++ b/4of
@@ -0,0 +1,1 @@
+4 t
- 3: Four - jane
- 2: Three (again) - maria
- 1: [mq]: 2.patch - jane
- 0: [mq]: 1.patch - mary
+ 3: 0dfb3111e7ee Four - jane
+ 2: a6686ee84fc3 Three (again) - maria
+ 1: a425cde5f493 [mq]: 2.patch - jane
+ 0: 3682f830e656 [mq]: 1.patch - mary
==== qnew with HG header
popping 5.patch
now at: 4.patch
now at: 5.patch
# HG changeset patch
# User johndoe
- 4: imported patch 5.patch - johndoe
- 3: Four - jane
- 2: Three (again) - maria
- 1: [mq]: 2.patch - jane
- 0: [mq]: 1.patch - mary
+ 4: 72bc4a0467ef imported patch 5.patch - johndoe
+ 3: 0dfb3111e7ee Four - jane
+ 2: a6686ee84fc3 Three (again) - maria
+ 1: a425cde5f493 [mq]: 2.patch - jane
+ 0: 3682f830e656 [mq]: 1.patch - mary
==== hg qref
adding 5
# HG changeset patch
@@ -859,11 +859,11 @@
+++ b/5
@@ -0,0 +1,1 @@
+5
- 4: [mq]: 5.patch - johndoe
- 3: Four - jane
- 2: Three (again) - maria
- 1: [mq]: 2.patch - jane
- 0: [mq]: 1.patch - mary
+ 4: ff5c06112e5a [mq]: 5.patch - johndoe
+ 3: 0dfb3111e7ee Four - jane
+ 2: a6686ee84fc3 Three (again) - maria
+ 1: a425cde5f493 [mq]: 2.patch - jane
+ 0: 3682f830e656 [mq]: 1.patch - mary
==== hg qref -U
# HG changeset patch
# User test
@@ -874,11 +874,11 @@
+++ b/5
@@ -0,0 +1,1 @@
+5
- 4: [mq]: 5.patch - test
- 3: Four - jane
- 2: Three (again) - maria
- 1: [mq]: 2.patch - jane
- 0: [mq]: 1.patch - mary
+ 4: c947416c22b1 [mq]: 5.patch - test
+ 3: 0dfb3111e7ee Four - jane
+ 2: a6686ee84fc3 Three (again) - maria
+ 1: a425cde5f493 [mq]: 2.patch - jane
+ 0: 3682f830e656 [mq]: 1.patch - mary
==== hg qref -u
# HG changeset patch
# User johndeere
@@ -889,23 +889,23 @@
+++ b/5
@@ -0,0 +1,1 @@
+5
- 4: [mq]: 5.patch - johndeere
- 3: Four - jane
- 2: Three (again) - maria
- 1: [mq]: 2.patch - jane
- 0: [mq]: 1.patch - mary
+ 4: 1d898e201a22 [mq]: 5.patch - johndeere
+ 3: 0dfb3111e7ee Four - jane
+ 2: a6686ee84fc3 Three (again) - maria
+ 1: a425cde5f493 [mq]: 2.patch - jane
+ 0: 3682f830e656 [mq]: 1.patch - mary
==== qnew with plain header
popping 6.patch
now at: 5.patch
now at: 6.patch
From: test
- 5: imported patch 6.patch - test
- 4: [mq]: 5.patch - johndeere
- 3: Four - jane
- 2: Three (again) - maria
- 1: [mq]: 2.patch - jane
- 0: [mq]: 1.patch - mary
+ 5: 7825a18ec839 imported patch 6.patch - test
+ 4: 1d898e201a22 [mq]: 5.patch - johndeere
+ 3: 0dfb3111e7ee Four - jane
+ 2: a6686ee84fc3 Three (again) - maria
+ 1: a425cde5f493 [mq]: 2.patch - jane
+ 0: 3682f830e656 [mq]: 1.patch - mary
==== hg qref
adding 6
From: test
@@ -915,12 +915,12 @@
+++ b/6
@@ -0,0 +1,1 @@
+6
- 5: [mq]: 6.patch - test
- 4: [mq]: 5.patch - johndeere
- 3: Four - jane
- 2: Three (again) - maria
- 1: [mq]: 2.patch - jane
- 0: [mq]: 1.patch - mary
+ 5: 09d19592680d [mq]: 6.patch - test
+ 4: 1d898e201a22 [mq]: 5.patch - johndeere
+ 3: 0dfb3111e7ee Four - jane
+ 2: a6686ee84fc3 Three (again) - maria
+ 1: a425cde5f493 [mq]: 2.patch - jane
+ 0: 3682f830e656 [mq]: 1.patch - mary
==== hg qref -U
From: test
@@ -929,12 +929,12 @@
+++ b/6
@@ -0,0 +1,1 @@
+6
- 5: [mq]: 6.patch - test
- 4: [mq]: 5.patch - johndeere
- 3: Four - jane
- 2: Three (again) - maria
- 1: [mq]: 2.patch - jane
- 0: [mq]: 1.patch - mary
+ 5: 09d19592680d [mq]: 6.patch - test
+ 4: 1d898e201a22 [mq]: 5.patch - johndeere
+ 3: 0dfb3111e7ee Four - jane
+ 2: a6686ee84fc3 Three (again) - maria
+ 1: a425cde5f493 [mq]: 2.patch - jane
+ 0: 3682f830e656 [mq]: 1.patch - mary
==== hg qref -u
From: johndeere
@@ -943,12 +943,12 @@
+++ b/6
@@ -0,0 +1,1 @@
+6
- 5: [mq]: 6.patch - johndeere
- 4: [mq]: 5.patch - johndeere
- 3: Four - jane
- 2: Three (again) - maria
- 1: [mq]: 2.patch - jane
- 0: [mq]: 1.patch - mary
+ 5: 32641ee07196 [mq]: 6.patch - johndeere
+ 4: 1d898e201a22 [mq]: 5.patch - johndeere
+ 3: 0dfb3111e7ee Four - jane
+ 2: a6686ee84fc3 Three (again) - maria
+ 1: a425cde5f493 [mq]: 2.patch - jane
+ 0: 3682f830e656 [mq]: 1.patch - mary
==== qpop -a / qpush -a
popping 6.patch
popping 5.patch
@@ -964,11 +964,11 @@
applying 5.patch
applying 6.patch
now at: 6.patch
- 5: imported patch 6.patch - johndeere
- 4: imported patch 5.patch - johndeere
- 3: Four - jane
- 2: Three (again) - maria
- 1: imported patch 2.patch - jane
- 0: imported patch 1.patch - mary
+ 5: 1fb083c80457 imported patch 6.patch - johndeere
+ 4: 7e96e969691d imported patch 5.patch - johndeere
+ 3: c7147533d3cd Four - jane
+ 2: b383d04401ea Three (again) - maria
+ 1: fac2da4efc3c imported patch 2.patch - jane
+ 0: b6e237e8771b imported patch 1.patch - mary
$ cd ..
--- a/tests/test-mq-merge.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-mq-merge.t Mon Jan 22 17:53:02 2018 -0500
@@ -58,7 +58,7 @@
Save the patch queue so we can merge it later:
$ hg qsave -c -e
- copy $TESTTMP/t/.hg/patches to $TESTTMP/t/.hg/patches.1 (glob)
+ copy $TESTTMP/t/.hg/patches to $TESTTMP/t/.hg/patches.1
$ checkundo
Update b and commit in an "update" changeset:
@@ -78,7 +78,7 @@
b
$ hg qpush -a -m
- merging with queue at: $TESTTMP/t/.hg/patches.1 (glob)
+ merging with queue at: $TESTTMP/t/.hg/patches.1
applying rm_a
now at: rm_a
@@ -117,14 +117,14 @@
Create the reference queue:
$ hg qsave -c -e -n refqueue
- copy $TESTTMP/t2/.hg/patches to $TESTTMP/t2/.hg/refqueue (glob)
+ copy $TESTTMP/t2/.hg/patches to $TESTTMP/t2/.hg/refqueue
$ hg up -C 1
1 files updated, 0 files merged, 1 files removed, 0 files unresolved
Merge:
$ HGMERGE=internal:other hg qpush -a -m -n refqueue
- merging with queue at: $TESTTMP/t2/.hg/refqueue (glob)
+ merging with queue at: $TESTTMP/t2/.hg/refqueue
applying patcha
patching file a
Hunk #1 succeeded at 2 with fuzz 1 (offset 0 lines).
--- a/tests/test-mq-pull-from-bundle.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-mq-pull-from-bundle.t Mon Jan 22 17:53:02 2018 -0500
@@ -2,7 +2,7 @@
> [extensions]
> mq=
> [alias]
- > tlog = log --template "{rev}: {desc}\\n"
+ > tlog = log --template "{rev}: {node|short} {desc}\\n"
> theads = heads --template "{rev}: {desc}\\n"
> tincoming = incoming --template "{rev}: {desc}\\n"
> EOF
@@ -97,7 +97,7 @@
0: queue: two.patch added
$ hg -R .hg/patches tlog
- 0: queue: two.patch added
+ 0: d7553909353d queue: two.patch added
$ hg qseries
two.patch
@@ -128,7 +128,7 @@
0: queue: two.patch added
$ hg -R .hg/patches tlog
- 0: queue: two.patch added
+ 0: d7553909353d queue: two.patch added
$ hg qseries
two.patch
--- a/tests/test-mq-qnew.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-mq-qnew.t Mon Jan 22 17:53:02 2018 -0500
@@ -117,7 +117,7 @@
abort: patch name cannot begin or end with whitespace
abort: patch name cannot begin or end with whitespace
% qnew with name containing slash
- abort: path ends in directory separator: foo/ (glob)
+ abort: path ends in directory separator: foo/
abort: "foo" already exists as a directory
foo/bar.patch
popping foo/bar.patch
@@ -162,7 +162,7 @@
merging a
warning: conflicts while merging a! (edit, then use 'hg resolve --mark')
0 files updated, 0 files merged, 0 files removed, 1 files unresolved
- use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon
+ use 'hg resolve' to retry unresolved file merges or 'hg merge --abort' to abandon
(no more unresolved files)
abort: cannot manage merge changesets
$ rm -r sandbox
@@ -187,7 +187,7 @@
abort: patch name cannot begin or end with whitespace
abort: patch name cannot begin or end with whitespace
% qnew with name containing slash
- abort: path ends in directory separator: foo/ (glob)
+ abort: path ends in directory separator: foo/
abort: "foo" already exists as a directory
foo/bar.patch
popping foo/bar.patch
@@ -241,7 +241,7 @@
merging a
warning: conflicts while merging a! (edit, then use 'hg resolve --mark')
0 files updated, 0 files merged, 0 files removed, 1 files unresolved
- use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon
+ use 'hg resolve' to retry unresolved file merges or 'hg merge --abort' to abandon
(no more unresolved files)
abort: cannot manage merge changesets
$ rm -r sandbox
--- a/tests/test-mq-safety.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-mq-safety.t Mon Jan 22 17:53:02 2018 -0500
@@ -68,7 +68,7 @@
abort: popping would remove a revision not managed by this patch queue
[255]
$ hg qpop -n patches
- using patch queue: $TESTTMP/repo/.hg/patches (glob)
+ using patch queue: $TESTTMP/repo/.hg/patches
abort: popping would remove a revision not managed by this patch queue
[255]
$ hg qrefresh
--- a/tests/test-mq-subrepo.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-mq-subrepo.t Mon Jan 22 17:53:02 2018 -0500
@@ -247,7 +247,7 @@
[255]
$ hg revert sub
reverting subrepo sub
- adding sub/a (glob)
+ adding sub/a
$ hg qpop
popping 1
now at: 0
@@ -266,7 +266,7 @@
[255]
$ hg revert sub
reverting subrepo sub
- adding sub/a (glob)
+ adding sub/a
$ hg qpush
applying 1
subrepository sub diverged (local revision: b2fdb12cd82b, remote revision: aa037b301eba)
--- a/tests/test-mq.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-mq.t Mon Jan 22 17:53:02 2018 -0500
@@ -154,7 +154,7 @@
guards
$ cat .hg/patches/series
$ hg qinit -c
- abort: repository $TESTTMP/d/.hg/patches already exists! (glob)
+ abort: repository $TESTTMP/d/.hg/patches already exists!
[255]
$ cd ..
@@ -177,8 +177,8 @@
$ echo status >> .hg/patches/.hgignore
$ echo bleh >> .hg/patches/.hgignore
$ hg qinit -c
- adding .hg/patches/A (glob)
- adding .hg/patches/B (glob)
+ adding .hg/patches/A
+ adding .hg/patches/B
$ hg -R .hg/patches status
A .hgignore
A A
@@ -800,7 +800,7 @@
$ hg strip -f tip
0 files updated, 0 files merged, 1 files removed, 0 files unresolved
- saved backup bundle to $TESTTMP/b/.hg/strip-backup/770eb8fce608-0ddcae0f-backup.hg (glob)
+ saved backup bundle to $TESTTMP/b/.hg/strip-backup/770eb8fce608-0ddcae0f-backup.hg
$ cd ..
@@ -1247,7 +1247,7 @@
$ cd qclonesource
$ hg qinit -c
- adding .hg/patches/patch1 (glob)
+ adding .hg/patches/patch1
$ hg qci -m checkpoint
$ qlog
main repo:
@@ -1388,8 +1388,8 @@
$ hg qpush -f --verbose --config 'ui.origbackuppath=.hg/origbackups'
applying empty
- creating directory: $TESTTMP/forcepush/.hg/origbackups (glob)
- saving current version of hello.txt as $TESTTMP/forcepush/.hg/origbackups/hello.txt (glob)
+ creating directory: $TESTTMP/forcepush/.hg/origbackups
+ saving current version of hello.txt as $TESTTMP/forcepush/.hg/origbackups/hello.txt
patching file hello.txt
committing files:
hello.txt
--- a/tests/test-mv-cp-st-diff.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-mv-cp-st-diff.t Mon Jan 22 17:53:02 2018 -0500
@@ -1339,7 +1339,7 @@
% hg ci -m t0
created new head
% hg mv x y
- moving x/x to y/x (glob)
+ moving x/x to y/x
% hg ci -m t1
% add y/x x1
% hg ci -m t2
--- a/tests/test-nested-repo.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-nested-repo.t Mon Jan 22 17:53:02 2018 -0500
@@ -14,16 +14,16 @@
Should fail:
$ hg st b/x
- abort: path 'b/x' is inside nested repo 'b' (glob)
+ abort: path 'b/x' is inside nested repo 'b'
[255]
$ hg add b/x
- abort: path 'b/x' is inside nested repo 'b' (glob)
+ abort: path 'b/x' is inside nested repo 'b'
[255]
Should fail:
$ hg add b b/x
- abort: path 'b/x' is inside nested repo 'b' (glob)
+ abort: path 'b/x' is inside nested repo 'b'
[255]
$ hg st
@@ -37,7 +37,7 @@
Should fail:
$ hg mv a b
- abort: path 'b/a' is inside nested repo 'b' (glob)
+ abort: path 'b/a' is inside nested repo 'b'
[255]
$ hg st
--- a/tests/test-notify-changegroup.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-notify-changegroup.t Mon Jan 22 17:53:02 2018 -0500
@@ -56,11 +56,11 @@
Message-Id: <*> (glob)
To: baz, foo@bar
- changeset cb9a9f314b8b in $TESTTMP/a (glob)
+ changeset cb9a9f314b8b in $TESTTMP/a
details: $TESTTMP/a?cmd=changeset;node=cb9a9f314b8b
summary: a
- changeset ba677d0156c1 in $TESTTMP/a (glob)
+ changeset ba677d0156c1 in $TESTTMP/a
details: $TESTTMP/a?cmd=changeset;node=ba677d0156c1
summary: b
@@ -109,11 +109,11 @@
Message-Id: <*> (glob)
To: baz, foo@bar
- changeset cb9a9f314b8b in $TESTTMP/a (glob)
+ changeset cb9a9f314b8b in $TESTTMP/a
details: $TESTTMP/a?cmd=changeset;node=cb9a9f314b8b
summary: a
- changeset ba677d0156c1 in $TESTTMP/a (glob)
+ changeset ba677d0156c1 in $TESTTMP/a
details: $TESTTMP/a?cmd=changeset;node=ba677d0156c1
summary: b
@@ -186,19 +186,19 @@
Message-Id: <*> (glob)
To: baz, foo@bar
- changeset 84e487dddc58 in $TESTTMP/a (glob)
+ changeset 84e487dddc58 in $TESTTMP/a
details: $TESTTMP/a?cmd=changeset;node=84e487dddc58
summary: newfile
- changeset b29c7a2b6b0c in $TESTTMP/a (glob)
+ changeset b29c7a2b6b0c in $TESTTMP/a
details: $TESTTMP/a?cmd=changeset;node=b29c7a2b6b0c
summary: x
- changeset 0957c7d64886 in $TESTTMP/a (glob)
+ changeset 0957c7d64886 in $TESTTMP/a
details: $TESTTMP/a?cmd=changeset;node=0957c7d64886
summary: y
- changeset 485b4e6b0249 in $TESTTMP/a (glob)
+ changeset 485b4e6b0249 in $TESTTMP/a
details: $TESTTMP/a?cmd=changeset;node=485b4e6b0249
summary: merged
--- a/tests/test-notify.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-notify.t Mon Jan 22 17:53:02 2018 -0500
@@ -199,7 +199,7 @@
Message-Id: <*> (glob)
To: baz, foo@bar
- changeset 0647d048b600 in $TESTTMP/b (glob)
+ changeset 0647d048b600 in $TESTTMP/b
details: $TESTTMP/b?cmd=changeset;node=0647d048b600
description: b
@@ -563,7 +563,7 @@
Message-Id: <hg.f5e8ec95bf59.*.*@*> (glob)
To: baz@test.com, foo@bar
- changeset f5e8ec95bf59 in $TESTTMP/b (glob)
+ changeset f5e8ec95bf59 in $TESTTMP/b
details: http://test/b?cmd=changeset;node=f5e8ec95bf59
description: default template
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-obshistory.t Mon Jan 22 17:53:02 2018 -0500
@@ -0,0 +1,564 @@
+This test file tests the various messages when accessing obsolete
+revisions.
+
+Global setup
+============
+
+ $ . $TESTDIR/testlib/obsmarker-common.sh
+ $ cat >> $HGRCPATH <<EOF
+ > [ui]
+ > interactive = true
+ > [phases]
+ > publish=False
+ > [experimental]
+ > evolution.createmarkers = yes
+ > evolution.effect-flags = yes
+ > EOF
+
+Test output on amended commit
+=============================
+
+Test setup
+----------
+
+ $ hg init $TESTTMP/local-amend
+ $ cd $TESTTMP/local-amend
+ $ mkcommit ROOT
+ $ mkcommit A0
+ $ echo 42 >> A0
+ $ hg commit --amend -m "A1
+ >
+ > Better commit message"
+ $ hg log --hidden -G
+ @ changeset: 2:4ae3a4151de9
+ | tag: tip
+ | parent: 0:ea207398892e
+ | user: test
+ | date: Thu Jan 01 00:00:00 1970 +0000
+ | summary: A1
+ |
+ | x changeset: 1:471f378eab4c
+ |/ user: test
+ | date: Thu Jan 01 00:00:00 1970 +0000
+ | obsolete: rewritten using amend as 2:4ae3a4151de9
+ | summary: A0
+ |
+ o changeset: 0:ea207398892e
+ user: test
+ date: Thu Jan 01 00:00:00 1970 +0000
+ summary: ROOT
+
+Actual test
+-----------
+ $ hg update 471f378eab4c
+ abort: hidden revision '471f378eab4c' was rewritten as: 4ae3a4151de9!
+ (use --hidden to access hidden revisions)
+ [255]
+ $ hg update --hidden "desc(A0)"
+ updating to a hidden changeset 471f378eab4c
+ (hidden revision '471f378eab4c' was rewritten as: 4ae3a4151de9)
+ 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+
+Test output with pruned commit
+==============================
+
+Test setup
+----------
+
+ $ hg init $TESTTMP/local-prune
+ $ cd $TESTTMP/local-prune
+ $ mkcommit ROOT
+ $ mkcommit A0 # 0
+ $ mkcommit B0 # 1
+ $ hg log --hidden -G
+ @ changeset: 2:0dec01379d3b
+ | tag: tip
+ | user: test
+ | date: Thu Jan 01 00:00:00 1970 +0000
+ | summary: B0
+ |
+ o changeset: 1:471f378eab4c
+ | user: test
+ | date: Thu Jan 01 00:00:00 1970 +0000
+ | summary: A0
+ |
+ o changeset: 0:ea207398892e
+ user: test
+ date: Thu Jan 01 00:00:00 1970 +0000
+ summary: ROOT
+
+ $ hg debugobsolete --record-parents `getid 'desc(B0)'`
+ obsoleted 1 changesets
+
+ $ hg log --hidden -G
+ @ changeset: 2:0dec01379d3b
+ | tag: tip
+ | user: test
+ | date: Thu Jan 01 00:00:00 1970 +0000
+ | obsolete: pruned
+ | summary: B0
+ |
+ o changeset: 1:471f378eab4c
+ | user: test
+ | date: Thu Jan 01 00:00:00 1970 +0000
+ | summary: A0
+ |
+ o changeset: 0:ea207398892e
+ user: test
+ date: Thu Jan 01 00:00:00 1970 +0000
+ summary: ROOT
+
+
+Actual test
+-----------
+ $ hg up 1
+ 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
+ $ hg up 0dec01379d3b
+ abort: hidden revision '0dec01379d3b' is pruned!
+ (use --hidden to access hidden revisions)
+ [255]
+ $ hg up --hidden -r 'desc(B0)'
+ updating to a hidden changeset 0dec01379d3b
+ (hidden revision '0dec01379d3b' is pruned)
+ 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+
+Test output with split commit
+=============================
+
+Test setup
+----------
+
+ $ hg init $TESTTMP/local-split
+ $ cd $TESTTMP/local-split
+ $ mkcommit ROOT
+ $ echo 42 >> a
+ $ echo 43 >> b
+ $ hg commit -A -m "A0"
+ adding a
+ adding b
+ $ hg log --hidden -G
+ @ changeset: 1:471597cad322
+ | tag: tip
+ | user: test
+ | date: Thu Jan 01 00:00:00 1970 +0000
+ | summary: A0
+ |
+ o changeset: 0:ea207398892e
+ user: test
+ date: Thu Jan 01 00:00:00 1970 +0000
+ summary: ROOT
+
+# Simulate a split
+ $ hg up 0
+ 0 files updated, 0 files merged, 2 files removed, 0 files unresolved
+
+ $ echo 42 >> a
+ $ hg commit -A -m "A0"
+ adding a
+ created new head
+
+ $ echo 43 >> b
+ $ hg commit -A -m "A0"
+ adding b
+
+ $ hg debugobsolete `getid '1'` `getid '2'` `getid '3'`
+ obsoleted 1 changesets
+
+ $ hg log --hidden -G
+ @ changeset: 3:f257fde29c7a
+ | tag: tip
+ | user: test
+ | date: Thu Jan 01 00:00:00 1970 +0000
+ | summary: A0
+ |
+ o changeset: 2:337fec4d2edc
+ | parent: 0:ea207398892e
+ | user: test
+ | date: Thu Jan 01 00:00:00 1970 +0000
+ | summary: A0
+ |
+ | x changeset: 1:471597cad322
+ |/ user: test
+ | date: Thu Jan 01 00:00:00 1970 +0000
+ | obsolete: split as 2:337fec4d2edc, 3:f257fde29c7a
+ | summary: A0
+ |
+ o changeset: 0:ea207398892e
+ user: test
+ date: Thu Jan 01 00:00:00 1970 +0000
+ summary: ROOT
+
+Actual test
+-----------
+ $ hg update 471597cad322
+ abort: hidden revision '471597cad322' was split as: 337fec4d2edc, f257fde29c7a!
+ (use --hidden to access hidden revisions)
+ [255]
+ $ hg update --hidden 'min(desc(A0))'
+ updating to a hidden changeset 471597cad322
+ (hidden revision '471597cad322' was split as: 337fec4d2edc, f257fde29c7a)
+ 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
+
+Test output with lots of split commits
+======================================
+
+Test setup
+----------
+
+ $ hg init $TESTTMP/local-lots-split
+ $ cd $TESTTMP/local-lots-split
+ $ mkcommit ROOT
+ $ echo 42 >> a
+ $ echo 43 >> b
+ $ echo 44 >> c
+ $ echo 45 >> d
+ $ hg commit -A -m "A0"
+ adding a
+ adding b
+ adding c
+ adding d
+ $ hg log --hidden -G
+ @ changeset: 1:de7290d8b885
+ | tag: tip
+ | user: test
+ | date: Thu Jan 01 00:00:00 1970 +0000
+ | summary: A0
+ |
+ o changeset: 0:ea207398892e
+ user: test
+ date: Thu Jan 01 00:00:00 1970 +0000
+ summary: ROOT
+
+# Simulate a split
+ $ hg up 0
+ 0 files updated, 0 files merged, 4 files removed, 0 files unresolved
+
+ $ echo 42 >> a
+ $ hg commit -A -m "A0"
+ adding a
+ created new head
+
+ $ echo 43 >> b
+ $ hg commit -A -m "A0"
+ adding b
+
+ $ echo 44 >> c
+ $ hg commit -A -m "A0"
+ adding c
+
+ $ echo 45 >> d
+ $ hg commit -A -m "A0"
+ adding d
+
+ $ hg debugobsolete `getid '1'` `getid '2'` `getid '3'` `getid '4'` `getid '5'`
+ obsoleted 1 changesets
+
+ $ hg log --hidden -G
+ @ changeset: 5:c7f044602e9b
+ | tag: tip
+ | user: test
+ | date: Thu Jan 01 00:00:00 1970 +0000
+ | summary: A0
+ |
+ o changeset: 4:1ae8bc733a14
+ | user: test
+ | date: Thu Jan 01 00:00:00 1970 +0000
+ | summary: A0
+ |
+ o changeset: 3:f257fde29c7a
+ | user: test
+ | date: Thu Jan 01 00:00:00 1970 +0000
+ | summary: A0
+ |
+ o changeset: 2:337fec4d2edc
+ | parent: 0:ea207398892e
+ | user: test
+ | date: Thu Jan 01 00:00:00 1970 +0000
+ | summary: A0
+ |
+ | x changeset: 1:de7290d8b885
+ |/ user: test
+ | date: Thu Jan 01 00:00:00 1970 +0000
+ | obsolete: split as 2:337fec4d2edc, 3:f257fde29c7a, 4:1ae8bc733a14, 5:c7f044602e9b
+ | summary: A0
+ |
+ o changeset: 0:ea207398892e
+ user: test
+ date: Thu Jan 01 00:00:00 1970 +0000
+ summary: ROOT
+
+Actual test
+-----------
+ $ hg update de7290d8b885
+ abort: hidden revision 'de7290d8b885' was split as: 337fec4d2edc, f257fde29c7a and 2 more!
+ (use --hidden to access hidden revisions)
+ [255]
+ $ hg update --hidden 'min(desc(A0))'
+ updating to a hidden changeset de7290d8b885
+ (hidden revision 'de7290d8b885' was split as: 337fec4d2edc, f257fde29c7a and 2 more)
+ 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
+
+Test output with folded commit
+==============================
+
+Test setup
+----------
+
+ $ hg init $TESTTMP/local-fold
+ $ cd $TESTTMP/local-fold
+ $ mkcommit ROOT
+ $ mkcommit A0
+ $ mkcommit B0
+ $ hg log --hidden -G
+ @ changeset: 2:0dec01379d3b
+ | tag: tip
+ | user: test
+ | date: Thu Jan 01 00:00:00 1970 +0000
+ | summary: B0
+ |
+ o changeset: 1:471f378eab4c
+ | user: test
+ | date: Thu Jan 01 00:00:00 1970 +0000
+ | summary: A0
+ |
+ o changeset: 0:ea207398892e
+ user: test
+ date: Thu Jan 01 00:00:00 1970 +0000
+ summary: ROOT
+
+# Simulate a fold
+ $ hg up 0
+ 0 files updated, 0 files merged, 2 files removed, 0 files unresolved
+ $ echo "A0" > A0
+ $ echo "B0" > B0
+ $ hg add A0 B0
+ $ hg commit -m "C0"
+ created new head
+
+ $ hg debugobsolete `getid 'desc(A0)'` `getid 'desc(C0)'`
+ obsoleted 1 changesets
+ 1 new orphan changesets
+ $ hg debugobsolete `getid 'desc(B0)'` `getid 'desc(C0)'`
+ obsoleted 1 changesets
+
+ $ hg log --hidden -G
+ @ changeset: 3:eb5a0daa2192
+ | tag: tip
+ | parent: 0:ea207398892e
+ | user: test
+ | date: Thu Jan 01 00:00:00 1970 +0000
+ | summary: C0
+ |
+ | x changeset: 2:0dec01379d3b
+ | | user: test
+ | | date: Thu Jan 01 00:00:00 1970 +0000
+ | | obsolete: rewritten as 3:eb5a0daa2192
+ | | summary: B0
+ | |
+ | x changeset: 1:471f378eab4c
+ |/ user: test
+ | date: Thu Jan 01 00:00:00 1970 +0000
+ | obsolete: rewritten as 3:eb5a0daa2192
+ | summary: A0
+ |
+ o changeset: 0:ea207398892e
+ user: test
+ date: Thu Jan 01 00:00:00 1970 +0000
+ summary: ROOT
+
+Actual test
+-----------
+ $ hg update 471f378eab4c
+ abort: hidden revision '471f378eab4c' was rewritten as: eb5a0daa2192!
+ (use --hidden to access hidden revisions)
+ [255]
+ $ hg update --hidden 'desc(A0)'
+ updating to a hidden changeset 471f378eab4c
+ (hidden revision '471f378eab4c' was rewritten as: eb5a0daa2192)
+ 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
+ $ hg update 0dec01379d3b
+ abort: hidden revision '0dec01379d3b' was rewritten as: eb5a0daa2192!
+ (use --hidden to access hidden revisions)
+ [255]
+ $ hg update --hidden 'desc(B0)'
+ updating to a hidden changeset 0dec01379d3b
+ (hidden revision '0dec01379d3b' was rewritten as: eb5a0daa2192)
+ 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+
+Test output with divergence
+===========================
+
+Test setup
+----------
+
+ $ hg init $TESTTMP/local-divergence
+ $ cd $TESTTMP/local-divergence
+ $ mkcommit ROOT
+ $ mkcommit A0
+ $ hg commit --amend -m "A1"
+ $ hg log --hidden -G
+ @ changeset: 2:fdf9bde5129a
+ | tag: tip
+ | parent: 0:ea207398892e
+ | user: test
+ | date: Thu Jan 01 00:00:00 1970 +0000
+ | summary: A1
+ |
+ | x changeset: 1:471f378eab4c
+ |/ user: test
+ | date: Thu Jan 01 00:00:00 1970 +0000
+ | obsolete: rewritten using amend as 2:fdf9bde5129a
+ | summary: A0
+ |
+ o changeset: 0:ea207398892e
+ user: test
+ date: Thu Jan 01 00:00:00 1970 +0000
+ summary: ROOT
+
+ $ hg update --hidden 'desc(A0)'
+ updating to a hidden changeset 471f378eab4c
+ (hidden revision '471f378eab4c' was rewritten as: fdf9bde5129a)
+ 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ $ hg commit --amend -m "A2"
+ 2 new content-divergent changesets
+ $ hg log --hidden -G
+ @ changeset: 3:65b757b745b9
+ | tag: tip
+ | parent: 0:ea207398892e
+ | user: test
+ | date: Thu Jan 01 00:00:00 1970 +0000
+ | instability: content-divergent
+ | summary: A2
+ |
+ | * changeset: 2:fdf9bde5129a
+ |/ parent: 0:ea207398892e
+ | user: test
+ | date: Thu Jan 01 00:00:00 1970 +0000
+ | instability: content-divergent
+ | summary: A1
+ |
+ | x changeset: 1:471f378eab4c
+ |/ user: test
+ | date: Thu Jan 01 00:00:00 1970 +0000
+ | obsolete: rewritten using amend as 2:fdf9bde5129a
+ | obsolete: rewritten using amend as 3:65b757b745b9
+ | summary: A0
+ |
+ o changeset: 0:ea207398892e
+ user: test
+ date: Thu Jan 01 00:00:00 1970 +0000
+ summary: ROOT
+
+Actual test
+-----------
+ $ hg update 471f378eab4c
+ abort: hidden revision '471f378eab4c' has diverged!
+ (use --hidden to access hidden revisions)
+ [255]
+ $ hg update --hidden 'desc(A0)'
+ updating to a hidden changeset 471f378eab4c
+ (hidden revision '471f378eab4c' has diverged)
+ 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
+
+Test output with amended + folded commit
+========================================
+
+Test setup
+----------
+
+ $ hg init $TESTTMP/local-amend-fold
+ $ cd $TESTTMP/local-amend-fold
+ $ mkcommit ROOT
+ $ mkcommit A0
+ $ mkcommit B0
+ $ hg commit --amend -m "B1"
+ $ hg log --hidden -G
+ @ changeset: 3:b7ea6d14e664
+ | tag: tip
+ | parent: 1:471f378eab4c
+ | user: test
+ | date: Thu Jan 01 00:00:00 1970 +0000
+ | summary: B1
+ |
+ | x changeset: 2:0dec01379d3b
+ |/ user: test
+ | date: Thu Jan 01 00:00:00 1970 +0000
+ | obsolete: rewritten using amend as 3:b7ea6d14e664
+ | summary: B0
+ |
+ o changeset: 1:471f378eab4c
+ | user: test
+ | date: Thu Jan 01 00:00:00 1970 +0000
+ | summary: A0
+ |
+ o changeset: 0:ea207398892e
+ user: test
+ date: Thu Jan 01 00:00:00 1970 +0000
+ summary: ROOT
+
+
+# Simulate a fold
+ $ hg up 0
+ 0 files updated, 0 files merged, 2 files removed, 0 files unresolved
+ $ echo "A0" > A0
+ $ echo "B0" > B0
+ $ hg add A0 B0
+ $ hg commit -m "C0"
+ created new head
+
+ $ hg debugobsolete `getid 'desc(A0)'` `getid 'desc(C0)'`
+ obsoleted 1 changesets
+ 1 new orphan changesets
+ $ hg debugobsolete `getid 'desc(B1)'` `getid 'desc(C0)'`
+ obsoleted 1 changesets
+
+ $ hg log --hidden -G
+ @ changeset: 4:eb5a0daa2192
+ | tag: tip
+ | parent: 0:ea207398892e
+ | user: test
+ | date: Thu Jan 01 00:00:00 1970 +0000
+ | summary: C0
+ |
+ | x changeset: 3:b7ea6d14e664
+ | | parent: 1:471f378eab4c
+ | | user: test
+ | | date: Thu Jan 01 00:00:00 1970 +0000
+ | | obsolete: rewritten as 4:eb5a0daa2192
+ | | summary: B1
+ | |
+ | | x changeset: 2:0dec01379d3b
+ | |/ user: test
+ | | date: Thu Jan 01 00:00:00 1970 +0000
+ | | obsolete: rewritten using amend as 3:b7ea6d14e664
+ | | summary: B0
+ | |
+ | x changeset: 1:471f378eab4c
+ |/ user: test
+ | date: Thu Jan 01 00:00:00 1970 +0000
+ | obsolete: rewritten as 4:eb5a0daa2192
+ | summary: A0
+ |
+ o changeset: 0:ea207398892e
+ user: test
+ date: Thu Jan 01 00:00:00 1970 +0000
+ summary: ROOT
+
+Actual test
+-----------
+ $ hg update 471f378eab4c
+ abort: hidden revision '471f378eab4c' was rewritten as: eb5a0daa2192!
+ (use --hidden to access hidden revisions)
+ [255]
+ $ hg update --hidden 'desc(A0)'
+ updating to a hidden changeset 471f378eab4c
+ (hidden revision '471f378eab4c' was rewritten as: eb5a0daa2192)
+ 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
+ $ hg update --hidden 0dec01379d3b
+ updating to a hidden changeset 0dec01379d3b
+ (hidden revision '0dec01379d3b' was rewritten as: eb5a0daa2192)
+ 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ $ hg update 0dec01379d3b
+ 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ $ hg update --hidden 'desc(B0)'
+ 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
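For reference, the hidden-update feedback added above can be reproduced outside the test harness roughly as follows. This is a minimal sketch, assuming the same experimental.evolution=true configuration the tests themselves enable; the changeset hashes depend on the commit data and are therefore globbed:

  $ hg init hidden-demo
  $ cd hidden-demo
  $ cat >> .hg/hgrc << EOF
  > [experimental]
  > evolution = true
  > EOF
  $ echo A0 > A0
  $ hg add A0
  $ hg commit -m "A0"
  $ hg commit --amend -m "A1"
  $ hg update --hidden 'desc(A0)'
  updating to a hidden changeset * (glob)
  (hidden revision '*' was rewritten as: *) (glob)
  0 files updated, 0 files merged, 0 files removed, 0 files unresolved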
--- a/tests/test-obsmarker-template.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-obsmarker-template.t Mon Jan 22 17:53:02 2018 -0500
@@ -13,7 +13,7 @@
> evolution=true
> [templates]
> obsfatesuccessors = "{if(successors, " as ")}{join(successors, ", ")}"
- > obsfateverb = "{obsfateverb(successors)}"
+ > obsfateverb = "{obsfateverb(successors, markers)}"
> obsfateoperations = "{if(obsfateoperations(markers), " using {join(obsfateoperations(markers), ", ")}")}"
> obsfateusers = "{if(obsfateusers(markers), " by {join(obsfateusers(markers), ", ")}")}"
> obsfatedate = "{if(obsfatedate(markers), "{ifeq(min(obsfatedate(markers)), max(obsfatedate(markers)), " (at {min(obsfatedate(markers))|isodate})", " (between {min(obsfatedate(markers))|isodate} and {max(obsfatedate(markers))|isodate})")}")}"
@@ -75,6 +75,8 @@
Check templates
---------------
$ hg up 'desc(A0)' --hidden
+ updating to a hidden changeset 471f378eab4c
+ (hidden revision '471f378eab4c' was rewritten as: d004c8f274b9)
1 files updated, 0 files merged, 0 files removed, 0 files unresolved
Predecessors template should show current revision as it is the working copy
@@ -146,6 +148,8 @@
summary: ROOT
$ hg up 'desc(A1)' --hidden
+ updating to a hidden changeset a468dc9b3633
+ (hidden revision 'a468dc9b3633' was rewritten as: d004c8f274b9)
1 files updated, 0 files merged, 0 files removed, 0 files unresolved
Predecessors template should show current revision as it is the working copy
@@ -248,9 +252,9 @@
@ d004c8f274b9
|
| x a468dc9b3633
- |/ Obsfate: [{"markers": [["a468dc9b36338b14fdb7825f55ce3df4e71517ad", ["d004c8f274b9ec480a47a93c10dac5eee63adb78"], 0, [["operation", "amend"], ["user", "test2"]], [987654321.0, 0], null]], "successors": ["d004c8f274b9ec480a47a93c10dac5eee63adb78"]}]
+ |/ Obsfate: [{"markers": [["a468dc9b36338b14fdb7825f55ce3df4e71517ad", ["d004c8f274b9ec480a47a93c10dac5eee63adb78"], 0, [["ef1", "1"], ["operation", "amend"], ["user", "test2"]], [987654321.0, 0], null]], "successors": ["d004c8f274b9ec480a47a93c10dac5eee63adb78"]}]
| x 471f378eab4c
- |/ Obsfate: [{"markers": [["471f378eab4c5e25f6c77f785b27c936efb22874", ["a468dc9b36338b14fdb7825f55ce3df4e71517ad"], 0, [["operation", "amend"], ["user", "test"]], [1234567890.0, 0], null]], "successors": ["a468dc9b36338b14fdb7825f55ce3df4e71517ad"]}]
+ |/ Obsfate: [{"markers": [["471f378eab4c5e25f6c77f785b27c936efb22874", ["a468dc9b36338b14fdb7825f55ce3df4e71517ad"], 0, [["ef1", "9"], ["operation", "amend"], ["user", "test"]], [1234567890.0, 0], null]], "successors": ["a468dc9b36338b14fdb7825f55ce3df4e71517ad"]}]
o ea207398892e
@@ -413,6 +417,8 @@
---------------
$ hg up 'obsolete()' --hidden
+ updating to a hidden changeset 471597cad322
+ (hidden revision '471597cad322' was split as: 337fec4d2edc, f257fde29c7a)
0 files updated, 0 files merged, 0 files removed, 0 files unresolved
Predecessors template should show current revision as it is the working copy
@@ -588,6 +594,7 @@
created new head
$ hg debugobsolete `getid "desc(A0)"` `getid "desc(C0)"`
obsoleted 1 changesets
+ 1 new orphan changesets
$ hg debugobsolete `getid "desc(B0)"` `getid "desc(C0)"`
obsoleted 1 changesets
@@ -620,6 +627,8 @@
---------------
$ hg up 'desc(A0)' --hidden
+ updating to a hidden changeset 471f378eab4c
+ (hidden revision '471f378eab4c' was rewritten as: eb5a0daa2192)
0 files updated, 0 files merged, 1 files removed, 0 files unresolved
Predecessors template should show current revision as it is the working copy
@@ -644,6 +653,8 @@
o ea207398892e
$ hg up 'desc(B0)' --hidden
+ updating to a hidden changeset 0dec01379d3b
+ (hidden revision '0dec01379d3b' was rewritten as: eb5a0daa2192)
1 files updated, 0 files merged, 0 files removed, 0 files unresolved
Predecessors template should show both predecessors as they should be both
@@ -809,8 +820,11 @@
summary: ROOT
$ hg update --hidden 'desc(A0)'
+ updating to a hidden changeset 471f378eab4c
+ (hidden revision '471f378eab4c' was rewritten as: fdf9bde5129a)
0 files updated, 0 files merged, 0 files removed, 0 files unresolved
$ hg commit --amend -m "A2"
+ 2 new content-divergent changesets
$ hg log --hidden -G
@ changeset: 3:65b757b745b9
| tag: tip
@@ -820,7 +834,7 @@
| instability: content-divergent
| summary: A2
|
- | o changeset: 2:fdf9bde5129a
+ | * changeset: 2:fdf9bde5129a
|/ parent: 0:ea207398892e
| user: test
| date: Thu Jan 01 00:00:00 1970 +0000
@@ -856,7 +870,7 @@
| obsolete: rewritten using amend as 4:019fadeab383
| summary: A2
|
- | o changeset: 2:fdf9bde5129a
+ | * changeset: 2:fdf9bde5129a
|/ parent: 0:ea207398892e
| user: test
| date: Thu Jan 01 00:00:00 1970 +0000
@@ -880,16 +894,18 @@
---------------
$ hg up 'desc(A0)' --hidden
+ updating to a hidden changeset 471f378eab4c
+ (hidden revision '471f378eab4c' has diverged)
0 files updated, 0 files merged, 0 files removed, 0 files unresolved
Predecessors template should show current revision as it is the working copy
$ hg tlog
- o 019fadeab383
+ * 019fadeab383
| Predecessors: 1:471f378eab4c
| semi-colon: 1:471f378eab4c
| json: ["471f378eab4c5e25f6c77f785b27c936efb22874"]
| map: 1:471f378eab4c5e25f6c77f785b27c936efb22874
- | o fdf9bde5129a
+ | * fdf9bde5129a
|/ Predecessors: 1:471f378eab4c
| semi-colon: 1:471f378eab4c
| json: ["471f378eab4c5e25f6c77f785b27c936efb22874"]
@@ -902,9 +918,9 @@
o ea207398892e
$ hg fatelog
- o 019fadeab383
+ * 019fadeab383
|
- | o fdf9bde5129a
+ | * fdf9bde5129a
|/
| @ 471f378eab4c
|/ Obsfate: rewritten using amend as 2:fdf9bde5129a by test (at 1970-01-01 00:00 +0000); rewritten using amend as 4:019fadeab383 by test (at 1970-01-01 00:00 +0000);
@@ -916,7 +932,7 @@
Predecessors template should not show predecessors as they are not displayed in
the log
$ hg tlog
- o 019fadeab383
+ * 019fadeab383
|
| @ fdf9bde5129a
|/
@@ -924,7 +940,7 @@
$ hg fatelog
- o 019fadeab383
+ * 019fadeab383
|
| @ fdf9bde5129a
|/
@@ -933,7 +949,7 @@
Predecessors template should show the predecessors as we force their display with
--hidden
$ hg tlog --hidden
- o 019fadeab383
+ * 019fadeab383
| Predecessors: 3:65b757b745b9
| semi-colon: 3:65b757b745b9
| json: ["65b757b745b935093c87a2bccd877521cccffcbd"]
@@ -960,7 +976,7 @@
$ hg fatelog --hidden
- o 019fadeab383
+ * 019fadeab383
|
| x 65b757b745b9
|/ Obsfate: rewritten using amend as 4:019fadeab383 by test (at 1970-01-01 00:00 +0000);
@@ -972,14 +988,14 @@
$ hg fatelogjson --hidden
- o 019fadeab383
+ * 019fadeab383
|
| x 65b757b745b9
- |/ Obsfate: [{"markers": [["65b757b745b935093c87a2bccd877521cccffcbd", ["019fadeab383f6699fa83ad7bdb4d82ed2c0e5ab"], 0, [["operation", "amend"], ["user", "test"]], [0.0, 0], null]], "successors": ["019fadeab383f6699fa83ad7bdb4d82ed2c0e5ab"]}]
+ |/ Obsfate: [{"markers": [["65b757b745b935093c87a2bccd877521cccffcbd", ["019fadeab383f6699fa83ad7bdb4d82ed2c0e5ab"], 0, [["ef1", "1"], ["operation", "amend"], ["user", "test"]], [0.0, 0], null]], "successors": ["019fadeab383f6699fa83ad7bdb4d82ed2c0e5ab"]}]
| @ fdf9bde5129a
|/
| x 471f378eab4c
- |/ Obsfate: [{"markers": [["471f378eab4c5e25f6c77f785b27c936efb22874", ["fdf9bde5129a28d4548fadd3f62b265cdd3b7a2e"], 0, [["operation", "amend"], ["user", "test"]], [0.0, 0], null]], "successors": ["fdf9bde5129a28d4548fadd3f62b265cdd3b7a2e"]}, {"markers": [["471f378eab4c5e25f6c77f785b27c936efb22874", ["65b757b745b935093c87a2bccd877521cccffcbd"], 0, [["operation", "amend"], ["user", "test"]], [0.0, 0], null]], "successors": ["65b757b745b935093c87a2bccd877521cccffcbd"]}]
+ |/ Obsfate: [{"markers": [["471f378eab4c5e25f6c77f785b27c936efb22874", ["fdf9bde5129a28d4548fadd3f62b265cdd3b7a2e"], 0, [["ef1", "1"], ["operation", "amend"], ["user", "test"]], [0.0, 0], null]], "successors": ["fdf9bde5129a28d4548fadd3f62b265cdd3b7a2e"]}, {"markers": [["471f378eab4c5e25f6c77f785b27c936efb22874", ["65b757b745b935093c87a2bccd877521cccffcbd"], 0, [["ef1", "1"], ["operation", "amend"], ["user", "test"]], [0.0, 0], null]], "successors": ["65b757b745b935093c87a2bccd877521cccffcbd"]}]
o ea207398892e
@@ -987,7 +1003,7 @@
-----------------------------------
$ hg fatelogkw --hidden -q
- o 019fadeab383
+ * 019fadeab383
|
| x 65b757b745b9
|/ Obsfate: rewritten using amend as 4:019fadeab383
@@ -999,7 +1015,7 @@
o ea207398892e
$ hg fatelogkw --hidden
- o 019fadeab383
+ * 019fadeab383
|
| x 65b757b745b9
|/ Obsfate: rewritten using amend as 4:019fadeab383
@@ -1011,7 +1027,7 @@
o ea207398892e
$ hg fatelogkw --hidden -v
- o 019fadeab383
+ * 019fadeab383
|
| x 65b757b745b9
|/ Obsfate: rewritten using amend as 4:019fadeab383 by test (at 1970-01-01 00:00 +0000)
@@ -1023,7 +1039,7 @@
o ea207398892e
$ hg log -G -T "default" --hidden
- o changeset: 4:019fadeab383
+ * changeset: 4:019fadeab383
| tag: tip
| parent: 0:ea207398892e
| user: test
@@ -1105,6 +1121,7 @@
created new head
$ hg debugobsolete `getid "desc(A0)"` `getid "desc(C0)"`
obsoleted 1 changesets
+ 1 new orphan changesets
$ hg debugobsolete `getid "desc(B1)"` `getid "desc(C0)"`
obsoleted 1 changesets
@@ -1144,6 +1161,8 @@
---------------
$ hg up 'desc(A0)' --hidden
+ updating to a hidden changeset 471f378eab4c
+ (hidden revision '471f378eab4c' was rewritten as: eb5a0daa2192)
0 files updated, 0 files merged, 1 files removed, 0 files unresolved
Predecessors template should show current revision as it is the working copy
@@ -1168,6 +1187,8 @@
o ea207398892e
$ hg up 'desc(B0)' --hidden
+ updating to a hidden changeset 0dec01379d3b
+ (hidden revision '0dec01379d3b' was rewritten as: eb5a0daa2192)
1 files updated, 0 files merged, 0 files removed, 0 files unresolved
Predecessors template should show both predecessors as they are visible
@@ -1198,6 +1219,8 @@
o ea207398892e
$ hg up 'desc(B1)' --hidden
+ updating to a hidden changeset b7ea6d14e664
+ (hidden revision 'b7ea6d14e664' was rewritten as: eb5a0daa2192)
0 files updated, 0 files merged, 0 files removed, 0 files unresolved
Predecessors template should show both predecessors as they are visible
@@ -1287,7 +1310,7 @@
| x b7ea6d14e664
| | Obsfate: [{"markers": [["b7ea6d14e664bdc8922221f7992631b50da3fb07", ["eb5a0daa21923bbf8caeb2c42085b9e463861fd0"], 0, [["user", "test"]], [0.0, 0], null]], "successors": ["eb5a0daa21923bbf8caeb2c42085b9e463861fd0"]}]
| | x 0dec01379d3b
- | |/ Obsfate: [{"markers": [["0dec01379d3be6318c470ead31b1fe7ae7cb53d5", ["b7ea6d14e664bdc8922221f7992631b50da3fb07"], 0, [["operation", "amend"], ["user", "test"]], [0.0, 0], null]], "successors": ["b7ea6d14e664bdc8922221f7992631b50da3fb07"]}]
+ | |/ Obsfate: [{"markers": [["0dec01379d3be6318c470ead31b1fe7ae7cb53d5", ["b7ea6d14e664bdc8922221f7992631b50da3fb07"], 0, [["ef1", "1"], ["operation", "amend"], ["user", "test"]], [0.0, 0], null]], "successors": ["b7ea6d14e664bdc8922221f7992631b50da3fb07"]}]
| x 471f378eab4c
|/ Obsfate: [{"markers": [["471f378eab4c5e25f6c77f785b27c936efb22874", ["eb5a0daa21923bbf8caeb2c42085b9e463861fd0"], 0, [["user", "test"]], [0.0, 0], null]], "successors": ["eb5a0daa21923bbf8caeb2c42085b9e463861fd0"]}]
o ea207398892e
@@ -1419,7 +1442,7 @@
$ cd $TESTTMP/templates-local-remote-markers-2
$ hg pull
- pulling from $TESTTMP/templates-local-remote-markers-1 (glob)
+ pulling from $TESTTMP/templates-local-remote-markers-1
searching for changes
adding changesets
adding manifests
@@ -1450,8 +1473,8 @@
$ hg debugobsolete
- 471f378eab4c5e25f6c77f785b27c936efb22874 fdf9bde5129a28d4548fadd3f62b265cdd3b7a2e 0 (Thu Jan 01 00:00:00 1970 +0000) {'operation': 'amend', 'user': 'test'}
- fdf9bde5129a28d4548fadd3f62b265cdd3b7a2e 7a230b46bf61e50b30308c6cfd7bd1269ef54702 0 (Thu Jan 01 00:00:00 1970 +0000) {'operation': 'amend', 'user': 'test'}
+ 471f378eab4c5e25f6c77f785b27c936efb22874 fdf9bde5129a28d4548fadd3f62b265cdd3b7a2e 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '1', 'operation': 'amend', 'user': 'test'}
+ fdf9bde5129a28d4548fadd3f62b265cdd3b7a2e 7a230b46bf61e50b30308c6cfd7bd1269ef54702 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '1', 'operation': 'amend', 'user': 'test'}
Check templates
---------------
@@ -1579,6 +1602,7 @@
$ hg debugobsolete `getid "desc(A0)"` `getid "desc(B0)"`
obsoleted 1 changesets
+ 1 new orphan changesets
$ hg debugobsolete `getid "desc(B0)"` `getid "desc(C0)"`
obsoleted 1 changesets
$ hg debugobsolete `getid "desc(B0)"` `getid "desc(A0)"`
@@ -1599,6 +1623,8 @@
$ hg up -r "desc(B0)" --hidden
+ updating to a hidden changeset 0dec01379d3b
+ (hidden revision '0dec01379d3b' is pruned)
2 files updated, 0 files merged, 1 files removed, 0 files unresolved
$ hg tlog
o f897c6137566
@@ -1863,10 +1889,12 @@
$ hg up 6
0 files updated, 0 files merged, 1 files removed, 0 files unresolved
$ hg commit --amend -m "Add only B"
+ 1 new orphan changesets
$ hg up 6 --hidden
0 files updated, 0 files merged, 0 files removed, 0 files unresolved
$ hg commit --amend -m "Add B only"
+ 4 new content-divergent changesets
$ hg log -G
@ changeset: 9:0b997eb7ceee
@@ -1877,14 +1905,14 @@
| instability: content-divergent
| summary: Add B only
|
- | o changeset: 8:b18bc8331526
+ | * changeset: 8:b18bc8331526
|/ parent: 5:dd800401bd8c
| user: test
| date: Thu Jan 01 00:00:00 1970 +0000
| instability: content-divergent
| summary: Add only B
|
- | o changeset: 7:ba2ed02b0c9a
+ | * changeset: 7:ba2ed02b0c9a
| | user: test
| | date: Thu Jan 01 00:00:00 1970 +0000
| | instability: orphan, content-divergent
@@ -1897,7 +1925,7 @@
| obsolete: rewritten using amend as 9:0b997eb7ceee
| summary: Add A,B,C
|
- o changeset: 5:dd800401bd8c
+ * changeset: 5:dd800401bd8c
| parent: 3:f897c6137566
| user: test
| date: Thu Jan 01 00:00:00 1970 +0000
@@ -1925,19 +1953,19 @@
| semi-colon: 6:4a004186e638
| json: ["4a004186e63889f20cb16434fcbd72220bd1eace"]
| map: 6:4a004186e63889f20cb16434fcbd72220bd1eace
- | o b18bc8331526
+ | * b18bc8331526
|/ Predecessors: 6:4a004186e638
| semi-colon: 6:4a004186e638
| json: ["4a004186e63889f20cb16434fcbd72220bd1eace"]
| map: 6:4a004186e63889f20cb16434fcbd72220bd1eace
- | o ba2ed02b0c9a
+ | * ba2ed02b0c9a
| |
| x 4a004186e638
|/ Successors: 8:b18bc8331526; 9:0b997eb7ceee
| multi-line: 8:b18bc8331526
| multi-line: 9:0b997eb7ceee
| json: [["b18bc8331526a22cbb1801022bd1555bf291c48b"], ["0b997eb7ceeee06200a02f8aab185979092d514e"]]
- o dd800401bd8c
+ * dd800401bd8c
|
o f897c6137566
|
@@ -1946,13 +1974,13 @@
$ hg fatelog
@ 0b997eb7ceee
|
- | o b18bc8331526
+ | * b18bc8331526
|/
- | o ba2ed02b0c9a
+ | * ba2ed02b0c9a
| |
| x 4a004186e638
|/ Obsfate: rewritten using amend as 8:b18bc8331526 by test (at 1970-01-01 00:00 +0000); rewritten using amend as 9:0b997eb7ceee by test (at 1970-01-01 00:00 +0000);
- o dd800401bd8c
+ * dd800401bd8c
|
o f897c6137566
|
@@ -1964,12 +1992,12 @@
| semi-colon: 6:4a004186e638
| json: ["4a004186e63889f20cb16434fcbd72220bd1eace"]
| map: 6:4a004186e63889f20cb16434fcbd72220bd1eace
- | o b18bc8331526
+ | * b18bc8331526
|/ Predecessors: 6:4a004186e638
| semi-colon: 6:4a004186e638
| json: ["4a004186e63889f20cb16434fcbd72220bd1eace"]
| map: 6:4a004186e63889f20cb16434fcbd72220bd1eace
- | o ba2ed02b0c9a
+ | * ba2ed02b0c9a
| | Predecessors: 4:9bd10a0775e4
| | semi-colon: 4:9bd10a0775e4
| | json: ["9bd10a0775e478708cada5f176ec6de654359ce7"]
@@ -1983,7 +2011,7 @@
| multi-line: 8:b18bc8331526
| multi-line: 9:0b997eb7ceee
| json: [["b18bc8331526a22cbb1801022bd1555bf291c48b"], ["0b997eb7ceeee06200a02f8aab185979092d514e"]]
- o dd800401bd8c
+ * dd800401bd8c
| Predecessors: 4:9bd10a0775e4
| semi-colon: 4:9bd10a0775e4
| json: ["9bd10a0775e478708cada5f176ec6de654359ce7"]
@@ -2019,13 +2047,13 @@
$ hg fatelog --hidden
@ 0b997eb7ceee
|
- | o b18bc8331526
+ | * b18bc8331526
|/
- | o ba2ed02b0c9a
+ | * ba2ed02b0c9a
| |
| x 4a004186e638
|/ Obsfate: rewritten using amend as 8:b18bc8331526 by test (at 1970-01-01 00:00 +0000); rewritten using amend as 9:0b997eb7ceee by test (at 1970-01-01 00:00 +0000);
- o dd800401bd8c
+ * dd800401bd8c
|
| x 9bd10a0775e4
|/ Obsfate: split as 5:dd800401bd8c, 6:4a004186e638, 7:ba2ed02b0c9a by test (at 1970-01-01 00:00 +0000);
@@ -2040,13 +2068,13 @@
$ hg fatelogjson --hidden
@ 0b997eb7ceee
|
- | o b18bc8331526
+ | * b18bc8331526
|/
- | o ba2ed02b0c9a
+ | * ba2ed02b0c9a
| |
| x 4a004186e638
- |/ Obsfate: [{"markers": [["4a004186e63889f20cb16434fcbd72220bd1eace", ["b18bc8331526a22cbb1801022bd1555bf291c48b"], 0, [["operation", "amend"], ["user", "test"]], [0.0, 0], null]], "successors": ["b18bc8331526a22cbb1801022bd1555bf291c48b"]}, {"markers": [["4a004186e63889f20cb16434fcbd72220bd1eace", ["0b997eb7ceeee06200a02f8aab185979092d514e"], 0, [["operation", "amend"], ["user", "test"]], [0.0, 0], null]], "successors": ["0b997eb7ceeee06200a02f8aab185979092d514e"]}]
- o dd800401bd8c
+ |/ Obsfate: [{"markers": [["4a004186e63889f20cb16434fcbd72220bd1eace", ["b18bc8331526a22cbb1801022bd1555bf291c48b"], 0, [["ef1", "1"], ["operation", "amend"], ["user", "test"]], [0.0, 0], null]], "successors": ["b18bc8331526a22cbb1801022bd1555bf291c48b"]}, {"markers": [["4a004186e63889f20cb16434fcbd72220bd1eace", ["0b997eb7ceeee06200a02f8aab185979092d514e"], 0, [["ef1", "1"], ["operation", "amend"], ["user", "test"]], [0.0, 0], null]], "successors": ["0b997eb7ceeee06200a02f8aab185979092d514e"]}]
+ * dd800401bd8c
|
| x 9bd10a0775e4
|/ Obsfate: [{"markers": [["9bd10a0775e478708cada5f176ec6de654359ce7", ["dd800401bd8c79d815329277739e433e883f784e", "4a004186e63889f20cb16434fcbd72220bd1eace", "ba2ed02b0c9a56b9fdbc4e79c7e57866984d8a1f"], 0, [["user", "test"]], [0.0, 0], null]], "successors": ["dd800401bd8c79d815329277739e433e883f784e", "4a004186e63889f20cb16434fcbd72220bd1eace", "ba2ed02b0c9a56b9fdbc4e79c7e57866984d8a1f"]}]
@@ -2059,26 +2087,28 @@
o ea207398892e
$ hg up --hidden 4
+ updating to a hidden changeset 9bd10a0775e4
+ (hidden revision '9bd10a0775e4' has diverged)
1 files updated, 0 files merged, 0 files removed, 0 files unresolved
$ hg rebase -r 7 -d 8 --config extensions.rebase=
rebasing 7:ba2ed02b0c9a "Add A,B,C"
$ hg tlog
- o eceed8f98ffc
+ * eceed8f98ffc
| Predecessors: 4:9bd10a0775e4
| semi-colon: 4:9bd10a0775e4
| json: ["9bd10a0775e478708cada5f176ec6de654359ce7"]
| map: 4:9bd10a0775e478708cada5f176ec6de654359ce7
- | o 0b997eb7ceee
+ | * 0b997eb7ceee
| | Predecessors: 4:9bd10a0775e4
| | semi-colon: 4:9bd10a0775e4
| | json: ["9bd10a0775e478708cada5f176ec6de654359ce7"]
| | map: 4:9bd10a0775e478708cada5f176ec6de654359ce7
- o | b18bc8331526
+ * | b18bc8331526
|/ Predecessors: 4:9bd10a0775e4
| semi-colon: 4:9bd10a0775e4
| json: ["9bd10a0775e478708cada5f176ec6de654359ce7"]
| map: 4:9bd10a0775e478708cada5f176ec6de654359ce7
- o dd800401bd8c
+ * dd800401bd8c
| Predecessors: 4:9bd10a0775e4
| semi-colon: 4:9bd10a0775e4
| json: ["9bd10a0775e478708cada5f176ec6de654359ce7"]
@@ -2094,13 +2124,13 @@
$ hg fatelog
- o eceed8f98ffc
+ * eceed8f98ffc
|
- | o 0b997eb7ceee
+ | * 0b997eb7ceee
| |
- o | b18bc8331526
+ * | b18bc8331526
|/
- o dd800401bd8c
+ * dd800401bd8c
|
| @ 9bd10a0775e4
|/ Obsfate: split using amend, rebase as 5:dd800401bd8c, 9:0b997eb7ceee, 10:eceed8f98ffc by test (at 1970-01-01 00:00 +0000); split using amend, rebase as 5:dd800401bd8c, 8:b18bc8331526, 10:eceed8f98ffc by test (at 1970-01-01 00:00 +0000);
@@ -2112,18 +2142,18 @@
-----------------------------------
$ hg fatelogkw --hidden -q
- o eceed8f98ffc
+ * eceed8f98ffc
|
- | o 0b997eb7ceee
+ | * 0b997eb7ceee
| |
- o | b18bc8331526
+ * | b18bc8331526
|/
| x ba2ed02b0c9a
| | Obsfate: rewritten using rebase as 10:eceed8f98ffc
| x 4a004186e638
|/ Obsfate: rewritten using amend as 8:b18bc8331526
| Obsfate: rewritten using amend as 9:0b997eb7ceee
- o dd800401bd8c
+ * dd800401bd8c
|
| @ 9bd10a0775e4
|/ Obsfate: split as 5:dd800401bd8c, 6:4a004186e638, 7:ba2ed02b0c9a
@@ -2137,18 +2167,18 @@
o ea207398892e
$ hg fatelogkw --hidden
- o eceed8f98ffc
+ * eceed8f98ffc
|
- | o 0b997eb7ceee
+ | * 0b997eb7ceee
| |
- o | b18bc8331526
+ * | b18bc8331526
|/
| x ba2ed02b0c9a
| | Obsfate: rewritten using rebase as 10:eceed8f98ffc
| x 4a004186e638
|/ Obsfate: rewritten using amend as 8:b18bc8331526
| Obsfate: rewritten using amend as 9:0b997eb7ceee
- o dd800401bd8c
+ * dd800401bd8c
|
| @ 9bd10a0775e4
|/ Obsfate: split as 5:dd800401bd8c, 6:4a004186e638, 7:ba2ed02b0c9a
@@ -2162,18 +2192,18 @@
o ea207398892e
$ hg fatelogkw --hidden -v
- o eceed8f98ffc
+ * eceed8f98ffc
|
- | o 0b997eb7ceee
+ | * 0b997eb7ceee
| |
- o | b18bc8331526
+ * | b18bc8331526
|/
| x ba2ed02b0c9a
| | Obsfate: rewritten using rebase as 10:eceed8f98ffc by test (at 1970-01-01 00:00 +0000)
| x 4a004186e638
|/ Obsfate: rewritten using amend as 8:b18bc8331526 by test (at 1970-01-01 00:00 +0000)
| Obsfate: rewritten using amend as 9:0b997eb7ceee by test (at 1970-01-01 00:00 +0000)
- o dd800401bd8c
+ * dd800401bd8c
|
| @ 9bd10a0775e4
|/ Obsfate: split as 5:dd800401bd8c, 6:4a004186e638, 7:ba2ed02b0c9a by test (at 1970-01-01 00:00 +0000)
@@ -2187,7 +2217,7 @@
o ea207398892e
$ hg log -G -T "default" --hidden
- o changeset: 10:eceed8f98ffc
+ * changeset: 10:eceed8f98ffc
| tag: tip
| parent: 8:b18bc8331526
| user: test
@@ -2195,14 +2225,14 @@
| instability: content-divergent
| summary: Add A,B,C
|
- | o changeset: 9:0b997eb7ceee
+ | * changeset: 9:0b997eb7ceee
| | parent: 5:dd800401bd8c
| | user: test
| | date: Thu Jan 01 00:00:00 1970 +0000
| | instability: content-divergent
| | summary: Add B only
| |
- o | changeset: 8:b18bc8331526
+ * | changeset: 8:b18bc8331526
|/ parent: 5:dd800401bd8c
| user: test
| date: Thu Jan 01 00:00:00 1970 +0000
@@ -2222,7 +2252,7 @@
| obsolete: rewritten using amend as 9:0b997eb7ceee
| summary: Add A,B,C
|
- o changeset: 5:dd800401bd8c
+ * changeset: 5:dd800401bd8c
| parent: 3:f897c6137566
| user: test
| date: Thu Jan 01 00:00:00 1970 +0000
@@ -2303,6 +2333,8 @@
obsoleted 1 changesets
$ hg up -r "desc(A0)" --hidden
+ updating to a hidden changeset 471f378eab4c
+ (hidden revision '471f378eab4c' is pruned)
0 files updated, 0 files merged, 0 files removed, 0 files unresolved
$ hg commit --amend -m "A2"
$ hg debugobsolete --record-parent `getid "."`
@@ -2312,6 +2344,8 @@
------------
$ hg up "desc(A0)" --hidden
+ updating to a hidden changeset 471f378eab4c
+ (hidden revision '471f378eab4c' is pruned)
0 files updated, 0 files merged, 0 files removed, 0 files unresolved
$ hg tlog
@ 471f378eab4c
@@ -2465,6 +2499,8 @@
---------------
$ hg up 'desc("A0")' --hidden
+ updating to a hidden changeset 471597cad322
+ (hidden revision '471597cad322' is pruned)
0 files updated, 0 files merged, 0 files removed, 0 files unresolved
# todo: the obsfate output is not ideal
@@ -2476,6 +2512,8 @@
o ea207398892e
$ hg up -r 'desc("A2")' --hidden
+ updating to a hidden changeset 0d0ef4bdf70e
+ (hidden revision '0d0ef4bdf70e' is pruned)
0 files updated, 0 files merged, 0 files removed, 0 files unresolved
$ hg fatelog --hidden
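The 'ef1' entries that now appear in the debugobsolete and JSON obsfate outputs above are the effect-flag metadata recorded with each obsolescence marker. Judging only from the outputs in this patch, the value looks like a bitfield describing what the rewrite changed: the message-only amends carry '1', the rebases carry '4', and one amend carries '9', presumably because it changed more than the description. A quick way to inspect what a repository records is the command already used throughout this patch (predecessor and successor hashes elided here):

  $ hg debugobsolete
  <predecessor> <successor> 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '1', 'operation': 'amend', 'user': 'test'}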
--- a/tests/test-obsolete-bundle-strip.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-obsolete-bundle-strip.t Mon Jan 22 17:53:02 2018 -0500
@@ -158,7 +158,7 @@
### diff <relevant> <bundled> ###
#################################
### Exclusive markers ###
- # stripping: saved backup bundle to $TESTTMP/simple-chain/.hg/strip-backup/84fcb0dfe17b-6454bbdc-backup.hg (glob)
+ # stripping: saved backup bundle to $TESTTMP/simple-chain/.hg/strip-backup/84fcb0dfe17b-6454bbdc-backup.hg
### Backup markers ###
a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0 84fcb0dfe17b256ebae52e05572993b9194c018a 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
### diff <relevant> <backed-up> ###
@@ -189,7 +189,7 @@
### Exclusive markers ###
84fcb0dfe17b256ebae52e05572993b9194c018a a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1 cf2c22470d67233004e934a31184ac2b35389914 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
- # stripping: saved backup bundle to $TESTTMP/simple-chain/.hg/strip-backup/cf2c22470d67-fa0f07b0-backup.hg (glob)
+ # stripping: saved backup bundle to $TESTTMP/simple-chain/.hg/strip-backup/cf2c22470d67-fa0f07b0-backup.hg
### Backup markers ###
84fcb0dfe17b256ebae52e05572993b9194c018a a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0 84fcb0dfe17b256ebae52e05572993b9194c018a 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
@@ -229,7 +229,7 @@
84fcb0dfe17b256ebae52e05572993b9194c018a a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0 84fcb0dfe17b256ebae52e05572993b9194c018a 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1 cf2c22470d67233004e934a31184ac2b35389914 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
- # stripping: saved backup bundle to $TESTTMP/simple-chain/.hg/strip-backup/cf2c22470d67-fce4fc64-backup.hg (glob)
+ # stripping: saved backup bundle to $TESTTMP/simple-chain/.hg/strip-backup/cf2c22470d67-fce4fc64-backup.hg
### Backup markers ###
84fcb0dfe17b256ebae52e05572993b9194c018a a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0 84fcb0dfe17b256ebae52e05572993b9194c018a 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
@@ -272,6 +272,7 @@
$ hg debugobsolete a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0 `getid 'desc("C-A0")'`
$ hg debugobsolete `getid 'desc("C-A0")'` `getid 'desc("C-A1")'`
obsoleted 1 changesets
+ 1 new orphan changesets
$ hg debugobsolete --record-parents `getid 'desc("C-B0")'`
obsoleted 1 changesets
$ hg up 'desc("ROOT")'
@@ -324,7 +325,7 @@
#################################
### Exclusive markers ###
29f93b1df87baee1824e014080d8adf145f81783 0 {84fcb0dfe17b256ebae52e05572993b9194c018a} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
- # stripping: saved backup bundle to $TESTTMP/prune/.hg/strip-backup/29f93b1df87b-7fb32101-backup.hg (glob)
+ # stripping: saved backup bundle to $TESTTMP/prune/.hg/strip-backup/29f93b1df87b-7fb32101-backup.hg
### Backup markers ###
29f93b1df87baee1824e014080d8adf145f81783 0 {84fcb0dfe17b256ebae52e05572993b9194c018a} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
### diff <relevant> <backed-up> ###
@@ -356,7 +357,7 @@
#################################
### Exclusive markers ###
84fcb0dfe17b256ebae52e05572993b9194c018a cf2c22470d67233004e934a31184ac2b35389914 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
- # stripping: saved backup bundle to $TESTTMP/prune/.hg/strip-backup/cf2c22470d67-fa0f07b0-backup.hg (glob)
+ # stripping: saved backup bundle to $TESTTMP/prune/.hg/strip-backup/cf2c22470d67-fa0f07b0-backup.hg
### Backup markers ###
29f93b1df87baee1824e014080d8adf145f81783 0 {84fcb0dfe17b256ebae52e05572993b9194c018a} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
84fcb0dfe17b256ebae52e05572993b9194c018a cf2c22470d67233004e934a31184ac2b35389914 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
@@ -417,7 +418,7 @@
29f93b1df87baee1824e014080d8adf145f81783 0 {84fcb0dfe17b256ebae52e05572993b9194c018a} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
84fcb0dfe17b256ebae52e05572993b9194c018a cf2c22470d67233004e934a31184ac2b35389914 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0 84fcb0dfe17b256ebae52e05572993b9194c018a 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
- # stripping: saved backup bundle to $TESTTMP/prune/.hg/strip-backup/cf2c22470d67-884c33b0-backup.hg (glob)
+ # stripping: saved backup bundle to $TESTTMP/prune/.hg/strip-backup/cf2c22470d67-884c33b0-backup.hg
### Backup markers ###
29f93b1df87baee1824e014080d8adf145f81783 0 {84fcb0dfe17b256ebae52e05572993b9194c018a} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
84fcb0dfe17b256ebae52e05572993b9194c018a cf2c22470d67233004e934a31184ac2b35389914 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
@@ -489,7 +490,7 @@
### diff <relevant> <bundled> ###
#################################
### Exclusive markers ###
- # stripping: saved backup bundle to $TESTTMP/prune-inline/.hg/strip-backup/84fcb0dfe17b-6454bbdc-backup.hg (glob)
+ # stripping: saved backup bundle to $TESTTMP/prune-inline/.hg/strip-backup/84fcb0dfe17b-6454bbdc-backup.hg
### Backup markers ###
84fcb0dfe17b256ebae52e05572993b9194c018a 0 {ea207398892eb49e06441f10dda2a731f0450f20} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0 84fcb0dfe17b256ebae52e05572993b9194c018a 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
@@ -520,7 +521,7 @@
#################################
### Exclusive markers ###
84fcb0dfe17b256ebae52e05572993b9194c018a cf2c22470d67233004e934a31184ac2b35389914 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
- # stripping: saved backup bundle to $TESTTMP/prune-inline/.hg/strip-backup/cf2c22470d67-fa0f07b0-backup.hg (glob)
+ # stripping: saved backup bundle to $TESTTMP/prune-inline/.hg/strip-backup/cf2c22470d67-fa0f07b0-backup.hg
### Backup markers ###
84fcb0dfe17b256ebae52e05572993b9194c018a 0 {ea207398892eb49e06441f10dda2a731f0450f20} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
84fcb0dfe17b256ebae52e05572993b9194c018a cf2c22470d67233004e934a31184ac2b35389914 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
@@ -558,7 +559,7 @@
84fcb0dfe17b256ebae52e05572993b9194c018a 0 {ea207398892eb49e06441f10dda2a731f0450f20} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
84fcb0dfe17b256ebae52e05572993b9194c018a cf2c22470d67233004e934a31184ac2b35389914 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0 84fcb0dfe17b256ebae52e05572993b9194c018a 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
- # stripping: saved backup bundle to $TESTTMP/prune-inline/.hg/strip-backup/cf2c22470d67-fce4fc64-backup.hg (glob)
+ # stripping: saved backup bundle to $TESTTMP/prune-inline/.hg/strip-backup/cf2c22470d67-fce4fc64-backup.hg
### Backup markers ###
84fcb0dfe17b256ebae52e05572993b9194c018a 0 {ea207398892eb49e06441f10dda2a731f0450f20} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
84fcb0dfe17b256ebae52e05572993b9194c018a cf2c22470d67233004e934a31184ac2b35389914 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
@@ -601,6 +602,7 @@
$ hg debugobsolete a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0 `getid 'desc("C-A0")'`
$ hg debugobsolete `getid 'desc("C-A0")'` `getid 'desc("C-A1")'`
obsoleted 1 changesets
+ 1 new orphan changesets
$ hg debugobsolete --record-parents `getid 'desc("C-B0")'`
obsoleted 1 changesets
@@ -641,7 +643,7 @@
29f93b1df87baee1824e014080d8adf145f81783 0 {84fcb0dfe17b256ebae52e05572993b9194c018a} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
84fcb0dfe17b256ebae52e05572993b9194c018a cf2c22470d67233004e934a31184ac2b35389914 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0 84fcb0dfe17b256ebae52e05572993b9194c018a 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
- # stripping: saved backup bundle to $TESTTMP/missing-prune/.hg/strip-backup/cf2c22470d67-fa0f07b0-backup.hg (glob)
+ # stripping: saved backup bundle to $TESTTMP/missing-prune/.hg/strip-backup/cf2c22470d67-fa0f07b0-backup.hg
### Backup markers ###
29f93b1df87baee1824e014080d8adf145f81783 0 {84fcb0dfe17b256ebae52e05572993b9194c018a} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
84fcb0dfe17b256ebae52e05572993b9194c018a cf2c22470d67233004e934a31184ac2b35389914 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
@@ -721,7 +723,7 @@
84fcb0dfe17b256ebae52e05572993b9194c018a 0 {ea207398892eb49e06441f10dda2a731f0450f20} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
84fcb0dfe17b256ebae52e05572993b9194c018a cf2c22470d67233004e934a31184ac2b35389914 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0 84fcb0dfe17b256ebae52e05572993b9194c018a 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
- # stripping: saved backup bundle to $TESTTMP/prune-inline-missing/.hg/strip-backup/cf2c22470d67-fa0f07b0-backup.hg (glob)
+ # stripping: saved backup bundle to $TESTTMP/prune-inline-missing/.hg/strip-backup/cf2c22470d67-fa0f07b0-backup.hg
### Backup markers ###
84fcb0dfe17b256ebae52e05572993b9194c018a 0 {ea207398892eb49e06441f10dda2a731f0450f20} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
84fcb0dfe17b256ebae52e05572993b9194c018a cf2c22470d67233004e934a31184ac2b35389914 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
@@ -770,8 +772,10 @@
$ hg debugobsolete `getid 'desc("C-A")'` `getid 'desc("C-B")'` `getid 'desc("C-C")'` # record split
obsoleted 1 changesets
$ hg debugobsolete `getid 'desc("C-A")'` `getid 'desc("C-D")'` # other divergent
+ 3 new content-divergent changesets
$ hg debugobsolete `getid 'desc("C-A")'` b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0
$ hg debugobsolete b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0 `getid 'desc("C-E")'`
+ 1 new content-divergent changesets
$ hg debugobsolete `getid 'desc("C-B")'` `getid 'desc("C-E")'`
obsoleted 1 changesets
$ hg debugobsolete `getid 'desc("C-C")'` `getid 'desc("C-E")'`
@@ -820,7 +824,7 @@
### diff <relevant> <bundled> ###
#################################
### Exclusive markers ###
- # stripping: saved backup bundle to $TESTTMP/split-fold/.hg/strip-backup/9ac430e15fca-81204eba-backup.hg (glob)
+ # stripping: saved backup bundle to $TESTTMP/split-fold/.hg/strip-backup/9ac430e15fca-81204eba-backup.hg
### Backup markers ###
a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
### diff <relevant> <backed-up> ###
@@ -847,7 +851,7 @@
### diff <relevant> <bundled> ###
#################################
### Exclusive markers ###
- # stripping: saved backup bundle to $TESTTMP/split-fold/.hg/strip-backup/a9b9da38ed96-7465d6e9-backup.hg (glob)
+ # stripping: saved backup bundle to $TESTTMP/split-fold/.hg/strip-backup/a9b9da38ed96-7465d6e9-backup.hg
### Backup markers ###
9ac430e15fca923b0ba027ca85d4d75c5c9cb73c a9b9da38ed96f8c6c14f429441f625a344eb4696 27ec657ca21dd27c36c99fa75586f72ff0d442f1 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
@@ -875,7 +879,7 @@
### diff <relevant> <bundled> ###
#################################
### Exclusive markers ###
- # stripping: saved backup bundle to $TESTTMP/split-fold/.hg/strip-backup/27ec657ca21d-d5dd1c7c-backup.hg (glob)
+ # stripping: saved backup bundle to $TESTTMP/split-fold/.hg/strip-backup/27ec657ca21d-d5dd1c7c-backup.hg
### Backup markers ###
9ac430e15fca923b0ba027ca85d4d75c5c9cb73c a9b9da38ed96f8c6c14f429441f625a344eb4696 27ec657ca21dd27c36c99fa75586f72ff0d442f1 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
@@ -903,7 +907,7 @@
### diff <relevant> <bundled> ###
#################################
### Exclusive markers ###
- # stripping: saved backup bundle to $TESTTMP/split-fold/.hg/strip-backup/06dc9da25ef0-9b1c0a91-backup.hg (glob)
+ # stripping: saved backup bundle to $TESTTMP/split-fold/.hg/strip-backup/06dc9da25ef0-9b1c0a91-backup.hg
### Backup markers ###
9ac430e15fca923b0ba027ca85d4d75c5c9cb73c 06dc9da25ef03e1ff7864dded5fcba42eff2a3f0 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
@@ -951,7 +955,8 @@
a9b9da38ed96f8c6c14f429441f625a344eb4696 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
- # stripping: saved backup bundle to $TESTTMP/split-fold/.hg/strip-backup/2f20ff6509f0-8adeb22d-backup.hg (glob)
+ # stripping: saved backup bundle to $TESTTMP/split-fold/.hg/strip-backup/2f20ff6509f0-8adeb22d-backup.hg
+ 3 new content-divergent changesets
### Backup markers ###
06dc9da25ef03e1ff7864dded5fcba42eff2a3f0 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
27ec657ca21dd27c36c99fa75586f72ff0d442f1 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
@@ -1000,7 +1005,7 @@
### diff <relevant> <bundled> ###
#################################
### Exclusive markers ###
- # stripping: saved backup bundle to $TESTTMP/split-fold/.hg/strip-backup/a9b9da38ed96-0daf625a-backup.hg (glob)
+ # stripping: saved backup bundle to $TESTTMP/split-fold/.hg/strip-backup/a9b9da38ed96-0daf625a-backup.hg
### Backup markers ###
9ac430e15fca923b0ba027ca85d4d75c5c9cb73c a9b9da38ed96f8c6c14f429441f625a344eb4696 27ec657ca21dd27c36c99fa75586f72ff0d442f1 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
@@ -1052,7 +1057,7 @@
a9b9da38ed96f8c6c14f429441f625a344eb4696 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
- # stripping: saved backup bundle to $TESTTMP/split-fold/.hg/strip-backup/2f20ff6509f0-bf1b80f4-backup.hg (glob)
+ # stripping: saved backup bundle to $TESTTMP/split-fold/.hg/strip-backup/2f20ff6509f0-bf1b80f4-backup.hg
### Backup markers ###
06dc9da25ef03e1ff7864dded5fcba42eff2a3f0 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
27ec657ca21dd27c36c99fa75586f72ff0d442f1 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
@@ -1120,7 +1125,8 @@
a9b9da38ed96f8c6c14f429441f625a344eb4696 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
- # stripping: saved backup bundle to $TESTTMP/split-fold/.hg/strip-backup/9ac430e15fca-36b6476a-backup.hg (glob)
+ # stripping: saved backup bundle to $TESTTMP/split-fold/.hg/strip-backup/9ac430e15fca-36b6476a-backup.hg
+ 3 new content-divergent changesets
### Backup markers ###
06dc9da25ef03e1ff7864dded5fcba42eff2a3f0 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
27ec657ca21dd27c36c99fa75586f72ff0d442f1 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
@@ -1188,7 +1194,7 @@
a9b9da38ed96f8c6c14f429441f625a344eb4696 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
- # stripping: saved backup bundle to $TESTTMP/split-fold/.hg/strip-backup/2f20ff6509f0-5fdfcd7d-backup.hg (glob)
+ # stripping: saved backup bundle to $TESTTMP/split-fold/.hg/strip-backup/2f20ff6509f0-5fdfcd7d-backup.hg
### Backup markers ###
06dc9da25ef03e1ff7864dded5fcba42eff2a3f0 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
27ec657ca21dd27c36c99fa75586f72ff0d442f1 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
@@ -1262,7 +1268,7 @@
a9b9da38ed96f8c6c14f429441f625a344eb4696 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
- # stripping: saved backup bundle to $TESTTMP/split-fold/.hg/strip-backup/a9b9da38ed96-eeb4258f-backup.hg (glob)
+ # stripping: saved backup bundle to $TESTTMP/split-fold/.hg/strip-backup/a9b9da38ed96-eeb4258f-backup.hg
### Backup markers ###
06dc9da25ef03e1ff7864dded5fcba42eff2a3f0 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
27ec657ca21dd27c36c99fa75586f72ff0d442f1 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
@@ -1352,7 +1358,7 @@
#################################
### Exclusive markers ###
cefb651fc2fdc7bb75e588781de5e432c134e8a5 0 {9ac430e15fca923b0ba027ca85d4d75c5c9cb73c} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
- # stripping: saved backup bundle to $TESTTMP/lonely-prune/.hg/strip-backup/cefb651fc2fd-345c8dfa-backup.hg (glob)
+ # stripping: saved backup bundle to $TESTTMP/lonely-prune/.hg/strip-backup/cefb651fc2fd-345c8dfa-backup.hg
### Backup markers ###
cefb651fc2fdc7bb75e588781de5e432c134e8a5 0 {9ac430e15fca923b0ba027ca85d4d75c5c9cb73c} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
### diff <relevant> <backed-up> ###
@@ -1380,7 +1386,7 @@
#################################
### Exclusive markers ###
cefb651fc2fdc7bb75e588781de5e432c134e8a5 0 {9ac430e15fca923b0ba027ca85d4d75c5c9cb73c} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
- # stripping: saved backup bundle to $TESTTMP/lonely-prune/.hg/strip-backup/9ac430e15fca-b9855b02-backup.hg (glob)
+ # stripping: saved backup bundle to $TESTTMP/lonely-prune/.hg/strip-backup/9ac430e15fca-b9855b02-backup.hg
### Backup markers ###
cefb651fc2fdc7bb75e588781de5e432c134e8a5 0 {9ac430e15fca923b0ba027ca85d4d75c5c9cb73c} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
### diff <relevant> <backed-up> ###
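The (glob) annotations dropped above were attached to backup-bundle paths under $TESTTMP; they are no longer needed, presumably because the test runner now matches $TESTTMP paths on Windows without an explicit glob. To peek at what one of these strip backups contains, hg debugbundle on the saved file is one option (a sketch using a bundle name taken from the output above; it should list the bundled changesets and, for bundle2 bundles, the individual parts):

  $ hg debugbundle .hg/strip-backup/84fcb0dfe17b-6454bbdc-backup.hg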
--- a/tests/test-obsolete-changeset-exchange.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-obsolete-changeset-exchange.t Mon Jan 22 17:53:02 2018 -0500
@@ -71,6 +71,7 @@
adding manifests
adding file changes
added 1 changesets with 0 changes to 1 files (+1 heads)
+ 1 new phase-divergent changesets
new changesets f89bcc95eba5
(run 'hg heads' to see heads, 'hg merge' to merge)
@@ -142,7 +143,7 @@
$ cd pull-hidden-common-client
$ hg pull --debug
- pulling from $TESTTMP/pull-hidden-common (glob)
+ pulling from $TESTTMP/pull-hidden-common
query 1; heads
searching for changes
taking quick initial sample
@@ -170,6 +171,6 @@
bundle2-input-part: total payload size 24
bundle2-input-bundle: 2 parts total
checking for updated bookmarks
+ updating the branch cache
new changesets bec0734cd68e
- updating the branch cache
(run 'hg heads' to see heads, 'hg merge' to merge)
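Several hunks above add notices such as "1 new orphan changesets", "N new content-divergent changesets" and "1 new phase-divergent changesets"; these are the instability warnings evolution now prints whenever an operation leaves unstable changesets behind. A hedged way to list the affected revisions afterwards, assuming the matching revset predicates that carry the same names:

  $ hg log -G -r 'orphan() or contentdivergent() or phasedivergent()'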
--- a/tests/test-obsolete-checkheads.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-obsolete-checkheads.t Mon Jan 22 17:53:02 2018 -0500
@@ -37,7 +37,7 @@
$ mkcommit old
$ hg push
- pushing to $TESTTMP/remote (glob)
+ pushing to $TESTTMP/remote
searching for changes
adding changesets
adding manifests
@@ -61,7 +61,7 @@
Push should not warn about creating new head
$ hg push
- pushing to $TESTTMP/remote (glob)
+ pushing to $TESTTMP/remote
searching for changes
adding changesets
adding manifests
@@ -77,7 +77,7 @@
$ cp -R ../backup1 ../remote
$ hg -R ../remote phase --public c70b08862e08
$ hg pull -v
- pulling from $TESTTMP/remote (glob)
+ pulling from $TESTTMP/remote
searching for changes
no changes found
$ hg log -G --hidden
@@ -91,7 +91,7 @@
Abort: old will still be a head because it's public.
$ hg push
- pushing to $TESTTMP/remote (glob)
+ pushing to $TESTTMP/remote
searching for changes
abort: push creates new remote head 71e3228bffe1!
(merge or see 'hg help push' for details about pushing new heads)
@@ -151,7 +151,7 @@
Push should abort on new head
$ hg push -r 'desc("other")'
- pushing to $TESTTMP/remote (glob)
+ pushing to $TESTTMP/remote
searching for changes
abort: push creates new remote head d7d41ccbd4de!
(merge or see 'hg help push' for details about pushing new heads)
@@ -178,7 +178,7 @@
$ mkcommit new
created new head
$ hg push -f
- pushing to $TESTTMP/remote (glob)
+ pushing to $TESTTMP/remote
searching for changes
adding changesets
adding manifests
@@ -214,7 +214,7 @@
one anyway.
$ hg push
- pushing to $TESTTMP/remote (glob)
+ pushing to $TESTTMP/remote
searching for changes
adding changesets
adding manifests
@@ -260,7 +260,7 @@
We do not have enough data to make the right decision, so we should fail
$ hg push
- pushing to $TESTTMP/remote (glob)
+ pushing to $TESTTMP/remote
searching for changes
remote has heads on branch 'default' that are not known locally: c70b08862e08
abort: push creates new remote head 71e3228bffe1!
@@ -270,7 +270,7 @@
Pulling the missing data makes it work
$ hg pull
- pulling from $TESTTMP/remote (glob)
+ pulling from $TESTTMP/remote
searching for changes
adding changesets
adding manifests
@@ -278,7 +278,7 @@
added 1 changesets with 1 changes to 1 files (+1 heads)
(run 'hg heads' to see heads)
$ hg push
- pushing to $TESTTMP/remote (glob)
+ pushing to $TESTTMP/remote
searching for changes
adding changesets
adding manifests
@@ -309,7 +309,7 @@
$ hg push
- pushing to $TESTTMP/remote (glob)
+ pushing to $TESTTMP/remote
searching for changes
abort: push creates new remote head 350a93b716be!
(merge or see 'hg help push' for details about pushing new heads)
--- a/tests/test-obsolete-distributed.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-obsolete-distributed.t Mon Jan 22 17:53:02 2018 -0500
@@ -136,7 +136,7 @@
$ hg up 'desc("ROOT")'
0 files updated, 0 files merged, 1 files removed, 0 files unresolved
$ hg pull
- pulling from $TESTTMP/distributed-chain-building/server (glob)
+ pulling from $TESTTMP/distributed-chain-building/server
searching for changes
adding changesets
adding manifests
@@ -174,7 +174,7 @@
$ hg rollback
repository tip rolled back to revision 3 (undo pull)
$ hg push -f
- pushing to $TESTTMP/distributed-chain-building/server (glob)
+ pushing to $TESTTMP/distributed-chain-building/server
searching for changes
adding changesets
adding manifests
@@ -276,6 +276,7 @@
$ hg up 'desc("c_A")'
1 files updated, 0 files merged, 0 files removed, 0 files unresolved
$ hg commit --amend -m 'c_A1'
+ 1 new orphan changesets
$ hg rebase -r 'desc("c_B0")' -d . # no easy way to rewrite the message with the rebase
rebasing 2:ef908e42ce65 "c_B0"
$ hg up
@@ -302,9 +303,9 @@
o 0:e82fb8d02bbf ROOT
$ hg debugobsolete
- d33b0a3a64647d79583526be8107802b1f9fedfa 5b5708a437f27665db42c5a261a539a1bcb2a8c2 0 (Thu Jan 01 00:00:00 1970 +0000) {'operation': 'amend', 'user': 'bob'}
- ef908e42ce65ef57f970d799acaddde26f58a4cc 5ffb9e311b35f6ab6f76f667ca5d6e595645481b 0 (Thu Jan 01 00:00:00 1970 +0000) {'operation': 'rebase', 'user': 'bob'}
- 5ffb9e311b35f6ab6f76f667ca5d6e595645481b 956063ac4557828781733b2d5677a351ce856f59 0 (Thu Jan 01 00:00:00 1970 +0000) {'operation': 'amend', 'user': 'bob'}
+ d33b0a3a64647d79583526be8107802b1f9fedfa 5b5708a437f27665db42c5a261a539a1bcb2a8c2 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '1', 'operation': 'amend', 'user': 'bob'}
+ ef908e42ce65ef57f970d799acaddde26f58a4cc 5ffb9e311b35f6ab6f76f667ca5d6e595645481b 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '4', 'operation': 'rebase', 'user': 'bob'}
+ 5ffb9e311b35f6ab6f76f667ca5d6e595645481b 956063ac4557828781733b2d5677a351ce856f59 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '1', 'operation': 'amend', 'user': 'bob'}
$ cd ..
Celeste pulls from Bob and rewrites them again
@@ -323,6 +324,7 @@
$ hg up 'desc("c_A")'
1 files updated, 0 files merged, 0 files removed, 0 files unresolved
$ hg commit --amend -m 'c_A2'
+ 1 new orphan changesets
$ hg rebase -r 'desc("c_B1")' -d . # no easy way to rewrite the message with the rebase
rebasing 2:956063ac4557 "c_B1"
$ hg up
@@ -349,12 +351,12 @@
o 0:e82fb8d02bbf ROOT
$ hg debugobsolete
- 5ffb9e311b35f6ab6f76f667ca5d6e595645481b 956063ac4557828781733b2d5677a351ce856f59 0 (Thu Jan 01 00:00:00 1970 +0000) {'operation': 'amend', 'user': 'bob'}
- d33b0a3a64647d79583526be8107802b1f9fedfa 5b5708a437f27665db42c5a261a539a1bcb2a8c2 0 (Thu Jan 01 00:00:00 1970 +0000) {'operation': 'amend', 'user': 'bob'}
- ef908e42ce65ef57f970d799acaddde26f58a4cc 5ffb9e311b35f6ab6f76f667ca5d6e595645481b 0 (Thu Jan 01 00:00:00 1970 +0000) {'operation': 'rebase', 'user': 'bob'}
- 5b5708a437f27665db42c5a261a539a1bcb2a8c2 9866d64649a5d9c5991fe119c7b2c33898114e10 0 (Thu Jan 01 00:00:00 1970 +0000) {'operation': 'amend', 'user': 'celeste'}
- 956063ac4557828781733b2d5677a351ce856f59 3cf8de21cc2282186857d2266eb6b1f9cb85ecf3 0 (Thu Jan 01 00:00:00 1970 +0000) {'operation': 'rebase', 'user': 'celeste'}
- 3cf8de21cc2282186857d2266eb6b1f9cb85ecf3 77ae25d99ff07889e181126b1171b94bec8e5227 0 (Thu Jan 01 00:00:00 1970 +0000) {'operation': 'amend', 'user': 'celeste'}
+ 5ffb9e311b35f6ab6f76f667ca5d6e595645481b 956063ac4557828781733b2d5677a351ce856f59 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '1', 'operation': 'amend', 'user': 'bob'}
+ d33b0a3a64647d79583526be8107802b1f9fedfa 5b5708a437f27665db42c5a261a539a1bcb2a8c2 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '1', 'operation': 'amend', 'user': 'bob'}
+ ef908e42ce65ef57f970d799acaddde26f58a4cc 5ffb9e311b35f6ab6f76f667ca5d6e595645481b 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '4', 'operation': 'rebase', 'user': 'bob'}
+ 5b5708a437f27665db42c5a261a539a1bcb2a8c2 9866d64649a5d9c5991fe119c7b2c33898114e10 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '1', 'operation': 'amend', 'user': 'celeste'}
+ 956063ac4557828781733b2d5677a351ce856f59 3cf8de21cc2282186857d2266eb6b1f9cb85ecf3 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '4', 'operation': 'rebase', 'user': 'celeste'}
+ 3cf8de21cc2282186857d2266eb6b1f9cb85ecf3 77ae25d99ff07889e181126b1171b94bec8e5227 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '1', 'operation': 'amend', 'user': 'celeste'}
Celeste now pushes to the server
@@ -362,7 +364,7 @@
However using a central server seems more common)
$ hg push
- pushing to $TESTTMP/distributed-chain-building/distributed-chain-building/server (glob)
+ pushing to $TESTTMP/distributed-chain-building/distributed-chain-building/server
searching for changes
adding changesets
adding manifests
@@ -380,7 +382,7 @@
$ hg up 'desc(ROOT)'
0 files updated, 0 files merged, 2 files removed, 0 files unresolved
$ hg pull
- pulling from $TESTTMP/distributed-chain-building/distributed-chain-building/server (glob)
+ pulling from $TESTTMP/distributed-chain-building/distributed-chain-building/server
searching for changes
adding changesets
adding manifests
@@ -391,12 +393,12 @@
new changesets 9866d64649a5:77ae25d99ff0
(run 'hg heads' to see heads)
$ hg debugobsolete
- 3cf8de21cc2282186857d2266eb6b1f9cb85ecf3 77ae25d99ff07889e181126b1171b94bec8e5227 0 (Thu Jan 01 00:00:00 1970 +0000) {'operation': 'amend', 'user': 'celeste'}
- 5b5708a437f27665db42c5a261a539a1bcb2a8c2 9866d64649a5d9c5991fe119c7b2c33898114e10 0 (Thu Jan 01 00:00:00 1970 +0000) {'operation': 'amend', 'user': 'celeste'}
- 5ffb9e311b35f6ab6f76f667ca5d6e595645481b 956063ac4557828781733b2d5677a351ce856f59 0 (Thu Jan 01 00:00:00 1970 +0000) {'operation': 'amend', 'user': 'bob'}
- 956063ac4557828781733b2d5677a351ce856f59 3cf8de21cc2282186857d2266eb6b1f9cb85ecf3 0 (Thu Jan 01 00:00:00 1970 +0000) {'operation': 'rebase', 'user': 'celeste'}
- d33b0a3a64647d79583526be8107802b1f9fedfa 5b5708a437f27665db42c5a261a539a1bcb2a8c2 0 (Thu Jan 01 00:00:00 1970 +0000) {'operation': 'amend', 'user': 'bob'}
- ef908e42ce65ef57f970d799acaddde26f58a4cc 5ffb9e311b35f6ab6f76f667ca5d6e595645481b 0 (Thu Jan 01 00:00:00 1970 +0000) {'operation': 'rebase', 'user': 'bob'}
+ 3cf8de21cc2282186857d2266eb6b1f9cb85ecf3 77ae25d99ff07889e181126b1171b94bec8e5227 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '1', 'operation': 'amend', 'user': 'celeste'}
+ 5b5708a437f27665db42c5a261a539a1bcb2a8c2 9866d64649a5d9c5991fe119c7b2c33898114e10 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '1', 'operation': 'amend', 'user': 'celeste'}
+ 5ffb9e311b35f6ab6f76f667ca5d6e595645481b 956063ac4557828781733b2d5677a351ce856f59 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '1', 'operation': 'amend', 'user': 'bob'}
+ 956063ac4557828781733b2d5677a351ce856f59 3cf8de21cc2282186857d2266eb6b1f9cb85ecf3 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '4', 'operation': 'rebase', 'user': 'celeste'}
+ d33b0a3a64647d79583526be8107802b1f9fedfa 5b5708a437f27665db42c5a261a539a1bcb2a8c2 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '1', 'operation': 'amend', 'user': 'bob'}
+ ef908e42ce65ef57f970d799acaddde26f58a4cc 5ffb9e311b35f6ab6f76f667ca5d6e595645481b 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '4', 'operation': 'rebase', 'user': 'bob'}
Then, she pulls from Bob, pulling predecessors of the changeset she has
already pulled. The changesets are not obsoleted in the Bob repo yet. Their
@@ -418,12 +420,12 @@
@ 0:e82fb8d02bbf ROOT
$ hg debugobsolete
- 3cf8de21cc2282186857d2266eb6b1f9cb85ecf3 77ae25d99ff07889e181126b1171b94bec8e5227 0 (Thu Jan 01 00:00:00 1970 +0000) {'operation': 'amend', 'user': 'celeste'}
- 5b5708a437f27665db42c5a261a539a1bcb2a8c2 9866d64649a5d9c5991fe119c7b2c33898114e10 0 (Thu Jan 01 00:00:00 1970 +0000) {'operation': 'amend', 'user': 'celeste'}
- 5ffb9e311b35f6ab6f76f667ca5d6e595645481b 956063ac4557828781733b2d5677a351ce856f59 0 (Thu Jan 01 00:00:00 1970 +0000) {'operation': 'amend', 'user': 'bob'}
- 956063ac4557828781733b2d5677a351ce856f59 3cf8de21cc2282186857d2266eb6b1f9cb85ecf3 0 (Thu Jan 01 00:00:00 1970 +0000) {'operation': 'rebase', 'user': 'celeste'}
- d33b0a3a64647d79583526be8107802b1f9fedfa 5b5708a437f27665db42c5a261a539a1bcb2a8c2 0 (Thu Jan 01 00:00:00 1970 +0000) {'operation': 'amend', 'user': 'bob'}
- ef908e42ce65ef57f970d799acaddde26f58a4cc 5ffb9e311b35f6ab6f76f667ca5d6e595645481b 0 (Thu Jan 01 00:00:00 1970 +0000) {'operation': 'rebase', 'user': 'bob'}
+ 3cf8de21cc2282186857d2266eb6b1f9cb85ecf3 77ae25d99ff07889e181126b1171b94bec8e5227 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '1', 'operation': 'amend', 'user': 'celeste'}
+ 5b5708a437f27665db42c5a261a539a1bcb2a8c2 9866d64649a5d9c5991fe119c7b2c33898114e10 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '1', 'operation': 'amend', 'user': 'celeste'}
+ 5ffb9e311b35f6ab6f76f667ca5d6e595645481b 956063ac4557828781733b2d5677a351ce856f59 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '1', 'operation': 'amend', 'user': 'bob'}
+ 956063ac4557828781733b2d5677a351ce856f59 3cf8de21cc2282186857d2266eb6b1f9cb85ecf3 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '4', 'operation': 'rebase', 'user': 'celeste'}
+ d33b0a3a64647d79583526be8107802b1f9fedfa 5b5708a437f27665db42c5a261a539a1bcb2a8c2 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '1', 'operation': 'amend', 'user': 'bob'}
+ ef908e42ce65ef57f970d799acaddde26f58a4cc 5ffb9e311b35f6ab6f76f667ca5d6e595645481b 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '4', 'operation': 'rebase', 'user': 'bob'}
Same tests, but change coming from a bundle
(testing with a bundle is interesting because absolutely no discovery or
@@ -439,12 +441,12 @@
@ 0:e82fb8d02bbf ROOT
$ hg debugobsolete
- 3cf8de21cc2282186857d2266eb6b1f9cb85ecf3 77ae25d99ff07889e181126b1171b94bec8e5227 0 (Thu Jan 01 00:00:00 1970 +0000) {'operation': 'amend', 'user': 'celeste'}
- 5b5708a437f27665db42c5a261a539a1bcb2a8c2 9866d64649a5d9c5991fe119c7b2c33898114e10 0 (Thu Jan 01 00:00:00 1970 +0000) {'operation': 'amend', 'user': 'celeste'}
- 5ffb9e311b35f6ab6f76f667ca5d6e595645481b 956063ac4557828781733b2d5677a351ce856f59 0 (Thu Jan 01 00:00:00 1970 +0000) {'operation': 'amend', 'user': 'bob'}
- 956063ac4557828781733b2d5677a351ce856f59 3cf8de21cc2282186857d2266eb6b1f9cb85ecf3 0 (Thu Jan 01 00:00:00 1970 +0000) {'operation': 'rebase', 'user': 'celeste'}
- d33b0a3a64647d79583526be8107802b1f9fedfa 5b5708a437f27665db42c5a261a539a1bcb2a8c2 0 (Thu Jan 01 00:00:00 1970 +0000) {'operation': 'amend', 'user': 'bob'}
- ef908e42ce65ef57f970d799acaddde26f58a4cc 5ffb9e311b35f6ab6f76f667ca5d6e595645481b 0 (Thu Jan 01 00:00:00 1970 +0000) {'operation': 'rebase', 'user': 'bob'}
+ 3cf8de21cc2282186857d2266eb6b1f9cb85ecf3 77ae25d99ff07889e181126b1171b94bec8e5227 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '1', 'operation': 'amend', 'user': 'celeste'}
+ 5b5708a437f27665db42c5a261a539a1bcb2a8c2 9866d64649a5d9c5991fe119c7b2c33898114e10 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '1', 'operation': 'amend', 'user': 'celeste'}
+ 5ffb9e311b35f6ab6f76f667ca5d6e595645481b 956063ac4557828781733b2d5677a351ce856f59 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '1', 'operation': 'amend', 'user': 'bob'}
+ 956063ac4557828781733b2d5677a351ce856f59 3cf8de21cc2282186857d2266eb6b1f9cb85ecf3 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '4', 'operation': 'rebase', 'user': 'celeste'}
+ d33b0a3a64647d79583526be8107802b1f9fedfa 5b5708a437f27665db42c5a261a539a1bcb2a8c2 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '1', 'operation': 'amend', 'user': 'bob'}
+ ef908e42ce65ef57f970d799acaddde26f58a4cc 5ffb9e311b35f6ab6f76f667ca5d6e595645481b 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '4', 'operation': 'rebase', 'user': 'bob'}
$ hg -R ../repo-Bob bundle ../step-1.hg
searching for changes
2 changesets found
@@ -477,11 +479,11 @@
@ 0:e82fb8d02bbf ROOT
$ hg debugobsolete
- 3cf8de21cc2282186857d2266eb6b1f9cb85ecf3 77ae25d99ff07889e181126b1171b94bec8e5227 0 (Thu Jan 01 00:00:00 1970 +0000) {'operation': 'amend', 'user': 'celeste'}
- 5b5708a437f27665db42c5a261a539a1bcb2a8c2 9866d64649a5d9c5991fe119c7b2c33898114e10 0 (Thu Jan 01 00:00:00 1970 +0000) {'operation': 'amend', 'user': 'celeste'}
- 5ffb9e311b35f6ab6f76f667ca5d6e595645481b 956063ac4557828781733b2d5677a351ce856f59 0 (Thu Jan 01 00:00:00 1970 +0000) {'operation': 'amend', 'user': 'bob'}
- 956063ac4557828781733b2d5677a351ce856f59 3cf8de21cc2282186857d2266eb6b1f9cb85ecf3 0 (Thu Jan 01 00:00:00 1970 +0000) {'operation': 'rebase', 'user': 'celeste'}
- d33b0a3a64647d79583526be8107802b1f9fedfa 5b5708a437f27665db42c5a261a539a1bcb2a8c2 0 (Thu Jan 01 00:00:00 1970 +0000) {'operation': 'amend', 'user': 'bob'}
- ef908e42ce65ef57f970d799acaddde26f58a4cc 5ffb9e311b35f6ab6f76f667ca5d6e595645481b 0 (Thu Jan 01 00:00:00 1970 +0000) {'operation': 'rebase', 'user': 'bob'}
+ 3cf8de21cc2282186857d2266eb6b1f9cb85ecf3 77ae25d99ff07889e181126b1171b94bec8e5227 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '1', 'operation': 'amend', 'user': 'celeste'}
+ 5b5708a437f27665db42c5a261a539a1bcb2a8c2 9866d64649a5d9c5991fe119c7b2c33898114e10 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '1', 'operation': 'amend', 'user': 'celeste'}
+ 5ffb9e311b35f6ab6f76f667ca5d6e595645481b 956063ac4557828781733b2d5677a351ce856f59 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '1', 'operation': 'amend', 'user': 'bob'}
+ 956063ac4557828781733b2d5677a351ce856f59 3cf8de21cc2282186857d2266eb6b1f9cb85ecf3 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '4', 'operation': 'rebase', 'user': 'celeste'}
+ d33b0a3a64647d79583526be8107802b1f9fedfa 5b5708a437f27665db42c5a261a539a1bcb2a8c2 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '1', 'operation': 'amend', 'user': 'bob'}
+ ef908e42ce65ef57f970d799acaddde26f58a4cc 5ffb9e311b35f6ab6f76f667ca5d6e595645481b 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '4', 'operation': 'rebase', 'user': 'bob'}
$ cd ..
--- a/tests/test-obsolete-divergent.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-obsolete-divergent.t Mon Jan 22 17:53:02 2018 -0500
@@ -61,10 +61,11 @@
$ hg debugobsolete `getid A_0` `getid A_1`
obsoleted 1 changesets
$ hg debugobsolete `getid A_0` `getid A_2`
+ 2 new content-divergent changesets
$ hg log -G --hidden
- o 3:392fd25390da A_2
+ * 3:392fd25390da A_2
|
- | o 2:82623d38b9ba A_1
+ | * 2:82623d38b9ba A_1
|/
| x 1:007dc284c1f8 A_0 [rewritten as 2:82623d38b9ba; rewritten as 3:392fd25390da]
|/
@@ -120,6 +121,7 @@
$ hg debugobsolete `getid A_0` `getid A_1`
obsoleted 1 changesets
$ hg debugobsolete `getid A_0` `getid A_2`
+ 2 new content-divergent changesets
$ mkcommit A_3
created new head
$ hg debugobsolete `getid A_2` `getid A_3`
@@ -129,7 +131,7 @@
|
| x 3:392fd25390da A_2 [rewritten as 4:01f36c5a8fda]
|/
- | o 2:82623d38b9ba A_1
+ | * 2:82623d38b9ba A_1
|/
| x 1:007dc284c1f8 A_0 [rewritten as 2:82623d38b9ba; rewritten as 3:392fd25390da]
|/
@@ -180,10 +182,11 @@
obsoleted 1 changesets
$ hg debugobsolete aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa `getid A_1`
$ hg debugobsolete `getid A_0` `getid A_2`
+ 2 new content-divergent changesets
$ hg log -G --hidden
- o 3:392fd25390da A_2
+ * 3:392fd25390da A_2
|
- | o 2:82623d38b9ba A_1
+ | * 2:82623d38b9ba A_1
|/
| x 1:007dc284c1f8 A_0 [rewritten as 2:82623d38b9ba; rewritten as 3:392fd25390da]
|/
@@ -250,6 +253,7 @@
$ hg debugobsolete `getid A_0` `getid A_1`
obsoleted 1 changesets
$ hg debugobsolete `getid A_0` `getid A_2`
+ 2 new content-divergent changesets
$ mkcommit A_3
created new head
$ hg debugobsolete `getid A_1` `getid A_3`
@@ -430,12 +434,13 @@
created new head
0 files updated, 0 files merged, 1 files removed, 0 files unresolved
$ hg debugobsolete `getid A_5` `getid A_9`
+ 4 new content-divergent changesets
$ hg log -G --hidden
- o 10:bed64f5d2f5a A_9
+ * 10:bed64f5d2f5a A_9
|
- | o 9:14608b260df8 A_8
+ | * 9:14608b260df8 A_8
|/
- | o 8:7ae126973a96 A_7
+ | * 8:7ae126973a96 A_7
|/
| x 7:3750ebee865d B_0 [rewritten as 3:392fd25390da]
| |
@@ -443,7 +448,7 @@
|/
| x 5:6a411f0d7a0a A_4 [rewritten as 6:e442cfc57690]
|/
- | o 4:01f36c5a8fda A_3
+ | * 4:01f36c5a8fda A_3
|/
| x 3:392fd25390da A_2 [rewritten as 5:6a411f0d7a0a]
|/
@@ -670,16 +675,17 @@
$ rm .hg/localtags
$ hg cleanup --config extensions.t=$TESTTMP/scmutilcleanup.py
+ 2 new content-divergent changesets
$ hg log -G -T '{rev}:{node|short} {desc} {instabilities}' -r 'sort(all(), topo)'
@ 5:1a2a9b5b0030 B2 content-divergent
|
- | o 4:70d5a63ca112 B4 content-divergent
+ | * 4:70d5a63ca112 B4 content-divergent
| |
| o 1:48b9aae0607f Z
|
o 0:426bada5c675 A
$ hg debugobsolete
- a178212c3433c4e77b573f6011e29affb8aefa33 1a2a9b5b0030632400aa78e00388c20f99d3ec44 0 (Thu Jan 01 00:00:00 1970 +0000) {'operation': 'amend', 'user': 'test'}
- a178212c3433c4e77b573f6011e29affb8aefa33 ad6478fb94ecec98b86daae98722865d494ac561 0 (Thu Jan 01 00:00:00 1970 +0000) {'operation': 'test', 'user': 'test'}
- ad6478fb94ecec98b86daae98722865d494ac561 70d5a63ca112acb3764bc1d7320ca90ea688d671 0 (Thu Jan 01 00:00:00 1970 +0000) {'operation': 'test', 'user': 'test'}
+ a178212c3433c4e77b573f6011e29affb8aefa33 1a2a9b5b0030632400aa78e00388c20f99d3ec44 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '1', 'operation': 'amend', 'user': 'test'}
+ a178212c3433c4e77b573f6011e29affb8aefa33 ad6478fb94ecec98b86daae98722865d494ac561 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '13', 'operation': 'test', 'user': 'test'}
+ ad6478fb94ecec98b86daae98722865d494ac561 70d5a63ca112acb3764bc1d7320ca90ea688d671 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '9', 'operation': 'test', 'user': 'test'}
--- a/tests/test-obsolete.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-obsolete.t Mon Jan 22 17:53:02 2018 -0500
@@ -62,6 +62,8 @@
$ hg tip
-1:000000000000 (public) [tip ]
$ hg up --hidden tip --quiet
+ updating to a hidden changeset 97b7c2d76b18
+ (hidden revision '97b7c2d76b18' is pruned)
Killing a single changeset with itself should fail
(simple local safeguard)
@@ -196,7 +198,7 @@
abort: unknown revision '6'!
[255]
$ hg log -r 4
- abort: hidden revision '4'!
+ abort: hidden revision '4' was rewritten as: 5601fb93a350!
(use --hidden to access hidden revisions)
[255]
$ hg debugrevspec 'rev(6)'
@@ -207,6 +209,7 @@
Check that public changesets are not accounted as obsolete:
$ hg --hidden phase --public 2
+ 1 new phase-divergent changesets
$ hg log -G
@ 5:5601fb93a350 (draft phase-divergent) [tip ] add new_3_c
|
@@ -338,6 +341,17 @@
$ cd ..
+Can disable transaction summary report
+
+ $ hg init transaction-summary
+ $ cd transaction-summary
+ $ mkcommit a
+ $ mkcommit b
+ $ hg up -q null
+ $ hg --config experimental.evolution.report-instabilities=false debugobsolete `getid a`
+ obsoleted 1 changesets
+ $ cd ..
+
Exchange Test
============================
@@ -518,6 +532,7 @@
$ mkcommit original_e
$ hg debugobsolete --record-parents `getid original_d` -d '0 0'
obsoleted 1 changesets
+ 1 new orphan changesets
$ hg debugobsolete | grep `getid original_d`
94b33453f93bdb8d457ef9b770851a618bf413e1 0 {6f96419950729f3671185b847352890f074f7557} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
$ hg log -r 'obsolete()'
@@ -582,6 +597,7 @@
adding file changes
added 6 changesets with 6 changes to 6 files (+1 heads)
7 new obsolescence markers
+ 1 new orphan changesets
no warning displayed
@@ -917,7 +933,9 @@
$ hg debugobsolete `getid obsolete_e`
obsoleted 1 changesets
+ 2 new orphan changesets
$ hg debugobsolete `getid original_c` `getid babar`
+ 1 new phase-divergent changesets
$ hg log --config ui.logtemplate= -r 'phasedivergent() and orphan()'
changeset: 7:50c51b361e60
user: test
@@ -1015,6 +1033,56 @@
orphan: 2 changesets
phase-divergent: 1 changesets
+#if serve
+
+ $ hg serve -n test -p $HGPORT -d --pid-file=hg.pid -A access.log -E errors.log
+ $ cat hg.pid >> $DAEMON_PIDS
+
+check obsolete changeset
+
+ $ get-with-headers.py localhost:$HGPORT 'log?rev=first(obsolete())&style=paper' | grep '<span class="obsolete">'
+ <span class="phase">draft</span> <span class="obsolete">obsolete</span>
+ $ get-with-headers.py localhost:$HGPORT 'log?rev=first(obsolete())&style=coal' | grep '<span class="obsolete">'
+ <span class="phase">draft</span> <span class="obsolete">obsolete</span>
+ $ get-with-headers.py localhost:$HGPORT 'log?rev=first(obsolete())&style=gitweb' | grep '<span class="logtags">'
+ <span class="logtags"><span class="phasetag" title="draft">draft</span> <span class="obsoletetag" title="obsolete">obsolete</span> </span>
+ $ get-with-headers.py localhost:$HGPORT 'log?rev=first(obsolete())&style=monoblue' | grep '<span class="logtags">'
+ <span class="logtags"><span class="phasetag" title="draft">draft</span> <span class="obsoletetag" title="obsolete">obsolete</span> </span>
+ $ get-with-headers.py localhost:$HGPORT 'log?rev=first(obsolete())&style=spartan' | grep 'class="obsolete"'
+ <th class="obsolete">obsolete:</th>
+ <td class="obsolete">pruned</td>
+
+check an obsolete changeset that has been rewritten
+ $ get-with-headers.py localhost:$HGPORT 'rev/cda648ca50f5?style=paper' | grep rewritten
+ <td>rewritten as <a href="/rev/3de5eca88c00?style=paper">3de5eca88c00</a> </td>
+ $ get-with-headers.py localhost:$HGPORT 'rev/cda648ca50f5?style=coal' | grep rewritten
+ <td>rewritten as <a href="/rev/3de5eca88c00?style=coal">3de5eca88c00</a> </td>
+ $ get-with-headers.py localhost:$HGPORT 'rev/cda648ca50f5?style=gitweb' | grep rewritten
+ <tr><td>obsolete</td><td>rewritten as <a class="list" href="/rev/3de5eca88c00?style=gitweb">3de5eca88c00</a> </td></tr>
+ $ get-with-headers.py localhost:$HGPORT 'rev/cda648ca50f5?style=monoblue' | grep rewritten
+ <dt>obsolete</dt><dd>rewritten as <a href="/rev/3de5eca88c00?style=monoblue">3de5eca88c00</a> </dd>
+ $ get-with-headers.py localhost:$HGPORT 'rev/cda648ca50f5?style=spartan' | grep rewritten
+ <td class="obsolete">rewritten as <a href="/rev/3de5eca88c00?style=spartan">3de5eca88c00</a> </td>
+
+check changeset with instabilities
+
+ $ get-with-headers.py localhost:$HGPORT 'log?rev=first(phasedivergent())&style=paper' | grep '<span class="instability">'
+ <span class="phase">draft</span> <span class="instability">orphan</span> <span class="instability">phase-divergent</span>
+ $ get-with-headers.py localhost:$HGPORT 'log?rev=first(phasedivergent())&style=coal' | grep '<span class="instability">'
+ <span class="phase">draft</span> <span class="instability">orphan</span> <span class="instability">phase-divergent</span>
+ $ get-with-headers.py localhost:$HGPORT 'log?rev=first(phasedivergent())&style=gitweb' | grep '<span class="logtags">'
+ <span class="logtags"><span class="phasetag" title="draft">draft</span> <span class="instabilitytag" title="orphan">orphan</span> <span class="instabilitytag" title="phase-divergent">phase-divergent</span> </span>
+ $ get-with-headers.py localhost:$HGPORT 'log?rev=first(phasedivergent())&style=monoblue' | grep '<span class="logtags">'
+ <span class="logtags"><span class="phasetag" title="draft">draft</span> <span class="instabilitytag" title="orphan">orphan</span> <span class="instabilitytag" title="phase-divergent">phase-divergent</span> </span>
+ $ get-with-headers.py localhost:$HGPORT 'log?rev=first(phasedivergent())&style=spartan' | grep 'class="instabilities"'
+ <th class="instabilities">instabilities:</th>
+ <td class="instabilities">orphan phase-divergent </td>
+
+ $ killdaemons.py
+
+ $ rm hg.pid access.log errors.log
+#endif
+
Test incoming/outgoing with changesets obsoleted remotely, known locally
===============================================================================
@@ -1045,15 +1113,15 @@
o 0:d20a80d4def3 (draft) [ ] base
$ hg incoming
- comparing with $TESTTMP/tmpe/repo-issue3805 (glob)
+ comparing with $TESTTMP/tmpe/repo-issue3805
searching for changes
2:323a9c3ddd91 (draft) [tip ] A
$ hg incoming --bundle ../issue3805.hg
- comparing with $TESTTMP/tmpe/repo-issue3805 (glob)
+ comparing with $TESTTMP/tmpe/repo-issue3805
searching for changes
2:323a9c3ddd91 (draft) [tip ] A
$ hg outgoing
- comparing with $TESTTMP/tmpe/repo-issue3805 (glob)
+ comparing with $TESTTMP/tmpe/repo-issue3805
searching for changes
1:29f0c6921ddd (draft) [tip ] A
@@ -1242,6 +1310,7 @@
0 files updated, 0 files merged, 1 files removed, 0 files unresolved
$ echo aa > a
$ hg amendtransient
+ 1 new orphan changesets
[1, 2]
Test cache consistency for the visible filter
@@ -1282,11 +1351,13 @@
$ echo "hello" > b
$ hg commit --amend -m "message"
$ hg book bookb -r 13bedc178fce --hidden
+ bookmarking hidden changeset 13bedc178fce
+ (hidden revision '13bedc178fce' was rewritten as: a9b1f8652753)
$ hg log -r 13bedc178fce
4:13bedc178fce (draft *obsolete*) [ bookb] add b [rewritten using amend as 5:a9b1f8652753]
$ hg book -d bookb
$ hg log -r 13bedc178fce
- abort: hidden revision '13bedc178fce'!
+ abort: hidden revision '13bedc178fce' was rewritten as: a9b1f8652753!
(use --hidden to access hidden revisions)
[255]
@@ -1331,9 +1402,9 @@
$ hg strip --hidden -r 2 --config extensions.strip= --config devel.strip-obsmarkers=no
- saved backup bundle to $TESTTMP/tmpe/issue4845/.hg/strip-backup/e008cf283490-ede36964-backup.hg (glob)
+ saved backup bundle to $TESTTMP/tmpe/issue4845/.hg/strip-backup/e008cf283490-ede36964-backup.hg
$ hg debugobsolete
- e008cf2834908e5d6b0f792a9d4b0e2272260fb8 b0551702f918510f01ae838ab03a463054c67b46 0 (Thu Jan 01 00:00:00 1970 +0000) {'operation': 'amend', 'user': 'test'}
+ e008cf2834908e5d6b0f792a9d4b0e2272260fb8 b0551702f918510f01ae838ab03a463054c67b46 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '8', 'operation': 'amend', 'user': 'test'}
$ hg log -G
@ 2:b0551702f918 (draft) [tip ] 2
|
@@ -1360,7 +1431,7 @@
searching for changes
no changes found
$ hg debugobsolete
- e008cf2834908e5d6b0f792a9d4b0e2272260fb8 b0551702f918510f01ae838ab03a463054c67b46 0 (Thu Jan 01 00:00:00 1970 +0000) {'operation': 'amend', 'user': 'test'}
+ e008cf2834908e5d6b0f792a9d4b0e2272260fb8 b0551702f918510f01ae838ab03a463054c67b46 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '8', 'operation': 'amend', 'user': 'test'}
$ hg log -G
@ 2:b0551702f918 (draft) [tip ] 2
|
@@ -1380,7 +1451,7 @@
$ hg strip -r 1 --config extensions.strip=
0 files updated, 0 files merged, 2 files removed, 0 files unresolved
- saved backup bundle to $TESTTMP/tmpe/issue4845/.hg/strip-backup/e016b03fd86f-65ede734-backup.hg (glob)
+ saved backup bundle to $TESTTMP/tmpe/issue4845/.hg/strip-backup/e016b03fd86f-65ede734-backup.hg
$ hg debugobsolete
$ hg log -G
@ 0:a78f55e5508c (draft) [tip ] 0
@@ -1394,8 +1465,8 @@
e016b03fd86fcccc54817d120b90b751aaf367d6
b0551702f918510f01ae838ab03a463054c67b46
obsmarkers -- {}
- version: 1 (86 bytes)
- e008cf2834908e5d6b0f792a9d4b0e2272260fb8 b0551702f918510f01ae838ab03a463054c67b46 0 (Thu Jan 01 00:00:00 1970 +0000) {'operation': 'amend', 'user': 'test'}
+ version: 1 (92 bytes)
+ e008cf2834908e5d6b0f792a9d4b0e2272260fb8 b0551702f918510f01ae838ab03a463054c67b46 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '8', 'operation': 'amend', 'user': 'test'}
phase-heads -- {}
b0551702f918510f01ae838ab03a463054c67b46 draft
@@ -1408,7 +1479,7 @@
new changesets e016b03fd86f:b0551702f918
(run 'hg update' to get a working copy)
$ hg debugobsolete | sort
- e008cf2834908e5d6b0f792a9d4b0e2272260fb8 b0551702f918510f01ae838ab03a463054c67b46 0 (Thu Jan 01 00:00:00 1970 +0000) {'operation': 'amend', 'user': 'test'}
+ e008cf2834908e5d6b0f792a9d4b0e2272260fb8 b0551702f918510f01ae838ab03a463054c67b46 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '8', 'operation': 'amend', 'user': 'test'}
$ hg log -G
o 2:b0551702f918 (draft) [tip ] 2
|
@@ -1444,15 +1515,15 @@
adding d
$ hg ci --amend -m dd --config experimental.evolution.track-operation=1
$ hg debugobsolete --index --rev "3+7"
- 1 6fdef60fcbabbd3d50e9b9cbc2a240724b91a5e1 d27fb9b066076fd921277a4b9e8b9cb48c95bc6a 0 (Thu Jan 01 00:00:00 1970 +0000) {'operation': 'amend', 'user': 'test'}
- 3 4715cf767440ed891755448016c2b8cf70760c30 7ae79c5d60f049c7b0dd02f5f25b9d60aaf7b36d 0 \(.*\) {'operation': 'amend', 'user': 'test'} (re)
+ 1 6fdef60fcbabbd3d50e9b9cbc2a240724b91a5e1 d27fb9b066076fd921277a4b9e8b9cb48c95bc6a 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '1', 'operation': 'amend', 'user': 'test'}
+ 3 4715cf767440ed891755448016c2b8cf70760c30 7ae79c5d60f049c7b0dd02f5f25b9d60aaf7b36d 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '1', 'operation': 'amend', 'user': 'test'}
$ hg debugobsolete --index --rev "3+7" -Tjson
[
{
"date": [0.0, 0],
"flag": 0,
"index": 1,
- "metadata": {"operation": "amend", "user": "test"},
+ "metadata": {"ef1": "1", "operation": "amend", "user": "test"},
"prednode": "6fdef60fcbabbd3d50e9b9cbc2a240724b91a5e1",
"succnodes": ["d27fb9b066076fd921277a4b9e8b9cb48c95bc6a"]
},
@@ -1460,7 +1531,7 @@
"date": [0.0, 0],
"flag": 0,
"index": 3,
- "metadata": {"operation": "amend", "user": "test"},
+ "metadata": {"ef1": "1", "operation": "amend", "user": "test"},
"prednode": "4715cf767440ed891755448016c2b8cf70760c30",
"succnodes": ["7ae79c5d60f049c7b0dd02f5f25b9d60aaf7b36d"]
}
@@ -1468,15 +1539,15 @@
Test the --delete option of debugobsolete command
$ hg debugobsolete --index
- 0 cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b f9bd49731b0b175e42992a3c8fa6c678b2bc11f1 0 (Thu Jan 01 00:00:00 1970 +0000) {'operation': 'amend', 'user': 'test'}
- 1 6fdef60fcbabbd3d50e9b9cbc2a240724b91a5e1 d27fb9b066076fd921277a4b9e8b9cb48c95bc6a 0 (Thu Jan 01 00:00:00 1970 +0000) {'operation': 'amend', 'user': 'test'}
- 2 1ab51af8f9b41ef8c7f6f3312d4706d870b1fb74 29346082e4a9e27042b62d2da0e2de211c027621 0 (Thu Jan 01 00:00:00 1970 +0000) {'operation': 'amend', 'user': 'test'}
- 3 4715cf767440ed891755448016c2b8cf70760c30 7ae79c5d60f049c7b0dd02f5f25b9d60aaf7b36d 0 (Thu Jan 01 00:00:00 1970 +0000) {'operation': 'amend', 'user': 'test'}
+ 0 cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b f9bd49731b0b175e42992a3c8fa6c678b2bc11f1 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '1', 'operation': 'amend', 'user': 'test'}
+ 1 6fdef60fcbabbd3d50e9b9cbc2a240724b91a5e1 d27fb9b066076fd921277a4b9e8b9cb48c95bc6a 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '1', 'operation': 'amend', 'user': 'test'}
+ 2 1ab51af8f9b41ef8c7f6f3312d4706d870b1fb74 29346082e4a9e27042b62d2da0e2de211c027621 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '1', 'operation': 'amend', 'user': 'test'}
+ 3 4715cf767440ed891755448016c2b8cf70760c30 7ae79c5d60f049c7b0dd02f5f25b9d60aaf7b36d 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '1', 'operation': 'amend', 'user': 'test'}
$ hg debugobsolete --delete 1 --delete 3
deleted 2 obsolescence markers
$ hg debugobsolete
- cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b f9bd49731b0b175e42992a3c8fa6c678b2bc11f1 0 (Thu Jan 01 00:00:00 1970 +0000) {'operation': 'amend', 'user': 'test'}
- 1ab51af8f9b41ef8c7f6f3312d4706d870b1fb74 29346082e4a9e27042b62d2da0e2de211c027621 0 (Thu Jan 01 00:00:00 1970 +0000) {'operation': 'amend', 'user': 'test'}
+ cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b f9bd49731b0b175e42992a3c8fa6c678b2bc11f1 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '1', 'operation': 'amend', 'user': 'test'}
+ 1ab51af8f9b41ef8c7f6f3312d4706d870b1fb74 29346082e4a9e27042b62d2da0e2de211c027621 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '1', 'operation': 'amend', 'user': 'test'}
Test adding changeset after obsmarkers affecting it
(eg: during pull, or unbundle)
@@ -1487,7 +1558,7 @@
$ getid .
$ hg --config extensions.strip= strip -r .
0 files updated, 0 files merged, 1 files removed, 0 files unresolved
- saved backup bundle to $TESTTMP/tmpe/issue4845/doindexrev/.hg/strip-backup/9bc153528424-ee80edd4-backup.hg (glob)
+ saved backup bundle to $TESTTMP/tmpe/issue4845/doindexrev/.hg/strip-backup/9bc153528424-ee80edd4-backup.hg
$ hg debugobsolete 9bc153528424ea266d13e57f9ff0d799dfe61e4b
$ hg unbundle ../bundle-2.hg
adding changesets
--- a/tests/test-origbackup-conflict.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-origbackup-conflict.t Mon Jan 22 17:53:02 2018 -0500
@@ -30,7 +30,7 @@
resolving manifests
b/c: replacing untracked file
getting b/c
- creating directory: $TESTTMP/repo/.hg/origbackups/b (glob)
+ creating directory: $TESTTMP/repo/.hg/origbackups/b
getting d
2 files updated, 0 files merged, 0 files removed, 0 files unresolved
(activating bookmark c1)
@@ -54,7 +54,7 @@
resolving manifests
b: replacing untracked file
getting b
- removing conflicting directory: $TESTTMP/repo/.hg/origbackups/b (glob)
+ removing conflicting directory: $TESTTMP/repo/.hg/origbackups/b
getting d
2 files updated, 0 files merged, 0 files removed, 0 files unresolved
(activating bookmark b1)
@@ -69,8 +69,8 @@
resolving manifests
b/c: replacing untracked file
getting b/c
- creating directory: $TESTTMP/repo/.hg/origbackups/b (glob)
- removing conflicting file: $TESTTMP/repo/.hg/origbackups/b (glob)
+ creating directory: $TESTTMP/repo/.hg/origbackups/b
+ removing conflicting file: $TESTTMP/repo/.hg/origbackups/b
getting d
2 files updated, 0 files merged, 0 files removed, 0 files unresolved
(activating bookmark c1)
@@ -107,8 +107,8 @@
b/c: replacing untracked file
d: replacing untracked file
getting b/c
- creating directory: $TESTTMP/repo/.hg/origbackups/b (glob)
- removing conflicting file: $TESTTMP/repo/.hg/origbackups/b (glob)
+ creating directory: $TESTTMP/repo/.hg/origbackups/b
+ removing conflicting file: $TESTTMP/repo/.hg/origbackups/b
getting d
2 files updated, 0 files merged, 0 files removed, 0 files unresolved
(activating bookmark c1)
@@ -128,9 +128,8 @@
resolving manifests
b/c: replacing untracked file
getting b/c
- creating directory: $TESTTMP/repo/.hg/badorigbackups/b (glob)
- abort: The system cannot find the path specified: '$TESTTMP/repo/.hg/badorigbackups/b' (glob) (windows !)
- abort: Not a directory: '$TESTTMP/repo/.hg/badorigbackups/b' (no-windows !)
+ creating directory: $TESTTMP/repo/.hg/badorigbackups/b
+ abort: $ENOTDIR$: '$TESTTMP/repo/.hg/badorigbackups/b'
[255]
$ cat .hg/badorigbackups
data
--- a/tests/test-parents.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-parents.t Mon Jan 22 17:53:02 2018 -0500
@@ -71,7 +71,7 @@
$ hg parents -r 2 ../a
- abort: ../a not under root '$TESTTMP/repo' (glob)
+ abort: ../a not under root '$TESTTMP/repo'
[255]
--- a/tests/test-pathconflicts-basic.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-pathconflicts-basic.t Mon Jan 22 17:53:02 2018 -0500
@@ -39,7 +39,7 @@
moving a to a~853701544ac3
getting a/b
1 files updated, 0 files merged, 0 files removed, 1 files unresolved
- use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon
+ use 'hg resolve' to retry unresolved file merges or 'hg merge --abort' to abandon
[1]
$ hg update --clean .
1 files updated, 0 files merged, 1 files removed, 0 files unresolved
--- a/tests/test-pathconflicts-merge.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-pathconflicts-merge.t Mon Jan 22 17:53:02 2018 -0500
@@ -1,3 +1,5 @@
+#require symlink
+
Path conflict checking is currently disabled by default because of issue5716.
Turn it on for this test.
@@ -48,10 +50,10 @@
a/b: path conflict - a file or link has the same name as a directory
the local file has been renamed to a/b~0ed027b96f31
resolve manually then use 'hg resolve --mark a/b'
- moving a/b to a/b~0ed027b96f31 (glob)
+ moving a/b to a/b~0ed027b96f31
getting a/b/c/d
1 files updated, 0 files merged, 0 files removed, 1 files unresolved
- use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon
+ use 'hg resolve' to retry unresolved file merges or 'hg merge --abort' to abandon
[1]
$ hg status
M a/b/c/d
@@ -75,7 +77,7 @@
the local file has been renamed to a/b~2ea68033e3be
resolve manually then use 'hg resolve --mark a/b'
1 files updated, 0 files merged, 0 files removed, 1 files unresolved
- use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon
+ use 'hg resolve' to retry unresolved file merges or 'hg merge --abort' to abandon
[1]
$ hg status
M a/b/c/d
@@ -103,7 +105,7 @@
the remote file has been renamed to a/b~0ed027b96f31
resolve manually then use 'hg resolve --mark a/b'
1 files updated, 0 files merged, 0 files removed, 1 files unresolved
- use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon
+ use 'hg resolve' to retry unresolved file merges or 'hg merge --abort' to abandon
[1]
$ hg status
A a/b~0ed027b96f31
@@ -126,7 +128,7 @@
the remote file has been renamed to a/b~2ea68033e3be
resolve manually then use 'hg resolve --mark a/b'
1 files updated, 0 files merged, 0 files removed, 1 files unresolved
- use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon
+ use 'hg resolve' to retry unresolved file merges or 'hg merge --abort' to abandon
[1]
$ hg mv a/b~2ea68033e3be a/b.old
$ readlink.py a/b.old
--- a/tests/test-paths.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-paths.t Mon Jan 22 17:53:02 2018 -0500
@@ -20,35 +20,35 @@
$ echo 'dupe = ../b#tip' >> .hg/hgrc
$ echo 'expand = $SOMETHING/bar' >> .hg/hgrc
$ hg in dupe
- comparing with $TESTTMP/b (glob)
+ comparing with $TESTTMP/b
no changes found
[1]
$ cd ..
$ hg -R a in dupe
- comparing with $TESTTMP/b (glob)
+ comparing with $TESTTMP/b
no changes found
[1]
$ cd a
$ hg paths
- dupe = $TESTTMP/b#tip (glob)
- expand = $TESTTMP/a/$SOMETHING/bar (glob)
+ dupe = $TESTTMP/b#tip
+ expand = $TESTTMP/a/$SOMETHING/bar
$ SOMETHING=foo hg paths
- dupe = $TESTTMP/b#tip (glob)
- expand = $TESTTMP/a/foo/bar (glob)
+ dupe = $TESTTMP/b#tip
+ expand = $TESTTMP/a/foo/bar
#if msys
$ SOMETHING=//foo hg paths
- dupe = $TESTTMP/b#tip (glob)
+ dupe = $TESTTMP/b#tip
expand = /foo/bar
#else
$ SOMETHING=/foo hg paths
- dupe = $TESTTMP/b#tip (glob)
+ dupe = $TESTTMP/b#tip
expand = /foo/bar
#endif
$ hg paths -q
dupe
expand
$ hg paths dupe
- $TESTTMP/b#tip (glob)
+ $TESTTMP/b#tip
$ hg paths -q dupe
$ hg paths unknown
not found!
@@ -64,11 +64,11 @@
{
"name": "dupe",
"pushurl": "https://example.com/dupe",
- "url": "$TESTTMP/b#tip" (glob)
+ "url": "$TESTTMP/b#tip"
},
{
"name": "expand",
- "url": "$TESTTMP/a/$SOMETHING/bar" (glob)
+ "url": "$TESTTMP/a/$SOMETHING/bar"
}
]
$ hg paths -Tjson dupe | sed 's|\\\\|\\|g'
@@ -76,7 +76,7 @@
{
"name": "dupe",
"pushurl": "https://example.com/dupe",
- "url": "$TESTTMP/b#tip" (glob)
+ "url": "$TESTTMP/b#tip"
}
]
$ hg paths -Tjson -q unknown
@@ -89,21 +89,21 @@
(behaves as a {name: path-string} dict by default)
$ hg log -rnull -T '{peerurls}\n'
- dupe=$TESTTMP/b#tip expand=$TESTTMP/a/$SOMETHING/bar (glob)
+ dupe=$TESTTMP/b#tip expand=$TESTTMP/a/$SOMETHING/bar
$ hg log -rnull -T '{join(peerurls, "\n")}\n'
- dupe=$TESTTMP/b#tip (glob)
- expand=$TESTTMP/a/$SOMETHING/bar (glob)
+ dupe=$TESTTMP/b#tip
+ expand=$TESTTMP/a/$SOMETHING/bar
$ hg log -rnull -T '{peerurls % "{name}: {url}\n"}'
- dupe: $TESTTMP/b#tip (glob)
- expand: $TESTTMP/a/$SOMETHING/bar (glob)
+ dupe: $TESTTMP/b#tip
+ expand: $TESTTMP/a/$SOMETHING/bar
$ hg log -rnull -T '{get(peerurls, "dupe")}\n'
- $TESTTMP/b#tip (glob)
+ $TESTTMP/b#tip
(sub options can be populated by map/dot operation)
$ hg log -rnull \
> -T '{get(peerurls, "dupe") % "url: {url}\npushurl: {pushurl}\n"}'
- url: $TESTTMP/b#tip (glob)
+ url: $TESTTMP/b#tip
pushurl: https://example.com/dupe
$ hg log -rnull -T '{peerurls.dupe.pushurl}\n'
https://example.com/dupe
@@ -132,9 +132,9 @@
zeroconf wraps ui.configitems(), which shouldn't crash at least:
$ hg paths --config extensions.zeroconf=
- dupe = $TESTTMP/b#tip (glob)
+ dupe = $TESTTMP/b#tip
dupe:pushurl = https://example.com/dupe
- expand = $TESTTMP/a/$SOMETHING/bar (glob)
+ expand = $TESTTMP/a/$SOMETHING/bar
insecure = http://foo:***@example.com/
$ cd ..
--- a/tests/test-profile.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-profile.t Mon Jan 22 17:53:02 2018 -0500
@@ -143,12 +143,12 @@
$ hg root
otherextension: loaded
fooprof: loaded
- $TESTTMP/b (glob)
+ $TESTTMP/b
$ HGPROF=fooprof hg root --profile
fooprof: loaded
fooprof: start profile
otherextension: loaded
- $TESTTMP/b (glob)
+ $TESTTMP/b
fooprof: end profile
$ HGPROF=other hg root --profile 2>&1 | head -n 2
--- a/tests/test-pull-r.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-pull-r.t Mon Jan 22 17:53:02 2018 -0500
@@ -43,7 +43,7 @@
2:effea6de0384
1:ed1b79f46b9a
$ hg pull
- pulling from $TESTTMP/repo2 (glob)
+ pulling from $TESTTMP/repo2
searching for changes
adding changesets
adding manifests
@@ -144,3 +144,4 @@
$ cd ..
+ $ killdaemons.py
--- a/tests/test-pull-update.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-pull-update.t Mon Jan 22 17:53:02 2018 -0500
@@ -100,7 +100,7 @@
* active-before-pull 3:483b76ad4309
$ hg pull -u -r active-after-pull
- pulling from $TESTTMP/t (glob)
+ pulling from $TESTTMP/t
searching for changes
adding changesets
adding manifests
@@ -129,7 +129,7 @@
* active-before-pull 3:483b76ad4309
$ hg pull -u $TESTTMP/t#active-after-pull
- pulling from $TESTTMP/t (glob)
+ pulling from $TESTTMP/t
searching for changes
adding changesets
adding manifests
@@ -168,7 +168,7 @@
* active-before-pull 3:483b76ad4309
$ hg pull -u -r b5e4babfaaa7
- pulling from $TESTTMP/t (glob)
+ pulling from $TESTTMP/t
searching for changes
adding changesets
adding manifests
@@ -195,7 +195,7 @@
* active-before-pull 3:483b76ad4309
$ hg pull -u -b bar
- pulling from $TESTTMP/t (glob)
+ pulling from $TESTTMP/t
searching for changes
adding changesets
adding manifests
@@ -222,7 +222,7 @@
* active-before-pull 3:483b76ad4309
$ hg pull -u $TESTTMP/t#bar
- pulling from $TESTTMP/t (glob)
+ pulling from $TESTTMP/t
searching for changes
adding changesets
adding manifests
--- a/tests/test-push-checkheads-partial-C1.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-push-checkheads-partial-C1.t Mon Jan 22 17:53:02 2018 -0500
@@ -47,7 +47,7 @@
$ mkcommit B0
$ cd ../client
$ hg pull
- pulling from $TESTTMP/C1/server (glob)
+ pulling from $TESTTMP/C1/server
searching for changes
adding changesets
adding manifests
@@ -75,7 +75,7 @@
--------------
$ hg push
- pushing to $TESTTMP/C1/server (glob)
+ pushing to $TESTTMP/C1/server
searching for changes
abort: push creates new remote head 25c56d33e4c4!
(merge or see 'hg help push' for details about pushing new heads)
--- a/tests/test-push-checkheads-partial-C2.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-push-checkheads-partial-C2.t Mon Jan 22 17:53:02 2018 -0500
@@ -47,7 +47,7 @@
$ mkcommit B0
$ cd ../client
$ hg pull
- pulling from $TESTTMP/C2/server (glob)
+ pulling from $TESTTMP/C2/server
searching for changes
adding changesets
adding manifests
@@ -61,10 +61,11 @@
created new head
$ hg debugobsolete `getid "desc(A0)" ` `getid "desc(A1)"`
obsoleted 1 changesets
+ 1 new orphan changesets
$ hg log -G --hidden
@ f6082bc4ffef (draft): A1
|
- | o d73caddc5533 (draft): B0
+ | * d73caddc5533 (draft): B0
| |
| x 8aaa48160adc (draft): A0
|/
@@ -75,7 +76,7 @@
--------------
$ hg push --rev 'desc(A1)'
- pushing to $TESTTMP/C2/server (glob)
+ pushing to $TESTTMP/C2/server
searching for changes
abort: push creates new remote head f6082bc4ffef!
(merge or see 'hg help push' for details about pushing new heads)
--- a/tests/test-push-checkheads-partial-C3.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-push-checkheads-partial-C3.t Mon Jan 22 17:53:02 2018 -0500
@@ -47,7 +47,7 @@
$ mkcommit B0
$ cd ../client
$ hg pull
- pulling from $TESTTMP/C3/server (glob)
+ pulling from $TESTTMP/C3/server
searching for changes
adding changesets
adding manifests
@@ -75,7 +75,7 @@
--------------
$ hg push
- pushing to $TESTTMP/C3/server (glob)
+ pushing to $TESTTMP/C3/server
searching for changes
abort: push creates new remote head 0f88766e02d6!
(merge or see 'hg help push' for details about pushing new heads)
--- a/tests/test-push-checkheads-partial-C4.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-push-checkheads-partial-C4.t Mon Jan 22 17:53:02 2018 -0500
@@ -47,7 +47,7 @@
$ mkcommit B0
$ cd ../client
$ hg pull
- pulling from $TESTTMP/C4/server (glob)
+ pulling from $TESTTMP/C4/server
searching for changes
adding changesets
adding manifests
@@ -61,10 +61,11 @@
created new head
$ hg debugobsolete --record-parents `getid "desc(A0)"`
obsoleted 1 changesets
+ 1 new orphan changesets
$ hg log -G --hidden
@ 0f88766e02d6 (draft): C0
|
- | o d73caddc5533 (draft): B0
+ | * d73caddc5533 (draft): B0
| |
| x 8aaa48160adc (draft): A0
|/
@@ -75,7 +76,7 @@
--------------
$ hg push --rev 'desc(C0)'
- pushing to $TESTTMP/C4/server (glob)
+ pushing to $TESTTMP/C4/server
searching for changes
abort: push creates new remote head 0f88766e02d6!
(merge or see 'hg help push' for details about pushing new heads)
--- a/tests/test-push-checkheads-pruned-B1.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-push-checkheads-pruned-B1.t Mon Jan 22 17:53:02 2018 -0500
@@ -62,7 +62,7 @@
--------------
$ hg push
- pushing to $TESTTMP/B1/server (glob)
+ pushing to $TESTTMP/B1/server
searching for changes
adding changesets
adding manifests
--- a/tests/test-push-checkheads-pruned-B2.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-push-checkheads-pruned-B2.t Mon Jan 22 17:53:02 2018 -0500
@@ -47,7 +47,7 @@
$ mkcommit B0
$ cd ../client
$ hg pull
- pulling from $TESTTMP/B2/server (glob)
+ pulling from $TESTTMP/B2/server
searching for changes
adding changesets
adding manifests
@@ -61,6 +61,7 @@
created new head
$ hg debugobsolete `getid "desc(A0)" ` `getid "desc(A1)"`
obsoleted 1 changesets
+ 1 new orphan changesets
$ hg debugobsolete --record-parents `getid "desc(B0)"`
obsoleted 1 changesets
$ hg log -G --hidden
@@ -77,7 +78,7 @@
--------------
$ hg push
- pushing to $TESTTMP/B2/server (glob)
+ pushing to $TESTTMP/B2/server
searching for changes
adding changesets
adding manifests
--- a/tests/test-push-checkheads-pruned-B3.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-push-checkheads-pruned-B3.t Mon Jan 22 17:53:02 2018 -0500
@@ -47,7 +47,7 @@
$ mkcommit B0
$ cd ../client
$ hg pull
- pulling from $TESTTMP/B3/server (glob)
+ pulling from $TESTTMP/B3/server
searching for changes
adding changesets
adding manifests
@@ -61,6 +61,7 @@
created new head
$ hg debugobsolete --record-parents `getid "desc(A0)"`
obsoleted 1 changesets
+ 1 new orphan changesets
$ hg debugobsolete `getid "desc(B0)" ` `getid "desc(B1)"`
obsoleted 1 changesets
$ hg log -G --hidden
@@ -77,7 +78,7 @@
--------------
$ hg push
- pushing to $TESTTMP/B3/server (glob)
+ pushing to $TESTTMP/B3/server
searching for changes
adding changesets
adding manifests
--- a/tests/test-push-checkheads-pruned-B4.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-push-checkheads-pruned-B4.t Mon Jan 22 17:53:02 2018 -0500
@@ -48,7 +48,7 @@
$ mkcommit B0
$ cd ../client
$ hg pull
- pulling from $TESTTMP/B4/server (glob)
+ pulling from $TESTTMP/B4/server
searching for changes
adding changesets
adding manifests
@@ -62,6 +62,7 @@
created new head
$ hg debugobsolete --record-parents `getid "desc(A0)"`
obsoleted 1 changesets
+ 1 new orphan changesets
$ hg debugobsolete --record-parents `getid "desc(B0)"`
obsoleted 1 changesets
$ hg log -G --hidden
@@ -78,7 +79,7 @@
--------------
$ hg push
- pushing to $TESTTMP/B4/server (glob)
+ pushing to $TESTTMP/B4/server
searching for changes
adding changesets
adding manifests
--- a/tests/test-push-checkheads-pruned-B5.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-push-checkheads-pruned-B5.t Mon Jan 22 17:53:02 2018 -0500
@@ -51,7 +51,7 @@
$ mkcommit C0
$ cd ../client
$ hg pull
- pulling from $TESTTMP/B5/server (glob)
+ pulling from $TESTTMP/B5/server
searching for changes
adding changesets
adding manifests
@@ -65,6 +65,7 @@
created new head
$ hg debugobsolete --record-parents `getid "desc(A0)"`
obsoleted 1 changesets
+ 2 new orphan changesets
$ hg debugobsolete `getid "desc(B0)"` `getid "desc(B1)"`
obsoleted 1 changesets
$ hg debugobsolete --record-parents `getid "desc(C0)"`
@@ -85,7 +86,7 @@
--------------
$ hg push
- pushing to $TESTTMP/B5/server (glob)
+ pushing to $TESTTMP/B5/server
searching for changes
adding changesets
adding manifests
--- a/tests/test-push-checkheads-pruned-B6.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-push-checkheads-pruned-B6.t Mon Jan 22 17:53:02 2018 -0500
@@ -69,7 +69,7 @@
--------------
$ hg push
- pushing to $TESTTMP/B6/server (glob)
+ pushing to $TESTTMP/B6/server
searching for changes
adding changesets
adding manifests
--- a/tests/test-push-checkheads-pruned-B7.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-push-checkheads-pruned-B7.t Mon Jan 22 17:53:02 2018 -0500
@@ -68,7 +68,7 @@
--------------
$ hg push
- pushing to $TESTTMP/B7/server (glob)
+ pushing to $TESTTMP/B7/server
searching for changes
adding changesets
adding manifests
--- a/tests/test-push-checkheads-pruned-B8.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-push-checkheads-pruned-B8.t Mon Jan 22 17:53:02 2018 -0500
@@ -49,7 +49,7 @@
$ mkcommit B0
$ cd ../client
$ hg pull
- pulling from $TESTTMP/B8/server (glob)
+ pulling from $TESTTMP/B8/server
searching for changes
adding changesets
adding manifests
@@ -68,6 +68,7 @@
created new head
$ hg debugobsolete `getid "desc(A0)" ` `getid "desc(A1)"`
obsoleted 1 changesets
+ 1 new orphan changesets
$ hg debugobsolete `getid "desc(B0)" ` `getid "desc(B1)"`
obsoleted 1 changesets
$ hg debugobsolete --record-parents `getid "desc(B1)"`
@@ -92,7 +93,7 @@
--------------
$ hg push
- pushing to $TESTTMP/B8/server (glob)
+ pushing to $TESTTMP/B8/server
searching for changes
adding changesets
adding manifests
--- a/tests/test-push-checkheads-superceed-A1.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-push-checkheads-superceed-A1.t Mon Jan 22 17:53:02 2018 -0500
@@ -59,7 +59,7 @@
--------------
$ hg push
- pushing to $TESTTMP/A1/server (glob)
+ pushing to $TESTTMP/A1/server
searching for changes
adding changesets
adding manifests
--- a/tests/test-push-checkheads-superceed-A2.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-push-checkheads-superceed-A2.t Mon Jan 22 17:53:02 2018 -0500
@@ -46,7 +46,7 @@
$ mkcommit B0
$ cd ../client
$ hg pull
- pulling from $TESTTMP/A2/server (glob)
+ pulling from $TESTTMP/A2/server
searching for changes
adding changesets
adding manifests
@@ -61,6 +61,7 @@
$ mkcommit B1
$ hg debugobsolete `getid "desc(A0)" ` `getid "desc(A1)"`
obsoleted 1 changesets
+ 1 new orphan changesets
$ hg debugobsolete `getid "desc(B0)" ` `getid "desc(B1)"`
obsoleted 1 changesets
$ hg log -G --hidden
@@ -79,7 +80,7 @@
--------------
$ hg push
- pushing to $TESTTMP/A2/server (glob)
+ pushing to $TESTTMP/A2/server
searching for changes
adding changesets
adding manifests
--- a/tests/test-push-checkheads-superceed-A3.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-push-checkheads-superceed-A3.t Mon Jan 22 17:53:02 2018 -0500
@@ -49,7 +49,7 @@
$ mkcommit B0
$ cd ../client
$ hg pull
- pulling from $TESTTMP/A3/server (glob)
+ pulling from $TESTTMP/A3/server
searching for changes
adding changesets
adding manifests
@@ -64,6 +64,7 @@
$ mkcommit A1
$ hg debugobsolete `getid "desc(A0)" ` `getid "desc(A1)"`
obsoleted 1 changesets
+ 1 new orphan changesets
$ hg debugobsolete `getid "desc(B0)" ` `getid "desc(B1)"`
obsoleted 1 changesets
$ hg log -G --hidden
@@ -82,7 +83,7 @@
--------------
$ hg push
- pushing to $TESTTMP/A3/server (glob)
+ pushing to $TESTTMP/A3/server
searching for changes
adding changesets
adding manifests
--- a/tests/test-push-checkheads-superceed-A4.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-push-checkheads-superceed-A4.t Mon Jan 22 17:53:02 2018 -0500
@@ -64,7 +64,7 @@
--------------
$ hg push
- pushing to $TESTTMP/A4/server (glob)
+ pushing to $TESTTMP/A4/server
searching for changes
adding changesets
adding manifests
--- a/tests/test-push-checkheads-superceed-A5.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-push-checkheads-superceed-A5.t Mon Jan 22 17:53:02 2018 -0500
@@ -64,7 +64,7 @@
--------------
$ hg push
- pushing to $TESTTMP/A5/server (glob)
+ pushing to $TESTTMP/A5/server
searching for changes
adding changesets
adding manifests
--- a/tests/test-push-checkheads-superceed-A6.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-push-checkheads-superceed-A6.t Mon Jan 22 17:53:02 2018 -0500
@@ -53,7 +53,7 @@
created new head
$ cd ../client
$ hg pull
- pulling from $TESTTMP/A6/server (glob)
+ pulling from $TESTTMP/A6/server
searching for changes
adding changesets
adding manifests
@@ -70,6 +70,7 @@
$ mkcommit B1
$ hg debugobsolete `getid "desc(A0)" ` `getid "desc(A1)"`
obsoleted 1 changesets
+ 1 new orphan changesets
$ hg debugobsolete `getid "desc(B0)" ` `getid "desc(B1)"`
obsoleted 1 changesets
$ hg log -G --hidden
@@ -90,7 +91,7 @@
--------------
$ hg push
- pushing to $TESTTMP/A6/server (glob)
+ pushing to $TESTTMP/A6/server
searching for changes
adding changesets
adding manifests
--- a/tests/test-push-checkheads-superceed-A7.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-push-checkheads-superceed-A7.t Mon Jan 22 17:53:02 2018 -0500
@@ -53,7 +53,7 @@
created new head
$ cd ../client
$ hg pull
- pulling from $TESTTMP/A7/server (glob)
+ pulling from $TESTTMP/A7/server
searching for changes
adding changesets
adding manifests
@@ -70,6 +70,7 @@
created new head
$ hg debugobsolete `getid "desc(A0)" ` `getid "desc(A1)"`
obsoleted 1 changesets
+ 1 new orphan changesets
$ hg debugobsolete `getid "desc(B0)" ` `getid "desc(B1)"`
obsoleted 1 changesets
$ hg log -G --hidden
@@ -90,7 +91,7 @@
--------------
$ hg push
- pushing to $TESTTMP/A7/server (glob)
+ pushing to $TESTTMP/A7/server
searching for changes
adding changesets
adding manifests
--- a/tests/test-push-checkheads-superceed-A8.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-push-checkheads-superceed-A8.t Mon Jan 22 17:53:02 2018 -0500
@@ -70,7 +70,7 @@
--------------
$ hg push
- pushing to $TESTTMP/A8/server (glob)
+ pushing to $TESTTMP/A8/server
searching for changes
adding changesets
adding manifests
--- a/tests/test-push-checkheads-unpushed-D1.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-push-checkheads-unpushed-D1.t Mon Jan 22 17:53:02 2018 -0500
@@ -68,7 +68,7 @@
--------------
$ hg push -r 'desc(B0)'
- pushing to $TESTTMP/D1/server (glob)
+ pushing to $TESTTMP/D1/server
searching for changes
abort: push creates new remote head 74ff5441d343!
(merge or see 'hg help push' for details about pushing new heads)
--- a/tests/test-push-checkheads-unpushed-D2.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-push-checkheads-unpushed-D2.t Mon Jan 22 17:53:02 2018 -0500
@@ -51,7 +51,7 @@
$ mkcommit B0
$ cd ../client
$ hg pull
- pulling from $TESTTMP/D2/server (glob)
+ pulling from $TESTTMP/D2/server
searching for changes
adding changesets
adding manifests
@@ -65,6 +65,7 @@
created new head
$ hg debugobsolete `getid "desc(A0)" ` `getid "desc(A1)"`
obsoleted 1 changesets
+ 1 new orphan changesets
$ hg debugobsolete --record-parents `getid "desc(B0)"`
obsoleted 1 changesets
$ hg up 0
@@ -87,7 +88,7 @@
--------------
$ hg push --rev 'desc(C0)'
- pushing to $TESTTMP/D2/server (glob)
+ pushing to $TESTTMP/D2/server
searching for changes
abort: push creates new remote head 0f88766e02d6!
(merge or see 'hg help push' for details about pushing new heads)
--- a/tests/test-push-checkheads-unpushed-D3.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-push-checkheads-unpushed-D3.t Mon Jan 22 17:53:02 2018 -0500
@@ -50,7 +50,7 @@
0 files updated, 0 files merged, 2 files removed, 0 files unresolved
$ cd ../client
$ hg pull
- pulling from $TESTTMP/D3/server (glob)
+ pulling from $TESTTMP/D3/server
searching for changes
adding changesets
adding manifests
@@ -68,6 +68,7 @@
created new head
$ hg debugobsolete `getid "desc(A0)" ` `getid "desc(A1)"`
obsoleted 1 changesets
+ 1 new orphan changesets
$ hg debugobsolete `getid "desc(B0)" ` `getid "desc(B1)"`
obsoleted 1 changesets
$ hg log -G --hidden
@@ -86,13 +87,13 @@
--------------
$ hg push --rev 'desc(A1)'
- pushing to $TESTTMP/D3/server (glob)
+ pushing to $TESTTMP/D3/server
searching for changes
abort: push creates new remote head f6082bc4ffef!
(merge or see 'hg help push' for details about pushing new heads)
[255]
$ hg push --rev 'desc(B1)'
- pushing to $TESTTMP/D3/server (glob)
+ pushing to $TESTTMP/D3/server
searching for changes
abort: push creates new remote head 25c56d33e4c4!
(merge or see 'hg help push' for details about pushing new heads)
@@ -104,7 +105,7 @@
In this case, even a bare push is creating more heads
$ hg push
- pushing to $TESTTMP/D3/server (glob)
+ pushing to $TESTTMP/D3/server
searching for changes
abort: push creates new remote head 25c56d33e4c4!
(merge or see 'hg help push' for details about pushing new heads)
--- a/tests/test-push-checkheads-unpushed-D4.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-push-checkheads-unpushed-D4.t Mon Jan 22 17:53:02 2018 -0500
@@ -67,7 +67,7 @@
created new head
$ cd ../client
$ hg pull
- pulling from $TESTTMP/D4/server (glob)
+ pulling from $TESTTMP/D4/server
searching for changes
adding changesets
adding manifests
@@ -84,6 +84,7 @@
$ mkcommit B1
$ hg debugobsolete `getid "desc(A0)" ` `getid "desc(A1)"`
obsoleted 1 changesets
+ 1 new orphan changesets
$ hg debugobsolete `getid "desc(B0)" ` `getid "desc(B1)"`
obsoleted 1 changesets
$ hg log -G --hidden
@@ -104,7 +105,7 @@
--------------------------------
$ hg push --rev 'desc(A1)'
- pushing to $TESTTMP/D4/server (glob)
+ pushing to $TESTTMP/D4/server
searching for changes
abort: push creates new remote head f6082bc4ffef!
(merge or see 'hg help push' for details about pushing new heads)
@@ -114,7 +115,7 @@
------------------------------------
$ hg push --rev 'desc(B1)'
- pushing to $TESTTMP/D4/server (glob)
+ pushing to $TESTTMP/D4/server
searching for changes
adding changesets
adding manifests
--- a/tests/test-push-checkheads-unpushed-D5.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-push-checkheads-unpushed-D5.t Mon Jan 22 17:53:02 2018 -0500
@@ -56,7 +56,7 @@
created new head
$ cd ../client
$ hg pull
- pulling from $TESTTMP/D5/server (glob)
+ pulling from $TESTTMP/D5/server
searching for changes
adding changesets
adding manifests
@@ -73,6 +73,7 @@
created new head
$ hg debugobsolete `getid "desc(A0)" ` `getid "desc(A1)"`
obsoleted 1 changesets
+ 1 new orphan changesets
$ hg debugobsolete `getid "desc(B0)" ` `getid "desc(B1)"`
obsoleted 1 changesets
$ hg log -G --hidden
@@ -93,13 +94,13 @@
--------------
$ hg push --rev 'desc(B1)'
- pushing to $TESTTMP/D5/server (glob)
+ pushing to $TESTTMP/D5/server
searching for changes
abort: push creates new remote head 25c56d33e4c4!
(merge or see 'hg help push' for details about pushing new heads)
[255]
$ hg push --rev 'desc(A1)'
- pushing to $TESTTMP/D5/server (glob)
+ pushing to $TESTTMP/D5/server
searching for changes
adding changesets
adding manifests
@@ -107,5 +108,6 @@
added 1 changesets with 1 changes to 1 files
1 new obsolescence markers
obsoleted 1 changesets
+ 1 new orphan changesets
$ cd ../..
--- a/tests/test-push-checkheads-unpushed-D6.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-push-checkheads-unpushed-D6.t Mon Jan 22 17:53:02 2018 -0500
@@ -75,7 +75,7 @@
--------------
$ hg push --rev 'desc(C0)'
- pushing to $TESTTMP/D6/server (glob)
+ pushing to $TESTTMP/D6/server
searching for changes
abort: push creates new remote head 0f88766e02d6!
(merge or see 'hg help push' for details about pushing new heads)
--- a/tests/test-push-checkheads-unpushed-D7.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-push-checkheads-unpushed-D7.t Mon Jan 22 17:53:02 2018 -0500
@@ -88,7 +88,7 @@
--------------
$ hg push --rev 'desc(C0)'
- pushing to $TESTTMP/D7/server (glob)
+ pushing to $TESTTMP/D7/server
searching for changes
adding changesets
adding manifests
--- a/tests/test-push-race.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-push-race.t Mon Jan 22 17:53:02 2018 -0500
@@ -1707,6 +1707,7 @@
added 1 changesets with 1 changes to 1 files (+1 heads)
1 new obsolescence markers
obsoleted 1 changesets
+ 1 new orphan changesets
new changesets 720c5163ecf6
(run 'hg heads .' to see heads, 'hg merge' to merge)
$ hg -R ./client-other pull
@@ -1718,6 +1719,7 @@
added 1 changesets with 1 changes to 1 files (+1 heads)
1 new obsolescence markers
obsoleted 1 changesets
+ 1 new orphan changesets
new changesets 720c5163ecf6
(run 'hg heads .' to see heads, 'hg merge' to merge)
$ hg -R ./client-racy pull
@@ -1727,6 +1729,7 @@
adding manifests
adding file changes
added 1 changesets with 0 changes to 0 files
+ 1 new orphan changesets
new changesets a98a47d8b85b
(run 'hg update' to get a working copy)
@@ -1735,7 +1738,7 @@
$ hg -R server graph
o 720c5163ecf6 C-V (default)
|
- | o a98a47d8b85b C-U (default)
+ | * a98a47d8b85b C-U (default)
| |
| x b0ee3d6f51bc C-Q (default)
|/
@@ -1792,6 +1795,7 @@
$ hg -R client-other/ branch --force default
marked working directory as branch default
$ hg -R client-other/ commit -m "C-W"
+ 1 new orphan changesets
created new head
$ ID_V=`hg -R client-other log -T '{node}\n' -r 'desc("C-V")'`
$ ID_W=`hg -R client-other log -T '{node}\n' -r 'desc("C-W")'`
@@ -1818,6 +1822,7 @@
remote: added 1 changesets with 0 changes to 1 files (+1 heads)
remote: 1 new obsolescence markers
remote: obsoleted 1 changesets
+ remote: 1 new orphan changesets
$ release $TESTTMP/watchfile
@@ -1835,9 +1840,9 @@
b0ee3d6f51bc4c0ca6d4f2907708027a6c376233 720c5163ecf64dcc6216bee2d62bf3edb1882499 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
720c5163ecf64dcc6216bee2d62bf3edb1882499 39bc0598afe90ab18da460bafecc0fa953b77596 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
$ hg -R server graph --hidden
- o 39bc0598afe9 C-W (default)
+ * 39bc0598afe9 C-W (default)
|
- | o a98a47d8b85b C-U (default)
+ | * a98a47d8b85b C-U (default)
|/
x b0ee3d6f51bc C-Q (default)
|
--- a/tests/test-push.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-push.t Mon Jan 22 17:53:02 2018 -0500
@@ -199,7 +199,7 @@
[1]
$ hg push
- pushing to $TESTTMP/test-validation (glob)
+ pushing to $TESTTMP/test-validation
searching for changes
adding changesets
adding manifests
@@ -234,7 +234,7 @@
[1]
$ hg push
- pushing to $TESTTMP/test-validation (glob)
+ pushing to $TESTTMP/test-validation
searching for changes
adding changesets
adding manifests
--- a/tests/test-pushvars.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-pushvars.t Mon Jan 22 17:53:02 2018 -0500
@@ -24,7 +24,7 @@
$ echo b > a
$ hg commit -Aqm a
$ hg push --pushvars "DEBUG=1" --pushvars "BYPASS_REVIEW=true"
- pushing to $TESTTMP/repo (glob)
+ pushing to $TESTTMP/repo
searching for changes
adding changesets
adding manifests
@@ -38,7 +38,7 @@
$ echo b >> a
$ hg commit -Aqm a
$ hg push --pushvars "DEBUG=1" --pushvars "BYPASS_REVIEW=true"
- pushing to $TESTTMP/repo (glob)
+ pushing to $TESTTMP/repo
searching for changes
adding changesets
adding manifests
@@ -52,7 +52,7 @@
$ echo b >> a
$ hg commit -Aqm a
$ hg push --pushvars "DEBUG="
- pushing to $TESTTMP/repo (glob)
+ pushing to $TESTTMP/repo
searching for changes
adding changesets
adding manifests
@@ -65,7 +65,7 @@
$ echo b >> a
$ hg commit -Aqm b
$ hg push --pushvars "DEBUG"
- pushing to $TESTTMP/repo (glob)
+ pushing to $TESTTMP/repo
searching for changes
abort: unable to parse variable 'DEBUG', should follow 'KEY=VALUE' or 'KEY=' format
[255]
--- a/tests/test-rebase-abort.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-rebase-abort.t Mon Jan 22 17:53:02 2018 -0500
@@ -115,7 +115,7 @@
Abort (should clear out unsupported merge state):
$ hg rebase --abort
- saved backup bundle to $TESTTMP/a/.hg/strip-backup/3e046f2ecedb-6beef7d5-backup.hg (glob)
+ saved backup bundle to $TESTTMP/a/.hg/strip-backup/3e046f2ecedb-6beef7d5-backup.hg
rebase aborted
$ hg debugmergestate
no merge state found
@@ -377,7 +377,7 @@
abort: ^C
[255]
$ hg rebase --abort
- saved backup bundle to $TESTTMP/interrupted/.hg/strip-backup/3d8812cf300d-93041a90-backup.hg (glob)
+ saved backup bundle to $TESTTMP/interrupted/.hg/strip-backup/3d8812cf300d-93041a90-backup.hg
rebase aborted
$ hg log -G --template "{rev} {desc} {bookmarks}"
o 6 no-a
--- a/tests/test-rebase-base-flag.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-rebase-base-flag.t Mon Jan 22 17:53:02 2018 -0500
@@ -10,7 +10,7 @@
> publish=False
>
> [alias]
- > tglog = log -G --template "{rev}: {desc}"
+ > tglog = log -G --template "{rev}: {node|short} {desc}"
> EOF
$ rebasewithdag() {
@@ -39,19 +39,19 @@
rebasing 3:d6003a550c2c "C" (C)
rebasing 5:4526cf523425 "D" (D)
rebasing 6:b296604d9846 "E" (E tip)
- o 6: E
+ o 6: 4870f5e7df37 E
|
- | o 5: D
+ | o 5: dc999528138a D
|/
- o 4: C
+ o 4: 6b3e11729672 C
|
- o 3: Z
+ o 3: 57e70bad1ea3 Z
|
- | o 2: B
+ | o 2: c1e6b162678d B
|/
- o 1: A
+ o 1: 21a6c4502885 A
|
- o 0: R
+ o 0: b41ce7760717 R
Multiple branching points caused by selecting a single merge changeset:
@@ -69,19 +69,19 @@
rebasing 2:c1e6b162678d "B" (B)
rebasing 3:d6003a550c2c "C" (C)
rebasing 6:54c8f00cb91c "E" (E tip)
- o 6: E
+ o 6: 00598421b616 E
|\
- | o 5: C
+ | o 5: 6b3e11729672 C
| |
- o | 4: B
+ o | 4: 85260910e847 B
|/
- o 3: Z
+ o 3: 57e70bad1ea3 Z
|
- | o 2: D
+ | o 2: 8924700906fe D
|/
- o 1: A
+ o 1: 21a6c4502885 A
|
- o 0: R
+ o 0: b41ce7760717 R
Rebase should not extend the "--base" revset using "descendants":
@@ -96,17 +96,17 @@
> EOS
rebasing 2:c1e6b162678d "B" (B)
rebasing 5:54c8f00cb91c "E" (E tip)
- o 5: E
+ o 5: e583bf3ff54c E
|\
- | o 4: B
+ | o 4: 85260910e847 B
| |
- | o 3: Z
+ | o 3: 57e70bad1ea3 Z
| |
- o | 2: C
+ o | 2: d6003a550c2c C
|/
- o 1: A
+ o 1: 21a6c4502885 A
|
- o 0: R
+ o 0: b41ce7760717 R
Rebase should not simplify the "--base" revset using "roots":
@@ -122,17 +122,17 @@
rebasing 2:c1e6b162678d "B" (B)
rebasing 3:d6003a550c2c "C" (C)
rebasing 5:54c8f00cb91c "E" (E tip)
- o 5: E
+ o 5: 00598421b616 E
|\
- | o 4: C
+ | o 4: 6b3e11729672 C
| |
- o | 3: B
+ o | 3: 85260910e847 B
|/
- o 2: Z
+ o 2: 57e70bad1ea3 Z
|
- o 1: A
+ o 1: 21a6c4502885 A
|
- o 0: R
+ o 0: b41ce7760717 R
The destination is one of the two branching points of a merge:
@@ -173,31 +173,31 @@
rebasing 8:781512f5e33d "C2" (C2)
rebasing 9:428d8c18f641 "E1" (E1)
rebasing 11:e1bf82f6b6df "E2" (E2)
- o 12: E2
+ o 12: e4a37b6fdbd2 E2
|
- o 11: E1
+ o 11: 9675bea983df E1
|
- | o 10: C2
+ | o 10: 4faf5d4c80dc C2
| |
- | o 9: C1
+ | o 9: d4799b1ad57d C1
|/
- | o 8: B2
+ | o 8: 772732dc64d6 B2
| |
- | o 7: B1
+ | o 7: ad3ac528a49f B1
|/
- o 6: Z
+ o 6: 2cbdfca6b9d5 Z
|
- o 5: F
+ o 5: fcdb3293ec13 F
|
- o 4: E
+ o 4: a4652bb8ac54 E
|
- o 3: C
+ o 3: bd5548558fcf C
|
- o 2: B
+ o 2: c1e6b162678d B
|
- o 1: A
+ o 1: 21a6c4502885 A
|
- o 0: R
+ o 0: b41ce7760717 R
Multiple branching points with multiple merges:
@@ -223,37 +223,37 @@
rebasing 11:d1f6d0c3c7e4 "M" (M)
rebasing 12:7aaec6f81888 "N" (N)
rebasing 15:325bc8f1760d "P" (P tip)
- o 15: P
+ o 15: 6ef6a0ea3b18 P
|\
- | o 14: N
+ | o 14: 20ba3610a7e5 N
| |\
- o \ \ 13: M
+ o \ \ 13: cd4f6c06d2ab M
|\ \ \
- | | | o 12: L
+ | | | o 12: bca872041455 L
| | | |
- | | o | 11: K
+ | | o | 11: 7bbb6c8a6ad7 K
| | |/
- | o / 10: J
+ | o / 10: de0cbffe893e J
| |/
- o / 9: I
+ o / 9: 0e710f176a88 I
|/
- | o 8: H
+ | o 8: 52507bab39ca H
| |\
- | | | o 7: G
+ | | | o 7: bb5fe4652f0d G
| | |/|
- | | | o 6: F
+ | | | o 6: f4ad4b31daf4 F
| | | |
- | | o | 5: E
+ | | o | 5: b168f85f2e78 E
| | |/
- | o | 4: D
+ | o | 4: 8d09fcdb5594 D
| |\|
- +---o 3: C
+ +---o 3: ab70b4c5a9c9 C
| |
- o | 2: Z
+ o | 2: 262e37e34f63 Z
| |
- | o 1: B
+ | o 1: 112478962961 B
|/
- o 0: A
+ o 0: 426bada5c675 A
Slightly more complex merge case (mentioned in https://www.mercurial-scm.org/pipermail/mercurial-devel/2016-November/091074.html):
@@ -275,31 +275,31 @@
rebasing 11:4e449bd1a643 "A3" (A3)
rebasing 10:0a33b0519128 "B1" (B1)
rebasing 12:209327807c3a "B3" (B3 tip)
- o 12: B3
+ o 12: ceb984566332 B3
|\
- | o 11: B1
+ | o 11: 19d93caac497 B1
| |
- | | o 10: A3
+ | | o 10: 058e73d3916b A3
| | |\
- | +---o 9: A2
+ | +---o 9: 0ba13ad72234 A2
| | |
- | o | 8: C1
+ | o | 8: c122c2af10c6 C1
| | |
- o | | 7: B2
+ o | | 7: 74275896650e B2
| | |
- | o | 6: C0
+ | o | 6: 455ba9bd3ea2 C0
|/ /
- o | 5: Z
+ o | 5: b3d7d2fda53b Z
| |
- o | 4: M3
+ o | 4: 182ab6383dd7 M3
| |
- o | 3: M2
+ o | 3: 6c3f73563d5f M2
| |
- | o 2: A1
+ | o 2: 88c860fffcc2 A1
|/
- o 1: M1
+ o 1: bc852baa85dd M1
|
- o 0: M0
+ o 0: dbdfc5c9bcd5 M0
Disconnected graph:
@@ -320,15 +320,15 @@
> EOF
rebasing 2:112478962961 "B" (B)
rebasing 3:b70f76719894 "D" (D)
- o 4: D
+ o 4: 511efad7bf13 D
|
- | o 3: B
+ | o 3: 25c4e279af62 B
|/
- o 2: Z
+ o 2: 3a49f54d7bb1 Z
|\
- | o 1: C
+ | o 1: 96cc3511f894 C
|
- o 0: A
+ o 0: 426bada5c675 A
Multiple roots. One root is not an ancestor of dest:
@@ -351,17 +351,17 @@
> EOF
rebasing 2:f675d5a1c6a4 "B" (B)
rebasing 5:f68696fe6af8 "E" (E tip)
- o 5: E
+ o 5: f6e6f5081554 E
|\
- | o 4: B
+ | o 4: 30cabcba27be B
| |\
- | | o 3: Z
+ | | o 3: 262e37e34f63 Z
| | |
- o | | 2: D
+ o | | 2: b70f76719894 D
|/ /
- o / 1: C
+ o / 1: 96cc3511f894 C
/
- o 0: A
+ o 0: 426bada5c675 A
Multiple roots. Two children share two parents while dest has only one parent:
@@ -372,13 +372,13 @@
> EOF
rebasing 2:f675d5a1c6a4 "B" (B)
rebasing 3:c2a779e13b56 "D" (D)
- o 4: D
+ o 4: 5eecd056b5f8 D
|\
- +---o 3: B
+ +---o 3: 30cabcba27be B
| |/
- | o 2: Z
+ | o 2: 262e37e34f63 Z
| |
- o | 1: C
+ o | 1: 96cc3511f894 C
/
- o 0: A
+ o 0: 426bada5c675 A
--- a/tests/test-rebase-bookmarks.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-rebase-bookmarks.t Mon Jan 22 17:53:02 2018 -0500
@@ -7,7 +7,7 @@
> publish=False
>
> [alias]
- > tglog = log -G --template "{rev}: '{desc}' bookmarks: {bookmarks}\n"
+ > tglog = log -G --template "{rev}: {node|short} '{desc}' bookmarks: {bookmarks}\n"
> EOF
Create a repo with several bookmarks
@@ -39,13 +39,13 @@
$ hg book W
$ hg tglog
- @ 3: 'D' bookmarks: W
+ @ 3: 41acb9dca9eb 'D' bookmarks: W
|
- | o 2: 'C' bookmarks: Y Z
+ | o 2: 49cb3485fa0c 'C' bookmarks: Y Z
| |
- | o 1: 'B' bookmarks: X
+ | o 1: 6c81ed0049f8 'B' bookmarks: X
|/
- o 0: 'A' bookmarks:
+ o 0: 1994f17a630e 'A' bookmarks:
Move only rebased bookmarks
@@ -66,26 +66,26 @@
$ hg book -r 0 Y@diverge
$ hg tglog
- o 3: 'D' bookmarks: W X@diverge Z@diverge
+ o 3: 41acb9dca9eb 'D' bookmarks: W X@diverge Z@diverge
|
- | @ 2: 'C' bookmarks: Y Z
+ | @ 2: 49cb3485fa0c 'C' bookmarks: Y Z
| |
- | o 1: 'B' bookmarks: X
+ | o 1: 6c81ed0049f8 'B' bookmarks: X
|/
- o 0: 'A' bookmarks: Y@diverge
+ o 0: 1994f17a630e 'A' bookmarks: Y@diverge
$ hg rebase -s Y -d 3
rebasing 2:49cb3485fa0c "C" (Y Z)
- saved backup bundle to $TESTTMP/a1/.hg/strip-backup/49cb3485fa0c-126f3e97-rebase.hg (glob)
+ saved backup bundle to $TESTTMP/a1/.hg/strip-backup/49cb3485fa0c-126f3e97-rebase.hg
$ hg tglog
- @ 3: 'C' bookmarks: Y Z
+ @ 3: 17fb3faba63c 'C' bookmarks: Y Z
|
- o 2: 'D' bookmarks: W X@diverge
+ o 2: 41acb9dca9eb 'D' bookmarks: W X@diverge
|
- | o 1: 'B' bookmarks: X
+ | o 1: 6c81ed0049f8 'B' bookmarks: X
|/
- o 0: 'A' bookmarks: Y@diverge
+ o 0: 1994f17a630e 'A' bookmarks: Y@diverge
Do not try to keep active but deleted divergent bookmark
@@ -98,7 +98,7 @@
$ hg rebase -s W -d .
rebasing 3:41acb9dca9eb "D" (W tip)
- saved backup bundle to $TESTTMP/a4/.hg/strip-backup/41acb9dca9eb-b35a6a63-rebase.hg (glob)
+ saved backup bundle to $TESTTMP/a4/.hg/strip-backup/41acb9dca9eb-b35a6a63-rebase.hg
$ hg bookmarks
W 3:0d3554f74897
@@ -117,16 +117,16 @@
$ hg rebase -s 1 -d 3
rebasing 1:6c81ed0049f8 "B" (X)
rebasing 2:49cb3485fa0c "C" (Y Z)
- saved backup bundle to $TESTTMP/a2/.hg/strip-backup/6c81ed0049f8-a687065f-rebase.hg (glob)
+ saved backup bundle to $TESTTMP/a2/.hg/strip-backup/6c81ed0049f8-a687065f-rebase.hg
$ hg tglog
- @ 3: 'C' bookmarks: Y Z
+ @ 3: 3d5fa227f4b5 'C' bookmarks: Y Z
|
- o 2: 'B' bookmarks: X
+ o 2: e926fccfa8ec 'B' bookmarks: X
|
- o 1: 'D' bookmarks: W
+ o 1: 41acb9dca9eb 'D' bookmarks: W
|
- o 0: 'A' bookmarks:
+ o 0: 1994f17a630e 'A' bookmarks:
Keep active bookmark on the correct changeset
@@ -140,16 +140,16 @@
$ hg rebase -d W
rebasing 1:6c81ed0049f8 "B" (X)
rebasing 2:49cb3485fa0c "C" (Y Z)
- saved backup bundle to $TESTTMP/a3/.hg/strip-backup/6c81ed0049f8-a687065f-rebase.hg (glob)
+ saved backup bundle to $TESTTMP/a3/.hg/strip-backup/6c81ed0049f8-a687065f-rebase.hg
$ hg tglog
- o 3: 'C' bookmarks: Y Z
+ o 3: 3d5fa227f4b5 'C' bookmarks: Y Z
|
- @ 2: 'B' bookmarks: X
+ @ 2: e926fccfa8ec 'B' bookmarks: X
|
- o 1: 'D' bookmarks: W
+ o 1: 41acb9dca9eb 'D' bookmarks: W
|
- o 0: 'A' bookmarks:
+ o 0: 1994f17a630e 'A' bookmarks:
$ hg bookmarks
W 1:41acb9dca9eb
@@ -180,17 +180,17 @@
continue: hg rebase --continue
$ hg rebase --continue
rebasing 3:3d5fa227f4b5 "C" (Y Z)
- saved backup bundle to $TESTTMP/a3/.hg/strip-backup/3d5fa227f4b5-c6ea2371-rebase.hg (glob)
+ saved backup bundle to $TESTTMP/a3/.hg/strip-backup/3d5fa227f4b5-c6ea2371-rebase.hg
$ hg tglog
- @ 4: 'C' bookmarks: Y Z
+ @ 4: 45c0f0ec1203 'C' bookmarks: Y Z
|
- o 3: 'other C' bookmarks:
+ o 3: b0e10b7175fd 'other C' bookmarks:
|
- o 2: 'B' bookmarks: X
+ o 2: e926fccfa8ec 'B' bookmarks: X
|
- o 1: 'D' bookmarks: W
+ o 1: 41acb9dca9eb 'D' bookmarks: W
|
- o 0: 'A' bookmarks:
+ o 0: 1994f17a630e 'A' bookmarks:
ensure that bookmarks given the names of revset functions can be used
@@ -210,7 +210,7 @@
rebasing 5:345c90f326a4 "bisect"
rebasing 6:f677a2907404 "bisect2"
rebasing 7:325c16001345 "bisect3" (bisect tip)
- saved backup bundle to $TESTTMP/a3/.hg/strip-backup/345c90f326a4-b4840586-rebase.hg (glob)
+ saved backup bundle to $TESTTMP/a3/.hg/strip-backup/345c90f326a4-b4840586-rebase.hg
Bookmark and working parent get moved even if --keep is set (issue5682)
@@ -225,21 +225,21 @@
$ rm .hg/localtags
$ hg up -q B
$ hg tglog
- o 2: 'C' bookmarks: C
+ o 2: dc0947a82db8 'C' bookmarks: C
|
- | @ 1: 'B' bookmarks: B
+ | @ 1: 112478962961 'B' bookmarks: B
|/
- o 0: 'A' bookmarks: A
+ o 0: 426bada5c675 'A' bookmarks: A
$ hg rebase -r B -d C --keep
rebasing 1:112478962961 "B" (B)
$ hg tglog
- @ 3: 'B' bookmarks: B
+ @ 3: 9769fc65c4c5 'B' bookmarks: B
|
- o 2: 'C' bookmarks: C
+ o 2: dc0947a82db8 'C' bookmarks: C
|
- | o 1: 'B' bookmarks:
+ | o 1: 112478962961 'B' bookmarks:
|/
- o 0: 'A' bookmarks: A
+ o 0: 426bada5c675 'A' bookmarks: A
--- a/tests/test-rebase-cache.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-rebase-cache.t Mon Jan 22 17:53:02 2018 -0500
@@ -104,7 +104,7 @@
$ hg rebase -s 5 -d 8
rebasing 5:635859577d0b "D"
rebasing 6:5097051d331d "E"
- saved backup bundle to $TESTTMP/a1/.hg/strip-backup/635859577d0b-89160bff-rebase.hg (glob)
+ saved backup bundle to $TESTTMP/a1/.hg/strip-backup/635859577d0b-89160bff-rebase.hg
$ hg branches
branch3 8:466cdfb14b62
@@ -166,7 +166,7 @@
$ hg rebase -s 8 -d 6
rebasing 8:4666b71e8e32 "F" (tip)
- saved backup bundle to $TESTTMP/a2/.hg/strip-backup/4666b71e8e32-fc1c4e96-rebase.hg (glob)
+ saved backup bundle to $TESTTMP/a2/.hg/strip-backup/4666b71e8e32-fc1c4e96-rebase.hg
$ hg branches
branch2 8:6b4bdc1b5ac0
@@ -233,7 +233,7 @@
rebasing 7:653b9feb4616 "branch3"
note: rebase of 7:653b9feb4616 created no changes to commit
rebasing 8:4666b71e8e32 "F" (tip)
- saved backup bundle to $TESTTMP/a3/.hg/strip-backup/653b9feb4616-3c88de16-rebase.hg (glob)
+ saved backup bundle to $TESTTMP/a3/.hg/strip-backup/653b9feb4616-3c88de16-rebase.hg
$ hg branches
branch2 7:6b4bdc1b5ac0
@@ -270,7 +270,7 @@
$ hg strip 2
0 files updated, 0 files merged, 4 files removed, 0 files unresolved
- saved backup bundle to $TESTTMP/a3/.hg/strip-backup/0a03079c47fd-11b7c407-backup.hg (glob)
+ saved backup bundle to $TESTTMP/a3/.hg/strip-backup/0a03079c47fd-11b7c407-backup.hg
$ hg tglog
o 3: 'C' branch2
@@ -329,7 +329,7 @@
$ hg strip 2
0 files updated, 0 files merged, 1 files removed, 0 files unresolved
- saved backup bundle to $TESTTMP/a3/b/.hg/strip-backup/a5b4b27ed7b4-a3b6984e-backup.hg (glob)
+ saved backup bundle to $TESTTMP/a3/b/.hg/strip-backup/a5b4b27ed7b4-a3b6984e-backup.hg
$ hg theads
1: 'branch2' branch2
@@ -374,14 +374,14 @@
$ hg strip 3 4
0 files updated, 0 files merged, 2 files removed, 0 files unresolved
- saved backup bundle to $TESTTMP/a3/c/.hg/strip-backup/67a385d4e6f2-b9243789-backup.hg (glob)
+ saved backup bundle to $TESTTMP/a3/c/.hg/strip-backup/67a385d4e6f2-b9243789-backup.hg
$ hg theads
2: 'C'
$ hg strip 2 1
0 files updated, 0 files merged, 2 files removed, 0 files unresolved
- saved backup bundle to $TESTTMP/a3/c/.hg/strip-backup/6c81ed0049f8-a687065f-backup.hg (glob)
+ saved backup bundle to $TESTTMP/a3/c/.hg/strip-backup/6c81ed0049f8-a687065f-backup.hg
$ hg theads
0: 'A'
@@ -480,4 +480,4 @@
HGEDITFORM=rebase.merge
rebasing 8:326cfedc031c "I" (tip)
HGEDITFORM=rebase.normal
- saved backup bundle to $TESTTMP/a3/c4/.hg/strip-backup/361a99976cc9-35e980d0-rebase.hg (glob)
+ saved backup bundle to $TESTTMP/a3/c4/.hg/strip-backup/361a99976cc9-35e980d0-rebase.hg
--- a/tests/test-rebase-check-restore.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-rebase-check-restore.t Mon Jan 22 17:53:02 2018 -0500
@@ -138,7 +138,7 @@
continue: hg rebase --continue
$ hg rebase --continue
rebasing 5:01e6ebbd8272 "F" (tip)
- saved backup bundle to $TESTTMP/a2/.hg/strip-backup/01e6ebbd8272-6fd3a015-rebase.hg (glob)
+ saved backup bundle to $TESTTMP/a2/.hg/strip-backup/01e6ebbd8272-6fd3a015-rebase.hg
$ hg tglog
@ 5:draft 'F' notdefault
--- a/tests/test-rebase-collapse.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-rebase-collapse.t Mon Jan 22 17:53:02 2018 -0500
@@ -7,8 +7,8 @@
> publish=False
>
> [alias]
- > tglog = log -G --template "{rev}: '{desc}' {branches}\n"
- > tglogp = log -G --template "{rev}:{phase} '{desc}' {branches}\n"
+ > tglog = log -G --template "{rev}: {node|short} '{desc}' {branches}\n"
+ > tglogp = log -G --template "{rev}: {node|short} {phase} '{desc}' {branches}\n"
> EOF
Create repo a:
@@ -26,21 +26,21 @@
3 files updated, 0 files merged, 0 files removed, 0 files unresolved
$ hg tglog
- @ 7: 'H'
+ @ 7: 02de42196ebe 'H'
|
- | o 6: 'G'
+ | o 6: eea13746799a 'G'
|/|
- o | 5: 'F'
+ o | 5: 24b6387c8c8c 'F'
| |
- | o 4: 'E'
+ | o 4: 9520eea781bc 'E'
|/
- | o 3: 'D'
+ | o 3: 32af7686d403 'D'
| |
- | o 2: 'C'
+ | o 2: 5fddd98957c8 'C'
| |
- | o 1: 'B'
+ | o 1: 42ccdea3bb16 'B'
|/
- o 0: 'A'
+ o 0: cd010b8cd998 'A'
$ cd ..
@@ -79,25 +79,25 @@
HG: added C
HG: added D
====
- saved backup bundle to $TESTTMP/a1/.hg/strip-backup/42ccdea3bb16-3cb021d3-rebase.hg (glob)
+ saved backup bundle to $TESTTMP/a1/.hg/strip-backup/42ccdea3bb16-3cb021d3-rebase.hg
$ hg tglogp
- @ 5:secret 'Collapsed revision
+ @ 5: 30882080ba93 secret 'Collapsed revision
| * B
| * C
| * D
|
|
| edited manually'
- o 4:draft 'H'
+ o 4: 02de42196ebe draft 'H'
|
- | o 3:draft 'G'
+ | o 3: eea13746799a draft 'G'
|/|
- o | 2:draft 'F'
+ o | 2: 24b6387c8c8c draft 'F'
| |
- | o 1:draft 'E'
+ | o 1: 9520eea781bc draft 'E'
|/
- o 0:draft 'A'
+ o 0: cd010b8cd998 draft 'A'
$ hg manifest --rev tip
A
@@ -119,23 +119,23 @@
$ hg rebase --source 4 --collapse --dest 7
rebasing 4:9520eea781bc "E"
rebasing 6:eea13746799a "G"
- saved backup bundle to $TESTTMP/a2/.hg/strip-backup/9520eea781bc-fcd8edd4-rebase.hg (glob)
+ saved backup bundle to $TESTTMP/a2/.hg/strip-backup/9520eea781bc-fcd8edd4-rebase.hg
$ hg tglog
- o 6: 'Collapsed revision
+ o 6: 7dd333a2d1e4 'Collapsed revision
| * E
| * G'
- @ 5: 'H'
+ @ 5: 02de42196ebe 'H'
|
- o 4: 'F'
+ o 4: 24b6387c8c8c 'F'
|
- | o 3: 'D'
+ | o 3: 32af7686d403 'D'
| |
- | o 2: 'C'
+ | o 2: 5fddd98957c8 'C'
| |
- | o 1: 'B'
+ | o 1: 42ccdea3bb16 'B'
|/
- o 0: 'A'
+ o 0: cd010b8cd998 'A'
$ hg manifest --rev tip
A
@@ -162,22 +162,22 @@
rebasing 4:9520eea781bc "E"
rebasing 6:eea13746799a "G"
HGEDITFORM=rebase.collapse
- saved backup bundle to $TESTTMP/a3/.hg/strip-backup/9520eea781bc-fcd8edd4-rebase.hg (glob)
+ saved backup bundle to $TESTTMP/a3/.hg/strip-backup/9520eea781bc-fcd8edd4-rebase.hg
$ hg tglog
- o 6: 'custom message'
+ o 6: 38ed6a6b026b 'custom message'
|
- @ 5: 'H'
+ @ 5: 02de42196ebe 'H'
|
- o 4: 'F'
+ o 4: 24b6387c8c8c 'F'
|
- | o 3: 'D'
+ | o 3: 32af7686d403 'D'
| |
- | o 2: 'C'
+ | o 2: 5fddd98957c8 'C'
| |
- | o 1: 'B'
+ | o 1: 42ccdea3bb16 'B'
|/
- o 0: 'A'
+ o 0: cd010b8cd998 'A'
$ hg manifest --rev tip
A
@@ -238,21 +238,21 @@
created new head
$ hg tglog
- @ 7: 'H'
+ @ 7: c65502d41787 'H'
|
- | o 6: 'G'
+ | o 6: c772a8b2dc17 'G'
| |\
- | | o 5: 'F'
+ | | o 5: 7f219660301f 'F'
| | |
- | | o 4: 'E'
+ | | o 4: 8a5212ebc852 'E'
| | |
- | o | 3: 'D'
+ | o | 3: 2870ad076e54 'D'
| |\|
- | o | 2: 'C'
+ | o | 2: c5cefa58fd55 'C'
|/ /
- | o 1: 'B'
+ | o 1: 27547f69f254 'B'
|/
- o 0: 'A'
+ o 0: 4a2df7238c3b 'A'
$ cd ..
@@ -272,22 +272,22 @@
rebasing 4:8a5212ebc852 "E"
rebasing 5:7f219660301f "F"
rebasing 6:c772a8b2dc17 "G"
- saved backup bundle to $TESTTMP/b1/.hg/strip-backup/8a5212ebc852-75046b61-rebase.hg (glob)
+ saved backup bundle to $TESTTMP/b1/.hg/strip-backup/8a5212ebc852-75046b61-rebase.hg
$ hg tglog
- o 5: 'Collapsed revision
+ o 5: f97c4725bd99 'Collapsed revision
|\ * E
| | * F
| | * G'
- | @ 4: 'H'
+ | @ 4: c65502d41787 'H'
| |
- o | 3: 'D'
+ o | 3: 2870ad076e54 'D'
|\ \
- | o | 2: 'C'
+ | o | 2: c5cefa58fd55 'C'
| |/
- o / 1: 'B'
+ o / 1: 27547f69f254 'B'
|/
- o 0: 'A'
+ o 0: 4a2df7238c3b 'A'
$ hg manifest --rev tip
A
@@ -321,7 +321,7 @@
c65502d4178782309ce0574c5ae6ee9485a9bafa o default
$ hg strip 4
- saved backup bundle to $TESTTMP/b2/.hg/strip-backup/8a5212ebc852-75046b61-backup.hg (glob)
+ saved backup bundle to $TESTTMP/b2/.hg/strip-backup/8a5212ebc852-75046b61-backup.hg
$ cat $TESTTMP/b2/.hg/cache/branch2-served
c65502d4178782309ce0574c5ae6ee9485a9bafa 4
@@ -393,23 +393,23 @@
created new head
$ hg tglog
- @ 8: 'I'
+ @ 8: 46d6f0e29c20 'I'
|
- | o 7: 'H'
+ | o 7: 417d3b648079 'H'
| |\
- | | o 6: 'G'
+ | | o 6: 55a44ad28289 'G'
| | |
- | | o 5: 'F'
+ | | o 5: dca5924bb570 'F'
| | |
- | | o 4: 'E'
+ | | o 4: 8a5212ebc852 'E'
| | |
- | o | 3: 'D'
+ | o | 3: 2870ad076e54 'D'
| |\|
- | o | 2: 'C'
+ | o | 2: c5cefa58fd55 'C'
|/ /
- | o 1: 'B'
+ | o 1: 27547f69f254 'B'
|/
- o 0: 'A'
+ o 0: 4a2df7238c3b 'A'
$ cd ..
@@ -425,23 +425,23 @@
merging E
rebasing 6:55a44ad28289 "G"
rebasing 7:417d3b648079 "H"
- saved backup bundle to $TESTTMP/c1/.hg/strip-backup/8a5212ebc852-f95d0879-rebase.hg (glob)
+ saved backup bundle to $TESTTMP/c1/.hg/strip-backup/8a5212ebc852-f95d0879-rebase.hg
$ hg tglog
- o 5: 'Collapsed revision
+ o 5: 340b34a63b39 'Collapsed revision
|\ * E
| | * F
| | * G
| | * H'
- | @ 4: 'I'
+ | @ 4: 46d6f0e29c20 'I'
| |
- o | 3: 'D'
+ o | 3: 2870ad076e54 'D'
|\ \
- | o | 2: 'C'
+ | o | 2: c5cefa58fd55 'C'
| |/
- o / 1: 'B'
+ o / 1: 27547f69f254 'B'
|/
- o 0: 'A'
+ o 0: 4a2df7238c3b 'A'
$ hg manifest --rev tip
A
@@ -493,17 +493,17 @@
created new head
$ hg tglog
- @ 5: 'F'
+ @ 5: c137c2b8081f 'F'
|
- | o 4: 'E'
+ | o 4: 0a42590ed746 'E'
| |\
- | | o 3: 'D'
+ | | o 3: 7bbcd6078bcc 'D'
| | |
- | o | 2: 'C'
+ | o | 2: f838bfaca5c7 'C'
| |/
- | o 1: 'B'
+ | o 1: 27547f69f254 'B'
|/
- o 0: 'A'
+ o 0: 4a2df7238c3b 'A'
$ cd ..
@@ -518,17 +518,17 @@
rebasing 2:f838bfaca5c7 "C"
rebasing 3:7bbcd6078bcc "D"
rebasing 4:0a42590ed746 "E"
- saved backup bundle to $TESTTMP/d1/.hg/strip-backup/27547f69f254-9a3f7d92-rebase.hg (glob)
+ saved backup bundle to $TESTTMP/d1/.hg/strip-backup/27547f69f254-9a3f7d92-rebase.hg
$ hg tglog
- o 2: 'Collapsed revision
+ o 2: b72eaccb283f 'Collapsed revision
| * B
| * C
| * D
| * E'
- @ 1: 'F'
+ @ 1: c137c2b8081f 'F'
|
- o 0: 'A'
+ o 0: 4a2df7238c3b 'A'
$ hg manifest --rev tip
A
@@ -564,13 +564,13 @@
adding d
$ hg tglog
- @ 3: 'D'
+ @ 3: 41acb9dca9eb 'D'
|
- | o 2: 'C' two
+ | o 2: 8ac4a08debf1 'C' two
| |
- | o 1: 'B' one
+ | o 1: 1ba175478953 'B' one
|/
- o 0: 'A'
+ o 0: 1994f17a630e 'A'
$ hg rebase --keepbranches --collapse -s 1 -d 3
abort: cannot collapse multiple named branches
@@ -588,32 +588,32 @@
0 files updated, 0 files merged, 1 files removed, 0 files unresolved
created new head
$ hg tglog
- @ 5: 'E'
+ @ 5: fbfb97b1089a 'E'
|
- | o 4: 'E'
+ | o 4: f338eb3c2c7c 'E'
|/
- o 3: 'D'
+ o 3: 41acb9dca9eb 'D'
|
- | o 2: 'C' two
+ | o 2: 8ac4a08debf1 'C' two
| |
- | o 1: 'B' one
+ | o 1: 1ba175478953 'B' one
|/
- o 0: 'A'
+ o 0: 1994f17a630e 'A'
$ hg rebase -s 5 -d 4
rebasing 5:fbfb97b1089a "E" (tip)
note: rebase of 5:fbfb97b1089a created no changes to commit
- saved backup bundle to $TESTTMP/e/.hg/strip-backup/fbfb97b1089a-553e1d85-rebase.hg (glob)
+ saved backup bundle to $TESTTMP/e/.hg/strip-backup/fbfb97b1089a-553e1d85-rebase.hg
$ hg tglog
- @ 4: 'E'
+ @ 4: f338eb3c2c7c 'E'
|
- o 3: 'D'
+ o 3: 41acb9dca9eb 'D'
|
- | o 2: 'C' two
+ | o 2: 8ac4a08debf1 'C' two
| |
- | o 1: 'B' one
+ | o 1: 1ba175478953 'B' one
|/
- o 0: 'A'
+ o 0: 1994f17a630e 'A'
$ hg export tip
# HG changeset patch
@@ -646,13 +646,13 @@
(run 'hg heads' to see heads, 'hg merge' to merge)
$ hg up -q tip
$ hg tglog
- @ 3: 'move2'
+ @ 3: 338e84e2e558 'move2'
|
- o 2: 'move1'
+ o 2: 6e7340ee38c0 'move1'
|
- | o 1: 'change'
+ | o 1: 1352765a01d4 'change'
|/
- o 0: 'add'
+ o 0: f447d5abf5ea 'add'
$ hg rebase --collapse -d 1
rebasing 2:6e7340ee38c0 "move1"
@@ -662,7 +662,7 @@
rebasing 3:338e84e2e558 "move2" (tip)
merging f and c to c
merging e and g to g
- saved backup bundle to $TESTTMP/copies/.hg/strip-backup/6e7340ee38c0-ef8ef003-rebase.hg (glob)
+ saved backup bundle to $TESTTMP/copies/.hg/strip-backup/6e7340ee38c0-ef8ef003-rebase.hg
$ hg st
$ hg st --copies --change tip
A d
@@ -686,12 +686,12 @@
Test collapsing a middle revision in-place
$ hg tglog
- @ 2: 'Collapsed revision
+ @ 2: 64b456429f67 'Collapsed revision
| * move1
| * move2'
- o 1: 'change'
+ o 1: 1352765a01d4 'change'
|
- o 0: 'add'
+ o 0: f447d5abf5ea 'add'
$ hg rebase --collapse -r 1 -d 0
abort: can't remove original changesets with unrebased descendants
@@ -703,7 +703,7 @@
$ hg rebase --collapse -b . -d 0
rebasing 1:1352765a01d4 "change"
rebasing 2:64b456429f67 "Collapsed revision" (tip)
- saved backup bundle to $TESTTMP/copies/.hg/strip-backup/1352765a01d4-45a352ea-rebase.hg (glob)
+ saved backup bundle to $TESTTMP/copies/.hg/strip-backup/1352765a01d4-45a352ea-rebase.hg
$ hg st --change tip --copies
M a
M c
@@ -748,11 +748,11 @@
created new head
$ hg tglog
- @ 2: 'C'
+ @ 2: c5cefa58fd55 'C'
|
- | o 1: 'B'
+ | o 1: 27547f69f254 'B'
|/
- o 0: 'A'
+ o 0: 4a2df7238c3b 'A'
@@ -762,12 +762,12 @@
$ hg strip 2
0 files updated, 0 files merged, 1 files removed, 0 files unresolved
- saved backup bundle to $TESTTMP/f/.hg/strip-backup/c5cefa58fd55-629429f4-backup.hg (glob)
+ saved backup bundle to $TESTTMP/f/.hg/strip-backup/c5cefa58fd55-629429f4-backup.hg
$ hg tglog
- o 1: 'B'
+ o 1: 27547f69f254 'B'
|
- @ 0: 'A'
+ @ 0: 4a2df7238c3b 'A'
@@ -795,7 +795,7 @@
$ hg rebase -d 0 -r "1::2" --collapse -m collapsed
rebasing 1:6d8d9f24eec3 "a"
rebasing 2:1cc73eca5ecc "b" (foo tip)
- saved backup bundle to $TESTTMP/collapseaddremove/.hg/strip-backup/6d8d9f24eec3-77d3b6e2-rebase.hg (glob)
+ saved backup bundle to $TESTTMP/collapseaddremove/.hg/strip-backup/6d8d9f24eec3-77d3b6e2-rebase.hg
$ hg log -G --template "{rev}: '{desc}' {bookmarks}"
@ 1: 'collapsed' foo
|
@@ -836,7 +836,7 @@
continue: hg rebase --continue
$ hg rebase --continue
rebasing 2:b8d8db2b242d "a-dev" (tip)
- saved backup bundle to $TESTTMP/collapse_remember_message/.hg/strip-backup/b8d8db2b242d-f474c19a-rebase.hg (glob)
+ saved backup bundle to $TESTTMP/collapse_remember_message/.hg/strip-backup/b8d8db2b242d-f474c19a-rebase.hg
$ hg log
changeset: 2:45ba1d1a8665
tag: tip
--- a/tests/test-rebase-conflicts.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-rebase-conflicts.t Mon Jan 22 17:53:02 2018 -0500
@@ -102,7 +102,7 @@
already rebased 3:3163e20567cc "L1" as 3e046f2ecedb
rebasing 4:46f0b057b5c0 "L2"
rebasing 5:8029388f38dc "L3" (mybook)
- saved backup bundle to $TESTTMP/a/.hg/strip-backup/3163e20567cc-5ca4656e-rebase.hg (glob)
+ saved backup bundle to $TESTTMP/a/.hg/strip-backup/3163e20567cc-5ca4656e-rebase.hg
$ hg tglog
@ 5:secret 'L3' mybook
@@ -235,6 +235,7 @@
$ hg rebase -s9 -d2 --debug # use debug to really check merge base used
rebase onto 4bc80088dc6b starting from e31216eec445
+ rebasing on disk
rebase status stored
rebasing 9:e31216eec445 "more changes to f1"
future parents are 2 and -1
@@ -299,7 +300,7 @@
bundle2-output-bundle: "HG20", (1 params) 2 parts total
bundle2-output-part: "changegroup" (params: 1 mandatory 1 advisory) streamed payload
bundle2-output-part: "phase-heads" 24 bytes payload
- saved backup bundle to $TESTTMP/issue4041/.hg/strip-backup/e31216eec445-15f7a814-rebase.hg (glob)
+ saved backup bundle to $TESTTMP/issue4041/.hg/strip-backup/e31216eec445-15f7a814-rebase.hg
3 changesets found
list of changesets:
4c9fbe56a16f30c0d5dcc40ec1a97bbe3325209c
@@ -412,7 +413,7 @@
rebasing 1:112478962961 "B" (B)
rebasing 3:f585351a92f8 "D" (D)
warning: orphaned descendants detected, not stripping 112478962961
- saved backup bundle to $TESTTMP/b/.hg/strip-backup/f585351a92f8-e536a9e4-rebase.hg (glob)
+ saved backup bundle to $TESTTMP/b/.hg/strip-backup/f585351a92f8-e536a9e4-rebase.hg
$ rm .hg/localtags
$ hg tglog
--- a/tests/test-rebase-dest.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-rebase-dest.t Mon Jan 22 17:53:02 2018 -0500
@@ -21,15 +21,15 @@
[255]
$ hg rebase -d 1
rebasing 2:5db65b93a12b "cc" (tip)
- saved backup bundle to $TESTTMP/repo/.hg/strip-backup/5db65b93a12b-4fb789ec-rebase.hg (glob)
+ saved backup bundle to $TESTTMP/repo/.hg/strip-backup/5db65b93a12b-4fb789ec-rebase.hg
$ hg rebase -d 0 -r . -q
$ HGPLAIN=1 hg rebase
rebasing 2:889b0bc6a730 "cc" (tip)
- saved backup bundle to $TESTTMP/repo/.hg/strip-backup/889b0bc6a730-41ec4f81-rebase.hg (glob)
+ saved backup bundle to $TESTTMP/repo/.hg/strip-backup/889b0bc6a730-41ec4f81-rebase.hg
$ hg rebase -d 0 -r . -q
$ hg --config commands.rebase.requiredest=False rebase
rebasing 2:279de9495438 "cc" (tip)
- saved backup bundle to $TESTTMP/repo/.hg/strip-backup/279de9495438-ab0a5128-rebase.hg (glob)
+ saved backup bundle to $TESTTMP/repo/.hg/strip-backup/279de9495438-ab0a5128-rebase.hg
Requiring dest should not break continue or other rebase options
$ hg up 1 -q
@@ -56,7 +56,7 @@
continue: hg rebase --continue
$ hg rebase --continue
rebasing 3:0537f6b50def "dc" (tip)
- saved backup bundle to $TESTTMP/repo/.hg/strip-backup/0537f6b50def-be4c7386-rebase.hg (glob)
+ saved backup bundle to $TESTTMP/repo/.hg/strip-backup/0537f6b50def-be4c7386-rebase.hg
$ cd ..
@@ -111,11 +111,10 @@
> [phases]
> publish=False
> [alias]
- > tglog = log -G --template "{rev}: {desc} {instabilities}" -r 'sort(all(), topo)'
+ > tglog = log -G --template "{rev}: {node|short} {desc} {instabilities}" -r 'sort(all(), topo)'
> [extensions]
> maprevset=$TESTTMP/maprevset.py
> [experimental]
- > rebase.multidest=true
> evolution=true
> EOF
@@ -175,13 +174,13 @@
already rebased 0:426bada5c675 "A" (A)
already rebased 2:dc0947a82db8 "C" (C)
rebasing 3:004dc1679908 "D" (D tip)
- o 4: D
+ o 4: d8d8601abd5e D
- o 2: C
+ o 2: dc0947a82db8 C
|
- | o 1: B
+ | o 1: fc2b737bb2e5 B
|
- o 0: A
+ o 0: 426bada5c675 A
Destination resolves to multiple changesets:
@@ -220,17 +219,17 @@
rebasing 4:26805aba1e60 "C" (C)
rebasing 3:cd488e83d208 "E" (E)
rebasing 5:0069ba24938a "F" (F tip)
- o 9: F
+ o 9: d150ff263fc8 F
|
- o 8: E
+ o 8: 66f30a1a2eab E
|
- | o 7: C
+ | o 7: 93db94ffae0e C
| |
- | o 6: B
+ | o 6: d0071c3b0c88 B
| |
- | o 1: D
+ | o 1: 058c1e1fb10a D
|
- o 0: A
+ o 0: 426bada5c675 A
Different destinations for merge changesets with a same root:
@@ -245,19 +244,19 @@
> EOS
rebasing 3:a4256619d830 "B" (B)
rebasing 6:8e139e245220 "C" (C tip)
- o 8: C
+ o 8: 51e2ce92e06a C
|\
- | o 7: B
+ | o 7: 2ed0c8546285 B
| |\
- o | | 5: G
+ o | | 5: 8fdb2c1feb20 G
| | |
- | | o 4: E
+ | | o 4: cd488e83d208 E
| | |
- o | | 2: F
+ o | | 2: a6661b868de9 F
/ /
- | o 1: D
+ | o 1: 058c1e1fb10a D
|
- o 0: A
+ o 0: 426bada5c675 A
Move to a previous parent:
@@ -275,21 +274,21 @@
rebasing 4:33441538d4aa "F" (F)
rebasing 6:cf43ad9da869 "G" (G)
rebasing 7:eef94f3b5f03 "H" (H tip)
- o 10: H
+ o 10: b3d84c6666cf H
|
- | o 5: D
+ | o 5: f585351a92f8 D
|/
- o 3: C
+ o 3: 26805aba1e60 C
|
- | o 9: G
+ | o 9: f7c28a1a15e2 G
|/
- o 1: B
+ o 1: 112478962961 B
|
- | o 8: F
+ | o 8: 02aa697facf7 F
|/
- | o 2: E
+ | o 2: 7fb047a69f22 E
|/
- o 0: A
+ o 0: 426bada5c675 A
Source overlaps with destination:
@@ -300,13 +299,13 @@
> EOS
rebasing 2:dc0947a82db8 "C" (C)
rebasing 1:112478962961 "B" (B)
- o 5: B
+ o 5: 5fe9935d5222 B
|
- o 4: C
+ o 4: 12d20731b9e0 C
|
- o 3: D
+ o 3: b18e25de2cf5 D
|
- o 0: A
+ o 0: 426bada5c675 A
Detect cycles early:
@@ -346,17 +345,17 @@
already rebased 3:b18e25de2cf5 "D" (D)
already rebased 4:312782b8f06e "E" (E)
already rebased 5:ad6717a6a58e "F" (F tip)
- o 5: F
+ o 5: ad6717a6a58e F
|
- o 3: D
+ o 3: b18e25de2cf5 D
|
- | o 4: E
+ | o 4: 312782b8f06e E
| |\
- +---o 2: C
+ +---o 2: dc0947a82db8 C
| |
- | o 1: B
+ | o 1: 112478962961 B
|/
- o 0: A
+ o 0: 426bada5c675 A
Massively rewrite the DAG:
@@ -380,27 +379,27 @@
rebasing 10:ae41898d7875 "K" (K tip)
rebasing 9:711f53bbef0b "G" (G)
rebasing 6:64a8289d2492 "F" (F)
- o 21: F
+ o 21: 3735afb3713a F
|
- o 20: G
+ o 20: 07698142d7a7 G
|
- o 19: K
+ o 19: 33aba52e7e72 K
|
- o 18: D
+ o 18: 9fdae89dc5a1 D
|
- o 17: E
+ o 17: 277dda9a65ee E
|
- o 16: B
+ o 16: 9c74fd8657ad B
|
- o 15: J
+ o 15: 6527eb0688bb J
|
- o 14: C
+ o 14: e94d655b928d C
|
- o 13: H
+ o 13: 620d6d349459 H
|
- o 12: A
+ o 12: a569a116758f A
|
- o 11: I
+ o 11: 2bf1302f5c18 I
Resolve instability:
@@ -421,33 +420,34 @@
> \| # amend: I -> I2
> A
> EOF
+ 6 new orphan changesets
rebasing 16:5c432343bf59 "J" (J tip)
rebasing 3:26805aba1e60 "C" (C)
rebasing 6:f585351a92f8 "D" (D)
rebasing 10:ffebc37c5d0b "E3" (E3)
rebasing 13:fb184bcfeee8 "F2" (F2)
rebasing 11:dc838ab4c0da "G" (G)
- o 22: G
+ o 22: 174f63d574a8 G
|
- o 21: F2
+ o 21: c9d9fbe76705 F2
|
- o 20: E3
+ o 20: 0a03c2ede755 E3
|
- o 19: D
+ o 19: 228d9d2541b1 D
|
- o 18: C
+ o 18: cd856b400c95 C
|
- o 17: J
+ o 17: 9148200c858c J
|
- o 15: I2
+ o 15: eb74780f5094 I2
|
- o 12: H
+ o 12: 78309edd643f H
|
- o 5: B2
+ o 5: 4b4531bd8e1d B2
|
- o 4: N
+ o 4: 337c285c272b N
|
- o 2: M
+ o 2: 699bc4b6fa22 M
|
- o 0: A
+ o 0: 426bada5c675 A
--- a/tests/test-rebase-detach.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-rebase-detach.t Mon Jan 22 17:53:02 2018 -0500
@@ -3,7 +3,7 @@
> rebase=
>
> [alias]
- > tglog = log -G --template "{rev}: '{desc}'\n"
+ > tglog = log -G --template "{rev}: {node|short} '{desc}'\n"
>
> [extensions]
> drawdag=$TESTDIR/drawdag.py
@@ -25,7 +25,7 @@
$ hg rebase -s D -d B
rebasing 3:e7b3f00ed42e "D" (D tip)
- saved backup bundle to $TESTTMP/a1/.hg/strip-backup/e7b3f00ed42e-6f368371-rebase.hg (glob)
+ saved backup bundle to $TESTTMP/a1/.hg/strip-backup/e7b3f00ed42e-6f368371-rebase.hg
$ hg log -G --template "{rev}:{phase} '{desc}' {branches}\n"
o 3:secret 'D'
@@ -62,18 +62,18 @@
$ hg rebase -s D -d B
rebasing 3:e7b3f00ed42e "D" (D)
rebasing 4:69a34c08022a "E" (E tip)
- saved backup bundle to $TESTTMP/a2/.hg/strip-backup/e7b3f00ed42e-a2ec7cea-rebase.hg (glob)
+ saved backup bundle to $TESTTMP/a2/.hg/strip-backup/e7b3f00ed42e-a2ec7cea-rebase.hg
$ hg tglog
- o 4: 'E'
+ o 4: ee79e0744528 'E'
|
- o 3: 'D'
+ o 3: 10530e1d72d9 'D'
|
- | o 2: 'C'
+ | o 2: dc0947a82db8 'C'
| |
- o | 1: 'B'
+ o | 1: 112478962961 'B'
|/
- o 0: 'A'
+ o 0: 426bada5c675 'A'
$ hg manifest --rev tip
A
@@ -99,16 +99,16 @@
$ hg rebase -s C -d B
rebasing 2:dc0947a82db8 "C" (C)
rebasing 3:e7b3f00ed42e "D" (D tip)
- saved backup bundle to $TESTTMP/a3/.hg/strip-backup/dc0947a82db8-b8481714-rebase.hg (glob)
+ saved backup bundle to $TESTTMP/a3/.hg/strip-backup/dc0947a82db8-b8481714-rebase.hg
$ hg tglog
- o 3: 'D'
+ o 3: 7375f3dbfb0f 'D'
|
- o 2: 'C'
+ o 2: bbfdd6cb49aa 'C'
|
- o 1: 'B'
+ o 1: 112478962961 'B'
|
- o 0: 'A'
+ o 0: 426bada5c675 'A'
$ hg manifest --rev tip
A
@@ -138,7 +138,7 @@
$ hg rebase --collapse -s D -d B
rebasing 3:e7b3f00ed42e "D" (D)
rebasing 4:69a34c08022a "E" (E tip)
- saved backup bundle to $TESTTMP/a4/.hg/strip-backup/e7b3f00ed42e-a2ec7cea-rebase.hg (glob)
+ saved backup bundle to $TESTTMP/a4/.hg/strip-backup/e7b3f00ed42e-a2ec7cea-rebase.hg
$ hg log -G --template "{rev}:{phase} '{desc}' {branches}\n"
o 3:secret 'Collapsed revision
@@ -176,33 +176,33 @@
rebasing 2:dc0947a82db8 "C" (C)
rebasing 3:e7b3f00ed42e "D" (D)
rebasing 4:69a34c08022a "E" (E tip)
- saved backup bundle to $TESTTMP/a5/.hg/strip-backup/dc0947a82db8-3eefec98-rebase.hg (glob)
+ saved backup bundle to $TESTTMP/a5/.hg/strip-backup/dc0947a82db8-3eefec98-rebase.hg
$ hg tglog
- o 4: 'E'
+ o 4: e3d0c70d606d 'E'
|
- o 3: 'D'
+ o 3: e9153d36a1af 'D'
|
- o 2: 'C'
+ o 2: a7ac28b870a8 'C'
|
- o 1: 'B'
+ o 1: fc2b737bb2e5 'B'
- o 0: 'A'
+ o 0: 426bada5c675 'A'
$ hg rebase -d 1 -s 3
rebasing 3:e9153d36a1af "D"
rebasing 4:e3d0c70d606d "E" (tip)
- saved backup bundle to $TESTTMP/a5/.hg/strip-backup/e9153d36a1af-db7388ed-rebase.hg (glob)
+ saved backup bundle to $TESTTMP/a5/.hg/strip-backup/e9153d36a1af-db7388ed-rebase.hg
$ hg tglog
- o 4: 'E'
+ o 4: 2c24e540eccd 'E'
|
- o 3: 'D'
+ o 3: 73f786ed52ff 'D'
|
- | o 2: 'C'
+ | o 2: a7ac28b870a8 'C'
|/
- o 1: 'B'
+ o 1: fc2b737bb2e5 'B'
- o 0: 'A'
+ o 0: 426bada5c675 'A'
$ cd ..
@@ -231,42 +231,42 @@
$ echo "J" >> F
$ hg ci -m "J"
$ hg tglog
- @ 7: 'J'
+ @ 7: c6aaf0d259c0 'J'
|
- o 6: 'Merge'
+ o 6: 0cfbc7e8faaf 'Merge'
|\
- | o 5: 'I'
+ | o 5: b92d164ad3cb 'I'
| |
- o | 4: 'H'
+ o | 4: 4ea5b230dea3 'H'
| |
- | o 3: 'G'
+ | o 3: c6001eacfde5 'G'
|/|
- o | 2: 'F'
+ o | 2: 8908a377a434 'F'
| |
- | o 1: 'E'
+ | o 1: 7fb047a69f22 'E'
|/
- o 0: 'A'
+ o 0: 426bada5c675 'A'
$ hg rebase -s I -d H --collapse --config ui.merge=internal:other
rebasing 5:b92d164ad3cb "I" (I)
rebasing 6:0cfbc7e8faaf "Merge"
rebasing 7:c6aaf0d259c0 "J" (tip)
- saved backup bundle to $TESTTMP/a6/.hg/strip-backup/b92d164ad3cb-88fd7ab7-rebase.hg (glob)
+ saved backup bundle to $TESTTMP/a6/.hg/strip-backup/b92d164ad3cb-88fd7ab7-rebase.hg
$ hg tglog
- @ 5: 'Collapsed revision
+ @ 5: 65079693dac4 'Collapsed revision
| * I
| * Merge
| * J'
- o 4: 'H'
+ o 4: 4ea5b230dea3 'H'
|
- | o 3: 'G'
+ | o 3: c6001eacfde5 'G'
|/|
- o | 2: 'F'
+ o | 2: 8908a377a434 'F'
| |
- | o 1: 'E'
+ | o 1: 7fb047a69f22 'E'
|/
- o 0: 'A'
+ o 0: 426bada5c675 'A'
$ hg log --rev tip
@@ -305,7 +305,7 @@
$ hg rebase -c
rebasing 3:17b4880d2402 "B2" (tip)
note: rebase of 3:17b4880d2402 created no changes to commit
- saved backup bundle to $TESTTMP/a7/.hg/strip-backup/17b4880d2402-1ae1f6cc-rebase.hg (glob)
+ saved backup bundle to $TESTTMP/a7/.hg/strip-backup/17b4880d2402-1ae1f6cc-rebase.hg
$ hg log -G --template "{rev}:{phase} '{desc}' {branches}\n"
o 2:draft 'C'
|
--- a/tests/test-rebase-emptycommit.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-rebase-emptycommit.t Mon Jan 22 17:53:02 2018 -0500
@@ -88,7 +88,7 @@
rebasing 4:69a34c08022a "E" (BOOK-E)
note: rebase of 4:69a34c08022a created no changes to commit
rebasing 5:6b2aeab91270 "F" (BOOK-F F)
- saved backup bundle to $TESTTMP/non-merge/.hg/strip-backup/dc0947a82db8-52bb4973-rebase.hg (glob)
+ saved backup bundle to $TESTTMP/non-merge/.hg/strip-backup/dc0947a82db8-52bb4973-rebase.hg
$ hg log -G -T '{rev} {desc} {bookmarks}'
o 5 F BOOK-F
|
@@ -136,7 +136,7 @@
note: rebase of 3:b18e25de2cf5 created no changes to commit
rebasing 4:86a1f6686812 "E" (BOOK-E E)
note: rebase of 4:86a1f6686812 created no changes to commit
- saved backup bundle to $TESTTMP/merge1/.hg/strip-backup/b18e25de2cf5-1fd0a4ba-rebase.hg (glob)
+ saved backup bundle to $TESTTMP/merge1/.hg/strip-backup/b18e25de2cf5-1fd0a4ba-rebase.hg
$ hg log -G -T '{rev} {desc} {bookmarks}'
o 4 H BOOK-C BOOK-D BOOK-E
@@ -186,7 +186,7 @@
rebasing 5:ad6717a6a58e "F" (BOOK-F)
note: rebase of 5:ad6717a6a58e created no changes to commit
rebasing 6:c58e8bdac1f4 "G" (BOOK-G G)
- saved backup bundle to $TESTTMP/merge2/.hg/strip-backup/b18e25de2cf5-2d487005-rebase.hg (glob)
+ saved backup bundle to $TESTTMP/merge2/.hg/strip-backup/b18e25de2cf5-2d487005-rebase.hg
$ hg log -G -T '{rev} {desc} {bookmarks}'
o 7 G BOOK-G
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-rebase-inmemory.t Mon Jan 22 17:53:02 2018 -0500
@@ -0,0 +1,159 @@
+#require symlink execbit
+ $ cat << EOF >> $HGRCPATH
+ > [extensions]
+ > amend=
+ > rebase=
+ > debugdrawdag=$TESTDIR/drawdag.py
+ > [rebase]
+ > experimental.inmemory=1
+ > [diff]
+ > git=1
+ > [alias]
+ > tglog = log -G --template "{rev}: {node|short} '{desc}'\n"
+ > EOF
+
+Rebase a simple DAG:
+ $ hg init repo1
+ $ cd repo1
+ $ hg debugdrawdag <<'EOS'
+ > c b
+ > |/
+ > d
+ > |
+ > a
+ > EOS
+ $ hg up -C a
+ 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ $ hg tglog
+ o 3: 814f6bd05178 'c'
+ |
+ | o 2: db0e82a16a62 'b'
+ |/
+ o 1: 02952614a83d 'd'
+ |
+ @ 0: b173517d0057 'a'
+
+ $ hg cat -r 3 c
+ c (no-eol)
+ $ hg cat -r 2 b
+ b (no-eol)
+ $ hg rebase --debug -r b -d c | grep rebasing
+ rebasing in-memory
+ rebasing 2:db0e82a16a62 "b" (b)
+ $ hg tglog
+ o 3: ca58782ad1e4 'b'
+ |
+ o 2: 814f6bd05178 'c'
+ |
+ o 1: 02952614a83d 'd'
+ |
+ @ 0: b173517d0057 'a'
+
+ $ hg cat -r 3 b
+ b (no-eol)
+ $ hg cat -r 2 c
+ c (no-eol)
+
+Case 2:
+ $ hg init repo2
+ $ cd repo2
+ $ hg debugdrawdag <<'EOS'
+ > c b
+ > |/
+ > d
+ > |
+ > a
+ > EOS
+
+Add a symlink and executable file:
+ $ hg up -C c
+ 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ $ ln -s somefile e
+ $ echo f > f
+ $ chmod +x f
+ $ hg add e f
+ $ hg amend -q
+ $ hg up -Cq a
+
+Write files to the working copy, and ensure they're still there after the rebase
+ $ echo "abc" > a
+ $ ln -s def b
+ $ echo "ghi" > c
+ $ echo "jkl" > d
+ $ echo "mno" > e
+ $ hg tglog
+ o 3: f56b71190a8f 'c'
+ |
+ | o 2: db0e82a16a62 'b'
+ |/
+ o 1: 02952614a83d 'd'
+ |
+ @ 0: b173517d0057 'a'
+
+ $ hg cat -r 3 c
+ c (no-eol)
+ $ hg cat -r 2 b
+ b (no-eol)
+ $ hg cat -r 3 e
+ somefile (no-eol)
+ $ hg rebase --debug -s b -d a | grep rebasing
+ rebasing in-memory
+ rebasing 2:db0e82a16a62 "b" (b)
+ $ hg tglog
+ o 3: fc055c3b4d33 'b'
+ |
+ | o 2: f56b71190a8f 'c'
+ | |
+ | o 1: 02952614a83d 'd'
+ |/
+ @ 0: b173517d0057 'a'
+
+ $ hg cat -r 2 c
+ c (no-eol)
+ $ hg cat -r 3 b
+ b (no-eol)
+ $ hg rebase --debug -s 1 -d 3 | grep rebasing
+ rebasing in-memory
+ rebasing 1:02952614a83d "d" (d)
+ rebasing 2:f56b71190a8f "c"
+ $ hg tglog
+ o 3: 753feb6fd12a 'c'
+ |
+ o 2: 09c044d2cb43 'd'
+ |
+ o 1: fc055c3b4d33 'b'
+ |
+ @ 0: b173517d0057 'a'
+
+Ensure working copy files are still there:
+ $ cat a
+ abc
+ $ readlink.py b
+ b -> def
+ $ cat e
+ mno
+
+Ensure symlink and executable files were rebased properly:
+ $ hg up -Cq 3
+ $ readlink.py e
+ e -> somefile
+ $ ls -l f | cut -c -10
+ -rwxr-xr-x
+
+Rebase the working copy parent, which should default to an on-disk merge even if
+we requested in-memory.
+ $ hg up -C 3
+ 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ $ hg rebase -r 3 -d 0 --debug | grep rebasing
+ rebasing on disk
+ rebasing 3:753feb6fd12a "c" (tip)
+ $ hg tglog
+ @ 3: 844a7de3e617 'c'
+ |
+ | o 2: 09c044d2cb43 'd'
+ | |
+ | o 1: fc055c3b4d33 'b'
+ |/
+ o 0: b173517d0057 'a'
+
+
--- a/tests/test-rebase-interruptions.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-rebase-interruptions.t Mon Jan 22 17:53:02 2018 -0500
@@ -6,8 +6,8 @@
> publish=False
>
> [alias]
- > tglog = log -G --template "{rev}: '{desc}' {branches}\n"
- > tglogp = log -G --template "{rev}:{phase} '{desc}' {branches}\n"
+ > tglog = log -G --template "{rev}: {node|short} '{desc}' {branches}\n"
+ > tglogp = log -G --template "{rev}: {node|short} {phase} '{desc}' {branches}\n"
> EOF
@@ -44,15 +44,15 @@
$ cd a1
$ hg tglog
- @ 4: 'E'
+ @ 4: ae36e8e3dfd7 'E'
|
- o 3: 'D'
+ o 3: 46b37eabc604 'D'
|
- | o 2: 'C'
+ | o 2: 965c486023db 'C'
| |
- | o 1: 'B'
+ | o 1: 27547f69f254 'B'
|/
- o 0: 'A'
+ o 0: 4a2df7238c3b 'A'
Rebasing B onto E:
@@ -77,19 +77,19 @@
$ hg phase --force --secret 6
$ hg tglogp
- @ 6:secret 'Extra'
+ @ 6: deb5d2f93d8b secret 'Extra'
|
- | o 5:draft 'B'
+ | o 5: 45396c49d53b draft 'B'
| |
- | o 4:draft 'E'
+ | o 4: ae36e8e3dfd7 draft 'E'
| |
- | o 3:draft 'D'
+ | o 3: 46b37eabc604 draft 'D'
| |
- o | 2:draft 'C'
+ o | 2: 965c486023db draft 'C'
| |
- o | 1:draft 'B'
+ o | 1: 27547f69f254 draft 'B'
|/
- o 0:draft 'A'
+ o 0: 4a2df7238c3b draft 'A'
Resume the rebasing:
@@ -115,21 +115,21 @@
warning: orphaned descendants detected, not stripping 27547f69f254, 965c486023db
$ hg tglogp
- o 7:draft 'C'
+ o 7: d2d25e26288e draft 'C'
|
- | o 6:secret 'Extra'
+ | o 6: deb5d2f93d8b secret 'Extra'
| |
- o | 5:draft 'B'
+ o | 5: 45396c49d53b draft 'B'
| |
- @ | 4:draft 'E'
+ @ | 4: ae36e8e3dfd7 draft 'E'
| |
- o | 3:draft 'D'
+ o | 3: 46b37eabc604 draft 'D'
| |
- | o 2:draft 'C'
+ | o 2: 965c486023db draft 'C'
| |
- | o 1:draft 'B'
+ | o 1: 27547f69f254 draft 'B'
|/
- o 0:draft 'A'
+ o 0: 4a2df7238c3b draft 'A'
$ cd ..
@@ -140,15 +140,15 @@
$ cd a2
$ hg tglog
- @ 4: 'E'
+ @ 4: ae36e8e3dfd7 'E'
|
- o 3: 'D'
+ o 3: 46b37eabc604 'D'
|
- | o 2: 'C'
+ | o 2: 965c486023db 'C'
| |
- | o 1: 'B'
+ | o 1: 27547f69f254 'B'
|/
- o 0: 'A'
+ o 0: 4a2df7238c3b 'A'
Rebasing B onto E:
@@ -169,19 +169,19 @@
$ hg ci -m 'Extra' --config 'extensions.rebase=!'
$ hg tglog
- @ 6: 'Extra'
+ @ 6: 402ee3642b59 'Extra'
|
- o 5: 'B'
+ o 5: 45396c49d53b 'B'
|
- o 4: 'E'
+ o 4: ae36e8e3dfd7 'E'
|
- o 3: 'D'
+ o 3: 46b37eabc604 'D'
|
- | o 2: 'C'
+ | o 2: 965c486023db 'C'
| |
- | o 1: 'B'
+ | o 1: 27547f69f254 'B'
|/
- o 0: 'A'
+ o 0: 4a2df7238c3b 'A'
Abort the rebasing:
@@ -190,19 +190,19 @@
rebase aborted
$ hg tglog
- @ 6: 'Extra'
+ @ 6: 402ee3642b59 'Extra'
|
- o 5: 'B'
+ o 5: 45396c49d53b 'B'
|
- o 4: 'E'
+ o 4: ae36e8e3dfd7 'E'
|
- o 3: 'D'
+ o 3: 46b37eabc604 'D'
|
- | o 2: 'C'
+ | o 2: 965c486023db 'C'
| |
- | o 1: 'B'
+ | o 1: 27547f69f254 'B'
|/
- o 0: 'A'
+ o 0: 4a2df7238c3b 'A'
$ cd ..
@@ -212,15 +212,15 @@
$ cd a3
$ hg tglogp
- @ 4:draft 'E'
+ @ 4: ae36e8e3dfd7 draft 'E'
|
- o 3:draft 'D'
+ o 3: 46b37eabc604 draft 'D'
|
- | o 2:draft 'C'
+ | o 2: 965c486023db draft 'C'
| |
- | o 1:draft 'B'
+ | o 1: 27547f69f254 draft 'B'
|/
- o 0:draft 'A'
+ o 0: 4a2df7238c3b draft 'A'
Rebasing B onto E:
@@ -240,17 +240,17 @@
$ hg phase --secret -f 2
$ hg tglogp
- @ 5:public 'B'
+ @ 5: 45396c49d53b public 'B'
|
- o 4:public 'E'
+ o 4: ae36e8e3dfd7 public 'E'
|
- o 3:public 'D'
+ o 3: 46b37eabc604 public 'D'
|
- | o 2:secret 'C'
+ | o 2: 965c486023db secret 'C'
| |
- | o 1:public 'B'
+ | o 1: 27547f69f254 public 'B'
|/
- o 0:public 'A'
+ o 0: 4a2df7238c3b public 'A'
Abort the rebasing:
@@ -259,17 +259,17 @@
rebase aborted
$ hg tglogp
- @ 5:public 'B'
+ @ 5: 45396c49d53b public 'B'
|
- o 4:public 'E'
+ o 4: ae36e8e3dfd7 public 'E'
|
- o 3:public 'D'
+ o 3: 46b37eabc604 public 'D'
|
- | o 2:secret 'C'
+ | o 2: 965c486023db secret 'C'
| |
- | o 1:public 'B'
+ | o 1: 27547f69f254 public 'B'
|/
- o 0:public 'A'
+ o 0: 4a2df7238c3b public 'A'
Test rebase interrupted by hooks
@@ -292,40 +292,40 @@
abort: precommit hook exited with status 1
[255]
$ hg tglogp
- @ 7:secret 'C'
+ @ 7: 401ccec5e39f secret 'C'
|
- | @ 6:secret 'F'
+ | @ 6: a0b2430ebfb8 secret 'F'
| |
- o | 5:public 'B'
+ o | 5: 45396c49d53b public 'B'
| |
- o | 4:public 'E'
+ o | 4: ae36e8e3dfd7 public 'E'
| |
- o | 3:public 'D'
+ o | 3: 46b37eabc604 public 'D'
| |
- | o 2:secret 'C'
+ | o 2: 965c486023db secret 'C'
| |
- | o 1:public 'B'
+ | o 1: 27547f69f254 public 'B'
|/
- o 0:public 'A'
+ o 0: 4a2df7238c3b public 'A'
$ hg rebase --continue
already rebased 2:965c486023db "C" as 401ccec5e39f
rebasing 6:a0b2430ebfb8 "F"
- saved backup bundle to $TESTTMP/hook-precommit/.hg/strip-backup/965c486023db-aa6250e7-rebase.hg (glob)
+ saved backup bundle to $TESTTMP/hook-precommit/.hg/strip-backup/965c486023db-aa6250e7-rebase.hg
$ hg tglogp
- @ 6:secret 'F'
+ @ 6: 6e92a149ac6b secret 'F'
|
- o 5:secret 'C'
+ o 5: 401ccec5e39f secret 'C'
|
- o 4:public 'B'
+ o 4: 45396c49d53b public 'B'
|
- o 3:public 'E'
+ o 3: ae36e8e3dfd7 public 'E'
|
- o 2:public 'D'
+ o 2: 46b37eabc604 public 'D'
|
- | o 1:public 'B'
+ | o 1: 27547f69f254 public 'B'
|/
- o 0:public 'A'
+ o 0: 4a2df7238c3b public 'A'
$ cd ..
@@ -347,40 +347,40 @@
abort: pretxncommit hook exited with status 1
[255]
$ hg tglogp
- @ 7:secret 'C'
+ @ 7: 401ccec5e39f secret 'C'
|
- | @ 6:secret 'F'
+ | @ 6: a0b2430ebfb8 secret 'F'
| |
- o | 5:public 'B'
+ o | 5: 45396c49d53b public 'B'
| |
- o | 4:public 'E'
+ o | 4: ae36e8e3dfd7 public 'E'
| |
- o | 3:public 'D'
+ o | 3: 46b37eabc604 public 'D'
| |
- | o 2:secret 'C'
+ | o 2: 965c486023db secret 'C'
| |
- | o 1:public 'B'
+ | o 1: 27547f69f254 public 'B'
|/
- o 0:public 'A'
+ o 0: 4a2df7238c3b public 'A'
$ hg rebase --continue
already rebased 2:965c486023db "C" as 401ccec5e39f
rebasing 6:a0b2430ebfb8 "F"
- saved backup bundle to $TESTTMP/hook-pretxncommit/.hg/strip-backup/965c486023db-aa6250e7-rebase.hg (glob)
+ saved backup bundle to $TESTTMP/hook-pretxncommit/.hg/strip-backup/965c486023db-aa6250e7-rebase.hg
$ hg tglogp
- @ 6:secret 'F'
+ @ 6: 6e92a149ac6b secret 'F'
|
- o 5:secret 'C'
+ o 5: 401ccec5e39f secret 'C'
|
- o 4:public 'B'
+ o 4: 45396c49d53b public 'B'
|
- o 3:public 'E'
+ o 3: ae36e8e3dfd7 public 'E'
|
- o 2:public 'D'
+ o 2: 46b37eabc604 public 'D'
|
- | o 1:public 'B'
+ | o 1: 27547f69f254 public 'B'
|/
- o 0:public 'A'
+ o 0: 4a2df7238c3b public 'A'
$ cd ..
@@ -397,40 +397,40 @@
abort: pretxnclose hook exited with status 1
[255]
$ hg tglogp
- @ 7:secret 'C'
+ @ 7: 401ccec5e39f secret 'C'
|
- | @ 6:secret 'F'
+ | @ 6: a0b2430ebfb8 secret 'F'
| |
- o | 5:public 'B'
+ o | 5: 45396c49d53b public 'B'
| |
- o | 4:public 'E'
+ o | 4: ae36e8e3dfd7 public 'E'
| |
- o | 3:public 'D'
+ o | 3: 46b37eabc604 public 'D'
| |
- | o 2:secret 'C'
+ | o 2: 965c486023db secret 'C'
| |
- | o 1:public 'B'
+ | o 1: 27547f69f254 public 'B'
|/
- o 0:public 'A'
+ o 0: 4a2df7238c3b public 'A'
$ hg rebase --continue
already rebased 2:965c486023db "C" as 401ccec5e39f
rebasing 6:a0b2430ebfb8 "F"
- saved backup bundle to $TESTTMP/hook-pretxnclose/.hg/strip-backup/965c486023db-aa6250e7-rebase.hg (glob)
+ saved backup bundle to $TESTTMP/hook-pretxnclose/.hg/strip-backup/965c486023db-aa6250e7-rebase.hg
$ hg tglogp
- @ 6:secret 'F'
+ @ 6: 6e92a149ac6b secret 'F'
|
- o 5:secret 'C'
+ o 5: 401ccec5e39f secret 'C'
|
- o 4:public 'B'
+ o 4: 45396c49d53b public 'B'
|
- o 3:public 'E'
+ o 3: ae36e8e3dfd7 public 'E'
|
- o 2:public 'D'
+ o 2: 46b37eabc604 public 'D'
|
- | o 1:public 'B'
+ | o 1: 27547f69f254 public 'B'
|/
- o 0:public 'A'
+ o 0: 4a2df7238c3b public 'A'
$ cd ..
@@ -459,7 +459,7 @@
$ hg rebase --continue
rebasing 1:fdaca8533b86 "b"
note: rebase of 1:fdaca8533b86 created no changes to commit
- saved backup bundle to $TESTTMP/repo/.hg/strip-backup/fdaca8533b86-7fd70513-rebase.hg (glob)
+ saved backup bundle to $TESTTMP/repo/.hg/strip-backup/fdaca8533b86-7fd70513-rebase.hg
$ hg resolve --list
$ test -f .hg/merge
[1]
--- a/tests/test-rebase-issue-noparam-single-rev.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-rebase-issue-noparam-single-rev.t Mon Jan 22 17:53:02 2018 -0500
@@ -6,7 +6,7 @@
> publish=False
>
> [alias]
- > tglog = log -G --template "{rev}: '{desc}' {branches}\n"
+ > tglog = log -G --template "{rev}: {node|short} '{desc}' {branches}\n"
> EOF
@@ -37,15 +37,15 @@
adding r2
$ hg tglog
- @ 4: 'r2'
+ @ 4: 225af64d03e6 'r2'
|
- o 3: 'r1'
+ o 3: 8d0a8c99b309 'r1'
|
- | o 2: 'l1'
+ | o 2: 87c180a611f2 'l1'
|/
- o 1: 'c2'
+ o 1: 56daeba07f4b 'c2'
|
- o 0: 'c1'
+ o 0: e8faad3d03ff 'c1'
Rebase with no arguments - single revision in source branch:
@@ -53,18 +53,18 @@
$ hg rebase
rebasing 2:87c180a611f2 "l1"
- saved backup bundle to $TESTTMP/a/.hg/strip-backup/87c180a611f2-a5be192d-rebase.hg (glob)
+ saved backup bundle to $TESTTMP/a/.hg/strip-backup/87c180a611f2-a5be192d-rebase.hg
$ hg tglog
- @ 4: 'l1'
+ @ 4: b1152cc99655 'l1'
|
- o 3: 'r2'
+ o 3: 225af64d03e6 'r2'
|
- o 2: 'r1'
+ o 2: 8d0a8c99b309 'r1'
|
- o 1: 'c2'
+ o 1: 56daeba07f4b 'c2'
|
- o 0: 'c1'
+ o 0: e8faad3d03ff 'c1'
$ cd ..
@@ -96,15 +96,15 @@
created new head
$ hg tglog
- @ 4: 'r1'
+ @ 4: 8d0a8c99b309 'r1'
|
- | o 3: 'l2'
+ | o 3: 1ac923b736ef 'l2'
| |
- | o 2: 'l1'
+ | o 2: 87c180a611f2 'l1'
|/
- o 1: 'c2'
+ o 1: 56daeba07f4b 'c2'
|
- o 0: 'c1'
+ o 0: e8faad3d03ff 'c1'
Rebase with no arguments - single revision in target branch:
@@ -113,18 +113,18 @@
$ hg rebase
rebasing 2:87c180a611f2 "l1"
rebasing 3:1ac923b736ef "l2"
- saved backup bundle to $TESTTMP/b/.hg/strip-backup/87c180a611f2-b980535c-rebase.hg (glob)
+ saved backup bundle to $TESTTMP/b/.hg/strip-backup/87c180a611f2-b980535c-rebase.hg
$ hg tglog
- @ 4: 'l2'
+ @ 4: 023181307ed0 'l2'
|
- o 3: 'l1'
+ o 3: 913ab52b43b4 'l1'
|
- o 2: 'r1'
+ o 2: 8d0a8c99b309 'r1'
|
- o 1: 'c2'
+ o 1: 56daeba07f4b 'c2'
|
- o 0: 'c1'
+ o 0: e8faad3d03ff 'c1'
$ cd ..
--- a/tests/test-rebase-legacy.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-rebase-legacy.t Mon Jan 22 17:53:02 2018 -0500
@@ -47,7 +47,7 @@
rebasing 7:d2fa1c02b240 "G" (G)
rebasing 9:6582e6951a9c "H" (H tip)
warning: orphaned descendants detected, not stripping c1e6b162678d, de008c61a447
- saved backup bundle to $TESTTMP/.hg/strip-backup/6f7a236de685-9880a3dc-rebase.hg (glob)
+ saved backup bundle to $TESTTMP/.hg/strip-backup/6f7a236de685-9880a3dc-rebase.hg
$ hg log -G -T '{rev}:{node|short} {desc}\n'
o 11:721b8da0a708 H
--- a/tests/test-rebase-mq-skip.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-rebase-mq-skip.t Mon Jan 22 17:53:02 2018 -0500
@@ -12,7 +12,7 @@
> publish=False
>
> [alias]
- > tglog = log -G --template "{rev}: '{desc}' tags: {tags}\n"
+ > tglog = log -G --template "{rev}: {node|short} '{desc}' tags: {tags}\n"
> EOF
@@ -67,14 +67,14 @@
note: rebase of 3:148775c71080 created no changes to commit
rebase merging completed
updating mq patch p0.patch to 5:9ecc820b1737
- $TESTTMP/a/.hg/patches/p0.patch (glob)
+ $TESTTMP/a/.hg/patches/p0.patch
2 changesets found
uncompressed size of bundle content:
348 (changelog)
324 (manifests)
129 p0
129 p1
- saved backup bundle to $TESTTMP/a/.hg/strip-backup/13a46ce44f60-5da6ecfb-rebase.hg (glob)
+ saved backup bundle to $TESTTMP/a/.hg/strip-backup/13a46ce44f60-5da6ecfb-rebase.hg
2 changesets found
uncompressed size of bundle content:
403 (changelog)
@@ -90,13 +90,13 @@
1 revisions have been skipped
$ hg tglog
- @ 3: 'P0' tags: p0.patch qbase qtip tip
+ @ 3: 9ecc820b1737 'P0' tags: p0.patch qbase qtip tip
|
- o 2: 'P1' tags: qparent
+ o 2: 869d8b134a27 'P1' tags: qparent
|
- o 1: 'R1' tags:
+ o 1: da108f2755df 'R1' tags:
|
- o 0: 'C1' tags:
+ o 0: cd320d50b341 'C1' tags:
$ cd ..
@@ -166,26 +166,26 @@
rebasing 5:681a378595ba "r5" (r5)
rebasing 6:512a1f24768b "r6" (qtip r6)
note: rebase of 6:512a1f24768b created no changes to commit
- saved backup bundle to $TESTTMP/b/.hg/strip-backup/b4bffa6e4776-b9bfb84d-rebase.hg (glob)
+ saved backup bundle to $TESTTMP/b/.hg/strip-backup/b4bffa6e4776-b9bfb84d-rebase.hg
$ hg tglog
- @ 8: 'r5' tags: qtip r5 tip
+ @ 8: 0b9735ce8f0a 'r5' tags: qtip r5 tip
|
- o 7: 'r2' tags: qbase r2
+ o 7: 1660ab13ce9a 'r2' tags: qbase r2
|
- o 6: 'branch2-r6' tags: qparent
+ o 6: 057f55ff8f44 'branch2-r6' tags: qparent
|
- o 5: 'branch2-r4' tags:
+ o 5: 1d7287f8deb1 'branch2-r4' tags:
|
- o 4: 'branch2-r8' tags:
+ o 4: 3c10b9db2bd5 'branch2-r8' tags:
|
- o 3: 'branch2-r7' tags:
+ o 3: b684023158dc 'branch2-r7' tags:
|
- o 2: 'branch2-r3' tags:
+ o 2: d817754b1251 'branch2-r3' tags:
|
- o 1: 'branch2-r1' tags:
+ o 1: 0621a206f8a4 'branch2-r1' tags:
|
- o 0: 'r0' tags:
+ o 0: 222799e2f90b 'r0' tags:
$ cd ..
--- a/tests/test-rebase-mq.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-rebase-mq.t Mon Jan 22 17:53:02 2018 -0500
@@ -7,7 +7,7 @@
> plain=true
>
> [alias]
- > tglog = log -G --template "{rev}: '{desc}' tags: {tags}\n"
+ > tglog = log -G --template "{rev}: {node|short} '{desc}' tags: {tags}\n"
> EOF
@@ -33,13 +33,13 @@
$ hg qref -m P1 -d '2 0'
$ hg tglog
- @ 3: 'P1' tags: f2.patch qtip tip
+ @ 3: 929394423cd3 'P1' tags: f2.patch qtip tip
|
- o 2: 'P0' tags: f.patch qbase
+ o 2: 3504f44bffc0 'P0' tags: f.patch qbase
|
- | o 1: 'R1' tags:
+ | o 1: bac9ed9960d8 'R1' tags:
|/
- o 0: 'C1' tags: qparent
+ o 0: 36f36ddbca61 'C1' tags: qparent
Rebase - try to rebase on an applied mq patch:
@@ -88,16 +88,16 @@
$ hg rebase -c
already rebased 2:3504f44bffc0 "P0" (f.patch qbase) as ebe9914c0d1c
rebasing 3:929394423cd3 "P1" (f2.patch qtip)
- saved backup bundle to $TESTTMP/a/.hg/strip-backup/3504f44bffc0-30595b40-rebase.hg (glob)
+ saved backup bundle to $TESTTMP/a/.hg/strip-backup/3504f44bffc0-30595b40-rebase.hg
$ hg tglog
- @ 3: 'P1' tags: f2.patch qtip tip
+ @ 3: 462012cf340c 'P1' tags: f2.patch qtip tip
|
- o 2: 'P0' tags: f.patch qbase
+ o 2: ebe9914c0d1c 'P0' tags: f.patch qbase
|
- o 1: 'R1' tags: qparent
+ o 1: bac9ed9960d8 'R1' tags: qparent
|
- o 0: 'C1' tags:
+ o 0: 36f36ddbca61 'C1' tags:
$ hg up -q qbase
@@ -205,7 +205,7 @@
$ hg rebase -s 2 -d 1
rebasing 2:0c587ffcb480 "P0 (git)" (f_git.patch qbase)
rebasing 3:c7f18665e4bc "P1" (f.patch qtip tip)
- saved backup bundle to $TESTTMP/a/.hg/strip-backup/0c587ffcb480-0ea5695f-rebase.hg (glob)
+ saved backup bundle to $TESTTMP/a/.hg/strip-backup/0c587ffcb480-0ea5695f-rebase.hg
$ hg qci -m 'save patch state'
@@ -298,11 +298,11 @@
foo: +baz
$ hg tglog
- @ 2: 'imported patch bar' tags: bar qtip tip
+ @ 2: 4f44b861d38c 'imported patch bar' tags: bar qtip tip
|
- o 1: 'important commit message' tags: empty-important qbase
+ o 1: 0aaf4c3af7eb 'important commit message' tags: empty-important qbase
|
- o 0: 'a' tags: qparent
+ o 0: cb9a9f314b8b 'a' tags: qparent
Create new head to rebase bar onto:
@@ -318,13 +318,13 @@
$ hg qref
$ hg tglog
- @ 3: '[mq]: bar' tags: bar qtip tip
+ @ 3: d526d4536ed6 '[mq]: bar' tags: bar qtip tip
|
- | o 2: 'b' tags:
+ | o 2: d2ae7f538514 'b' tags:
| |
- o | 1: 'important commit message' tags: empty-important qbase
+ o | 1: 0aaf4c3af7eb 'important commit message' tags: empty-important qbase
|/
- o 0: 'a' tags: qparent
+ o 0: cb9a9f314b8b 'a' tags: qparent
Rebase bar (make sure series order is preserved and empty-important also is
@@ -351,10 +351,10 @@
foo: +baz
$ hg tglog
- @ 2: '[mq]: bar' tags: bar qbase qtip tip
+ @ 2: 477d948bb2af '[mq]: bar' tags: bar qbase qtip tip
|
- o 1: 'b' tags: qparent
+ o 1: d2ae7f538514 'b' tags: qparent
|
- o 0: 'a' tags:
+ o 0: cb9a9f314b8b 'a' tags:
$ cd ..
--- a/tests/test-rebase-named-branches.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-rebase-named-branches.t Mon Jan 22 17:53:02 2018 -0500
@@ -6,7 +6,7 @@
> publish=False
>
> [alias]
- > tglog = log -G --template "{rev}: '{desc}' {branches}\n"
+ > tglog = log -G --template "{rev}: {node|short} '{desc}' {branches}\n"
> EOF
$ hg init a
@@ -45,25 +45,25 @@
$ hg ci -m 'dev-two named branch'
$ hg tglog
- @ 9: 'dev-two named branch' dev-two
+ @ 9: cb039b7cae8e 'dev-two named branch' dev-two
|
- | o 8: 'dev-one named branch' dev-one
+ | o 8: 643fc9128048 'dev-one named branch' dev-one
| |
- o | 7: 'H'
+ o | 7: 02de42196ebe 'H'
| |
- +---o 6: 'G'
+ +---o 6: eea13746799a 'G'
| | |
- o | | 5: 'F'
+ o | | 5: 24b6387c8c8c 'F'
| | |
- +---o 4: 'E'
+ +---o 4: 9520eea781bc 'E'
| |
- | o 3: 'D'
+ | o 3: 32af7686d403 'D'
| |
- | o 2: 'C'
+ | o 2: 5fddd98957c8 'C'
| |
- | o 1: 'B'
+ | o 1: 42ccdea3bb16 'B'
|/
- o 0: 'A'
+ o 0: cd010b8cd998 'A'
Branch name containing a dash (issue3181)
@@ -73,28 +73,28 @@
rebasing 6:eea13746799a "G"
rebasing 7:02de42196ebe "H"
rebasing 9:cb039b7cae8e "dev-two named branch" (tip)
- saved backup bundle to $TESTTMP/a1/.hg/strip-backup/24b6387c8c8c-24cb8001-rebase.hg (glob)
+ saved backup bundle to $TESTTMP/a1/.hg/strip-backup/24b6387c8c8c-24cb8001-rebase.hg
$ hg tglog
- @ 9: 'dev-two named branch' dev-two
+ @ 9: 9e70cd31750f 'dev-two named branch' dev-two
|
- o 8: 'H'
+ o 8: 31d0e4ba75e6 'H'
|
- | o 7: 'G'
+ | o 7: 4b988a958030 'G'
|/|
- o | 6: 'F'
+ o | 6: 24de4aff8e28 'F'
| |
- o | 5: 'dev-one named branch' dev-one
+ o | 5: 643fc9128048 'dev-one named branch' dev-one
| |
- | o 4: 'E'
+ | o 4: 9520eea781bc 'E'
| |
- o | 3: 'D'
+ o | 3: 32af7686d403 'D'
| |
- o | 2: 'C'
+ o | 2: 5fddd98957c8 'C'
| |
- o | 1: 'B'
+ o | 1: 42ccdea3bb16 'B'
|/
- o 0: 'A'
+ o 0: cd010b8cd998 'A'
$ hg rebase -s dev-one -d 0 --keepbranches
rebasing 5:643fc9128048 "dev-one named branch"
@@ -102,28 +102,28 @@
rebasing 7:4b988a958030 "G"
rebasing 8:31d0e4ba75e6 "H"
rebasing 9:9e70cd31750f "dev-two named branch" (tip)
- saved backup bundle to $TESTTMP/a1/.hg/strip-backup/643fc9128048-c4ee9ef5-rebase.hg (glob)
+ saved backup bundle to $TESTTMP/a1/.hg/strip-backup/643fc9128048-c4ee9ef5-rebase.hg
$ hg tglog
- @ 9: 'dev-two named branch' dev-two
+ @ 9: 59c2e59309fe 'dev-two named branch' dev-two
|
- o 8: 'H'
+ o 8: 904590360559 'H'
|
- | o 7: 'G'
+ | o 7: 1a1e6f72ec38 'G'
|/|
- o | 6: 'F'
+ o | 6: 42aa3cf0fa7a 'F'
| |
- o | 5: 'dev-one named branch' dev-one
+ o | 5: bc8139ee757c 'dev-one named branch' dev-one
| |
- | o 4: 'E'
+ | o 4: 9520eea781bc 'E'
|/
- | o 3: 'D'
+ | o 3: 32af7686d403 'D'
| |
- | o 2: 'C'
+ | o 2: 5fddd98957c8 'C'
| |
- | o 1: 'B'
+ | o 1: 42ccdea3bb16 'B'
|/
- o 0: 'A'
+ o 0: cd010b8cd998 'A'
$ hg update 3
3 files updated, 0 files merged, 3 files removed, 0 files unresolved
@@ -133,27 +133,27 @@
created new head
$ hg tglog
- @ 10: 'dev-one named branch' dev-one
+ @ 10: 643fc9128048 'dev-one named branch' dev-one
|
- | o 9: 'dev-two named branch' dev-two
+ | o 9: 59c2e59309fe 'dev-two named branch' dev-two
| |
- | o 8: 'H'
+ | o 8: 904590360559 'H'
| |
- | | o 7: 'G'
+ | | o 7: 1a1e6f72ec38 'G'
| |/|
- | o | 6: 'F'
+ | o | 6: 42aa3cf0fa7a 'F'
| | |
- | o | 5: 'dev-one named branch' dev-one
+ | o | 5: bc8139ee757c 'dev-one named branch' dev-one
| | |
- | | o 4: 'E'
+ | | o 4: 9520eea781bc 'E'
| |/
- o | 3: 'D'
+ o | 3: 32af7686d403 'D'
| |
- o | 2: 'C'
+ o | 2: 5fddd98957c8 'C'
| |
- o | 1: 'B'
+ o | 1: 42ccdea3bb16 'B'
|/
- o 0: 'A'
+ o 0: cd010b8cd998 'A'
$ hg rebase -b 'max(branch("dev-two"))' -d dev-one --keepbranches
rebasing 5:bc8139ee757c "dev-one named branch"
@@ -162,28 +162,28 @@
rebasing 7:1a1e6f72ec38 "G"
rebasing 8:904590360559 "H"
rebasing 9:59c2e59309fe "dev-two named branch"
- saved backup bundle to $TESTTMP/a1/.hg/strip-backup/bc8139ee757c-f11c1080-rebase.hg (glob)
+ saved backup bundle to $TESTTMP/a1/.hg/strip-backup/bc8139ee757c-f11c1080-rebase.hg
$ hg tglog
- o 9: 'dev-two named branch' dev-two
+ o 9: 71325f8bc082 'dev-two named branch' dev-two
|
- o 8: 'H'
+ o 8: 12b2bc666e20 'H'
|
- | o 7: 'G'
+ | o 7: 549f007a9f5f 'G'
|/|
- o | 6: 'F'
+ o | 6: 679f28760620 'F'
| |
- @ | 5: 'dev-one named branch' dev-one
+ @ | 5: 643fc9128048 'dev-one named branch' dev-one
| |
- | o 4: 'E'
+ | o 4: 9520eea781bc 'E'
| |
- o | 3: 'D'
+ o | 3: 32af7686d403 'D'
| |
- o | 2: 'C'
+ o | 2: 5fddd98957c8 'C'
| |
- o | 1: 'B'
+ o | 1: 42ccdea3bb16 'B'
|/
- o 0: 'A'
+ o 0: cd010b8cd998 'A'
$ hg rebase -s 'max(branch("dev-one"))' -d 0 --keepbranches
rebasing 5:643fc9128048 "dev-one named branch"
@@ -191,28 +191,28 @@
rebasing 7:549f007a9f5f "G"
rebasing 8:12b2bc666e20 "H"
rebasing 9:71325f8bc082 "dev-two named branch" (tip)
- saved backup bundle to $TESTTMP/a1/.hg/strip-backup/643fc9128048-6cdd1a52-rebase.hg (glob)
+ saved backup bundle to $TESTTMP/a1/.hg/strip-backup/643fc9128048-6cdd1a52-rebase.hg
$ hg tglog
- o 9: 'dev-two named branch' dev-two
+ o 9: 3944801ae4ea 'dev-two named branch' dev-two
|
- o 8: 'H'
+ o 8: 8e279d293175 'H'
|
- | o 7: 'G'
+ | o 7: aeefee77ab01 'G'
|/|
- o | 6: 'F'
+ o | 6: e908b85f3729 'F'
| |
- @ | 5: 'dev-one named branch' dev-one
+ @ | 5: bc8139ee757c 'dev-one named branch' dev-one
| |
- | o 4: 'E'
+ | o 4: 9520eea781bc 'E'
|/
- | o 3: 'D'
+ | o 3: 32af7686d403 'D'
| |
- | o 2: 'C'
+ | o 2: 5fddd98957c8 'C'
| |
- | o 1: 'B'
+ | o 1: 42ccdea3bb16 'B'
|/
- o 0: 'A'
+ o 0: cd010b8cd998 'A'
$ hg up -r 0 > /dev/null
@@ -222,28 +222,28 @@
rebasing 1:42ccdea3bb16 "B"
rebasing 2:5fddd98957c8 "C"
rebasing 3:32af7686d403 "D"
- saved backup bundle to $TESTTMP/a1/.hg/strip-backup/42ccdea3bb16-3cb021d3-rebase.hg (glob)
+ saved backup bundle to $TESTTMP/a1/.hg/strip-backup/42ccdea3bb16-3cb021d3-rebase.hg
$ hg tglog
- o 9: 'D'
+ o 9: e9f862ce8bad 'D'
|
- o 8: 'C'
+ o 8: a0d543090fa4 'C'
|
- o 7: 'B'
+ o 7: 3bdb949809d9 'B'
|
- o 6: 'dev-two named branch' dev-two
+ o 6: 3944801ae4ea 'dev-two named branch' dev-two
|
- o 5: 'H'
+ o 5: 8e279d293175 'H'
|
- | o 4: 'G'
+ | o 4: aeefee77ab01 'G'
|/|
- o | 3: 'F'
+ o | 3: e908b85f3729 'F'
| |
- o | 2: 'dev-one named branch' dev-one
+ o | 2: bc8139ee757c 'dev-one named branch' dev-one
| |
- | o 1: 'E'
+ | o 1: 9520eea781bc 'E'
|/
- @ 0: 'A'
+ @ 0: cd010b8cd998 'A'
$ hg rebase -s 5 -d 6
abort: source and destination form a cycle
@@ -254,28 +254,28 @@
rebasing 7:3bdb949809d9 "B"
rebasing 8:a0d543090fa4 "C"
rebasing 9:e9f862ce8bad "D" (tip)
- saved backup bundle to $TESTTMP/a1/.hg/strip-backup/3944801ae4ea-fb46ed74-rebase.hg (glob)
+ saved backup bundle to $TESTTMP/a1/.hg/strip-backup/3944801ae4ea-fb46ed74-rebase.hg
$ hg tglog
- o 9: 'D'
+ o 9: e522577ccdbd 'D'
|
- o 8: 'C'
+ o 8: 810110211f50 'C'
|
- o 7: 'B'
+ o 7: 160b0930ccc6 'B'
|
- o 6: 'dev-two named branch'
+ o 6: c57724c84928 'dev-two named branch'
|
- o 5: 'H'
+ o 5: 8e279d293175 'H'
|
- | o 4: 'G'
+ | o 4: aeefee77ab01 'G'
|/|
- o | 3: 'F'
+ o | 3: e908b85f3729 'F'
| |
- o | 2: 'dev-one named branch' dev-one
+ o | 2: bc8139ee757c 'dev-one named branch' dev-one
| |
- | o 1: 'E'
+ | o 1: 9520eea781bc 'E'
|/
- @ 0: 'A'
+ @ 0: cd010b8cd998 'A'
Reopen branch by rebase
@@ -291,7 +291,7 @@
rebasing 7:160b0930ccc6 "B"
rebasing 8:810110211f50 "C"
rebasing 9:e522577ccdbd "D"
- saved backup bundle to $TESTTMP/a1/.hg/strip-backup/8e279d293175-b023e27c-rebase.hg (glob)
+ saved backup bundle to $TESTTMP/a1/.hg/strip-backup/8e279d293175-b023e27c-rebase.hg
$ cd ..
@@ -314,13 +314,13 @@
$ hg ci -m 'c1'
$ hg tglog
- @ 3: 'c1' c
+ @ 3: c062e3ecd6c6 'c1' c
|
- | o 2: 'b2' b
+ | o 2: 792845bb77ee 'b2' b
|/
- | o 1: 'b1' b
+ | o 1: 40039acb7ca5 'b1' b
|/
- o 0: '0'
+ o 0: d681519c3ea7 '0'
$ hg clone -q . ../case2
@@ -330,13 +330,13 @@
$ hg rebase
rebasing 2:792845bb77ee "b2"
note: rebase of 2:792845bb77ee created no changes to commit
- saved backup bundle to $TESTTMP/case1/.hg/strip-backup/792845bb77ee-627120ee-rebase.hg (glob)
+ saved backup bundle to $TESTTMP/case1/.hg/strip-backup/792845bb77ee-627120ee-rebase.hg
$ hg tglog
- o 2: 'c1' c
+ o 2: c062e3ecd6c6 'c1' c
|
- | @ 1: 'b1' b
+ | @ 1: 40039acb7ca5 'b1' b
|/
- o 0: '0'
+ o 0: d681519c3ea7 '0'
rebase 'b1' on top of the tip of the branch ('b2') - ignoring the tip branch ('c1')
@@ -345,15 +345,15 @@
$ hg up -qr 1
$ hg rebase
rebasing 1:40039acb7ca5 "b1"
- saved backup bundle to $TESTTMP/case2/.hg/strip-backup/40039acb7ca5-342b72d1-rebase.hg (glob)
+ saved backup bundle to $TESTTMP/case2/.hg/strip-backup/40039acb7ca5-342b72d1-rebase.hg
$ hg tglog
- @ 3: 'b1' b
+ @ 3: 76abc1c6f8c7 'b1' b
|
- | o 2: 'c1' c
+ | o 2: c062e3ecd6c6 'c1' c
| |
- o | 1: 'b2' b
+ o | 1: 792845bb77ee 'b2' b
|/
- o 0: '0'
+ o 0: d681519c3ea7 '0'
rebase 'c1' to the branch head 'c2' that is closed
@@ -362,30 +362,30 @@
$ hg ci -qm 'c2 closed' --close
$ hg up -qr 2
$ hg tglog
- _ 4: 'c2 closed' c
+ _ 4: 8427af5d86f2 'c2 closed' c
|
- o 3: 'b1' b
+ o 3: 76abc1c6f8c7 'b1' b
|
- | @ 2: 'c1' c
+ | @ 2: c062e3ecd6c6 'c1' c
| |
- o | 1: 'b2' b
+ o | 1: 792845bb77ee 'b2' b
|/
- o 0: '0'
+ o 0: d681519c3ea7 '0'
$ hg rebase
abort: branch 'c' has one head - please rebase to an explicit rev
(run 'hg heads' to see all heads)
[255]
$ hg tglog
- _ 4: 'c2 closed' c
+ _ 4: 8427af5d86f2 'c2 closed' c
|
- o 3: 'b1' b
+ o 3: 76abc1c6f8c7 'b1' b
|
- | @ 2: 'c1' c
+ | @ 2: c062e3ecd6c6 'c1' c
| |
- o | 1: 'b2' b
+ o | 1: 792845bb77ee 'b2' b
|/
- o 0: '0'
+ o 0: d681519c3ea7 '0'
$ hg up -cr 1
@@ -396,15 +396,15 @@
rebasing 3:76abc1c6f8c7 "b1"
rebasing 4:8427af5d86f2 "c2 closed" (tip)
note: rebase of 4:8427af5d86f2 created no changes to commit
- saved backup bundle to $TESTTMP/case2/.hg/strip-backup/76abc1c6f8c7-cd698d13-rebase.hg (glob)
+ saved backup bundle to $TESTTMP/case2/.hg/strip-backup/76abc1c6f8c7-cd698d13-rebase.hg
$ hg tglog
- o 3: 'b1' x
+ o 3: 117b0ed08075 'b1' x
|
- | o 2: 'c1' c
+ | o 2: c062e3ecd6c6 'c1' c
| |
- @ | 1: 'b2' b
+ @ | 1: 792845bb77ee 'b2' b
|/
- o 0: '0'
+ o 0: d681519c3ea7 '0'
$ cd ..
--- a/tests/test-rebase-newancestor.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-rebase-newancestor.t Mon Jan 22 17:53:02 2018 -0500
@@ -5,7 +5,7 @@
> rebase=
> drawdag=$TESTDIR/drawdag.py
> [alias]
- > tglog = log -G --template "{rev}: '{desc}' {branches}\n"
+ > tglog = log -G --template "{rev}: {node|short} '{desc}' {branches}\n"
> EOF
$ hg init repo
@@ -31,29 +31,29 @@
created new head
$ hg tglog
- @ 3: 'AD'
+ @ 3: 3878212183bd 'AD'
|
- | o 2: 'C'
+ | o 2: 30ae917c0e4f 'C'
| |
- | o 1: 'B'
+ | o 1: 0f4f7cb4f549 'B'
|/
- o 0: 'A'
+ o 0: 1e635d440a73 'A'
$ hg rebase -s 1 -d 3
rebasing 1:0f4f7cb4f549 "B"
merging a
rebasing 2:30ae917c0e4f "C"
merging a
- saved backup bundle to $TESTTMP/repo/.hg/strip-backup/0f4f7cb4f549-82b3b163-rebase.hg (glob)
+ saved backup bundle to $TESTTMP/repo/.hg/strip-backup/0f4f7cb4f549-82b3b163-rebase.hg
$ hg tglog
- o 3: 'C'
+ o 3: 25773bc4b4b0 'C'
|
- o 2: 'B'
+ o 2: c09015405f75 'B'
|
- @ 1: 'AD'
+ @ 1: 3878212183bd 'AD'
|
- o 0: 'A'
+ o 0: 1e635d440a73 'A'
$ cd ..
@@ -108,21 +108,21 @@
$ hg ci -Aqm 'default: f-other stuff'
$ hg tglog
- @ 7: 'default: f-other stuff'
+ @ 7: e08089805d82 'default: f-other stuff'
|
- | o 6: 'dev: merge default' dev
+ | o 6: 9455ee510502 'dev: merge default' dev
|/|
- o | 5: 'default: remove f-default'
+ o | 5: 462860db70a1 'default: remove f-default'
| |
- | o 4: 'dev: merge default' dev
+ | o 4: 4b019212aaf6 'dev: merge default' dev
|/|
- o | 3: 'default: f-default stuff'
+ o | 3: f157ecfd2b6b 'default: f-default stuff'
| |
- | o 2: 'dev: f-dev stuff' dev
+ | o 2: ec2c14fb2984 'dev: f-dev stuff' dev
| |
- | o 1: 'dev: create branch' dev
+ | o 1: 1d1a643d390e 'dev: create branch' dev
|/
- o 0: 'default: create f-default'
+ o 0: e90e8eb90b6f 'default: create f-default'
$ hg clone -qU . ../ancestor-merge-2
@@ -138,21 +138,21 @@
other [source] changed f-default which local [dest] deleted
use (c)hanged version, leave (d)eleted, or leave (u)nresolved? c
rebasing 6:9455ee510502 "dev: merge default"
- saved backup bundle to $TESTTMP/ancestor-merge/.hg/strip-backup/1d1a643d390e-43e9e04b-rebase.hg (glob)
+ saved backup bundle to $TESTTMP/ancestor-merge/.hg/strip-backup/1d1a643d390e-43e9e04b-rebase.hg
$ hg tglog
- o 6: 'dev: merge default'
+ o 6: fbc098e72227 'dev: merge default'
|
- o 5: 'dev: merge default'
+ o 5: eda7b7f46f5d 'dev: merge default'
|
- o 4: 'dev: f-dev stuff'
+ o 4: 3e075b1c0a40 'dev: f-dev stuff'
|
- @ 3: 'default: f-other stuff'
+ @ 3: e08089805d82 'default: f-other stuff'
|
- o 2: 'default: remove f-default'
+ o 2: 462860db70a1 'default: remove f-default'
|
- o 1: 'default: f-default stuff'
+ o 1: f157ecfd2b6b 'default: f-default stuff'
|
- o 0: 'default: create f-default'
+ o 0: e90e8eb90b6f 'default: create f-default'
Grafty cherry picking rebasing:
@@ -167,23 +167,23 @@
other [source] changed f-default which local [dest] deleted
use (c)hanged version, leave (d)eleted, or leave (u)nresolved? c
rebasing 6:9455ee510502 "dev: merge default"
- saved backup bundle to $TESTTMP/ancestor-merge-2/.hg/strip-backup/ec2c14fb2984-62d0b222-rebase.hg (glob)
+ saved backup bundle to $TESTTMP/ancestor-merge-2/.hg/strip-backup/ec2c14fb2984-62d0b222-rebase.hg
$ hg tglog
- o 7: 'dev: merge default'
+ o 7: fbc098e72227 'dev: merge default'
|
- o 6: 'dev: merge default'
+ o 6: eda7b7f46f5d 'dev: merge default'
|
- o 5: 'dev: f-dev stuff'
+ o 5: 3e075b1c0a40 'dev: f-dev stuff'
|
- o 4: 'default: f-other stuff'
+ o 4: e08089805d82 'default: f-other stuff'
|
- o 3: 'default: remove f-default'
+ o 3: 462860db70a1 'default: remove f-default'
|
- o 2: 'default: f-default stuff'
+ o 2: f157ecfd2b6b 'default: f-default stuff'
|
- | o 1: 'dev: create branch' dev
+ | o 1: 1d1a643d390e 'dev: create branch' dev
|/
- o 0: 'default: create f-default'
+ o 0: e90e8eb90b6f 'default: create f-default'
$ cd ..
@@ -225,21 +225,21 @@
summary: merge p1 1=ancestor p2 3=outside
$ hg tglog
- @ 5: 'merge p1 1=ancestor p2 3=outside'
+ @ 5: a57575f79074 'merge p1 1=ancestor p2 3=outside'
|\
- +---o 4: 'merge p1 3=outside p2 1=ancestor'
+ +---o 4: 6990226659be 'merge p1 3=outside p2 1=ancestor'
| |/
- | o 3: 'outside'
+ | o 3: f59da8fc0fcf 'outside'
| |
- +---o 2: 'target'
+ +---o 2: a60552eb93fb 'target'
| |
- o | 1: 'change'
+ o | 1: dd40c13f7a6f 'change'
|/
- o 0: 'common'
+ o 0: 02f0f58d5300 'common'
$ hg rebase -r 4 -d 2
rebasing 4:6990226659be "merge p1 3=outside p2 1=ancestor"
- saved backup bundle to $TESTTMP/parentorder/.hg/strip-backup/6990226659be-4d67a0d3-rebase.hg (glob)
+ saved backup bundle to $TESTTMP/parentorder/.hg/strip-backup/6990226659be-4d67a0d3-rebase.hg
$ hg tip
changeset: 5:cca50676b1c5
tag: tip
@@ -251,7 +251,7 @@
$ hg rebase -r 4 -d 2
rebasing 4:a57575f79074 "merge p1 1=ancestor p2 3=outside"
- saved backup bundle to $TESTTMP/parentorder/.hg/strip-backup/a57575f79074-385426e5-rebase.hg (glob)
+ saved backup bundle to $TESTTMP/parentorder/.hg/strip-backup/a57575f79074-385426e5-rebase.hg
$ hg tip
changeset: 5:f9daf77ffe76
tag: tip
@@ -262,17 +262,17 @@
summary: merge p1 1=ancestor p2 3=outside
$ hg tglog
- @ 5: 'merge p1 1=ancestor p2 3=outside'
+ @ 5: f9daf77ffe76 'merge p1 1=ancestor p2 3=outside'
|\
- +---o 4: 'merge p1 3=outside p2 1=ancestor'
+ +---o 4: cca50676b1c5 'merge p1 3=outside p2 1=ancestor'
| |/
- | o 3: 'outside'
+ | o 3: f59da8fc0fcf 'outside'
| |
- o | 2: 'target'
+ o | 2: a60552eb93fb 'target'
| |
- o | 1: 'change'
+ o | 1: dd40c13f7a6f 'change'
|/
- o 0: 'common'
+ o 0: 02f0f58d5300 'common'
rebase of merge of ancestors
@@ -307,7 +307,7 @@
199 (changelog)
216 (manifests)
182 other
- saved backup bundle to $TESTTMP/parentorder/.hg/strip-backup/4c5f12f25ebe-f46990e5-rebase.hg (glob)
+ saved backup bundle to $TESTTMP/parentorder/.hg/strip-backup/4c5f12f25ebe-f46990e5-rebase.hg
1 changesets found
uncompressed size of bundle content:
254 (changelog)
@@ -320,19 +320,19 @@
added 1 changesets with 1 changes to 1 files
rebase completed
$ hg tglog
- @ 6: 'merge rebase ancestors'
+ @ 6: 113755df812b 'merge rebase ancestors'
|
- o 5: 'merge p1 1=ancestor p2 3=outside'
+ o 5: f9daf77ffe76 'merge p1 1=ancestor p2 3=outside'
|\
- +---o 4: 'merge p1 3=outside p2 1=ancestor'
+ +---o 4: cca50676b1c5 'merge p1 3=outside p2 1=ancestor'
| |/
- | o 3: 'outside'
+ | o 3: f59da8fc0fcf 'outside'
| |
- o | 2: 'target'
+ o | 2: a60552eb93fb 'target'
| |
- o | 1: 'change'
+ o | 1: dd40c13f7a6f 'change'
|/
- o 0: 'common'
+ o 0: 02f0f58d5300 'common'
Due to the limitation of 3-way merge algorithm (1 merge base), rebasing a merge
may include unwanted content:
@@ -374,7 +374,7 @@
rebasing 3:c1e6b162678d "B" (B)
rebasing 4:d6003a550c2c "C" (C)
rebasing 5:c8f78076273e "D" (D tip)
- saved backup bundle to $TESTTMP/dual-merge-base2/.hg/strip-backup/d6003a550c2c-6f1424b6-rebase.hg (glob)
+ saved backup bundle to $TESTTMP/dual-merge-base2/.hg/strip-backup/d6003a550c2c-6f1424b6-rebase.hg
$ hg manifest -r 'desc(D)'
B
C
@@ -395,7 +395,7 @@
$ hg rebase -r D+F -d Z
rebasing 3:004dc1679908 "D" (D)
rebasing 5:4be4cbf6f206 "F" (F tip)
- saved backup bundle to $TESTTMP/chosen-merge-base1/.hg/strip-backup/004dc1679908-06a66a3c-rebase.hg (glob)
+ saved backup bundle to $TESTTMP/chosen-merge-base1/.hg/strip-backup/004dc1679908-06a66a3c-rebase.hg
$ hg manifest -r 'desc(F)'
C
D
@@ -416,7 +416,7 @@
$ hg rebase -r E+F -d Z
rebasing 4:974e4943c210 "E" (E)
rebasing 5:4be4cbf6f206 "F" (F tip)
- saved backup bundle to $TESTTMP/chosen-merge-base2/.hg/strip-backup/974e4943c210-b2874da5-rebase.hg (glob)
+ saved backup bundle to $TESTTMP/chosen-merge-base2/.hg/strip-backup/974e4943c210-b2874da5-rebase.hg
$ hg manifest -r 'desc(F)'
B
D
--- a/tests/test-rebase-obsolete.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-rebase-obsolete.t Mon Jan 22 17:53:02 2018 -0500
@@ -6,7 +6,7 @@
$ cat >> $HGRCPATH << EOF
> [ui]
- > logtemplate= {rev}:{node|short} {desc|firstline}
+ > logtemplate= {rev}:{node|short} {desc|firstline}{if(obsolete,' ({obsfate})')}
> [experimental]
> evolution.createmarkers=True
> evolution.allowunstable=True
@@ -94,18 +94,18 @@
| |
o | 4:9520eea781bc E
|/
- | x 3:32af7686d403 D
+ | x 3:32af7686d403 D (rewritten using rebase as 10:8eeb3c33ad33)
| |
- | x 2:5fddd98957c8 C
+ | x 2:5fddd98957c8 C (rewritten using rebase as 9:2327fea05063)
| |
- | x 1:42ccdea3bb16 B
+ | x 1:42ccdea3bb16 B (rewritten using rebase as 8:e4e5be0395b2)
|/
o 0:cd010b8cd998 A
$ hg debugobsolete
- 42ccdea3bb16d28e1848c95fe2e44c000f3f21b1 e4e5be0395b2cbd471ed22a26b1b6a1a0658a794 0 (Thu Jan 01 00:00:00 1970 +0000) {'operation': 'rebase', 'user': 'test'}
- 5fddd98957c8a54a4d436dfe1da9d87f21a1b97b 2327fea05063f39961b14cb69435a9898dc9a245 0 (Thu Jan 01 00:00:00 1970 +0000) {'operation': 'rebase', 'user': 'test'}
- 32af7686d403cf45b5d95f2d70cebea587ac806a 8eeb3c33ad33d452c89e5dcf611c347f978fb42b 0 (Thu Jan 01 00:00:00 1970 +0000) {'operation': 'rebase', 'user': 'test'}
+ 42ccdea3bb16d28e1848c95fe2e44c000f3f21b1 e4e5be0395b2cbd471ed22a26b1b6a1a0658a794 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '4', 'operation': 'rebase', 'user': 'test'}
+ 5fddd98957c8a54a4d436dfe1da9d87f21a1b97b 2327fea05063f39961b14cb69435a9898dc9a245 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '4', 'operation': 'rebase', 'user': 'test'}
+ 32af7686d403cf45b5d95f2d70cebea587ac806a 8eeb3c33ad33d452c89e5dcf611c347f978fb42b 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '4', 'operation': 'rebase', 'user': 'test'}
$ cd ..
@@ -164,35 +164,36 @@
| |
o | 4:9520eea781bc E
|/
- | x 3:32af7686d403 D
+ | x 3:32af7686d403 D (pruned using rebase)
| |
- | x 2:5fddd98957c8 C
+ | x 2:5fddd98957c8 C (rewritten using rebase as 10:5ae4c968c6ac)
| |
- | x 1:42ccdea3bb16 B
+ | x 1:42ccdea3bb16 B (pruned using rebase)
|/
o 0:cd010b8cd998 A
$ hg debugobsolete
- 42ccdea3bb16d28e1848c95fe2e44c000f3f21b1 0 {cd010b8cd998f3981a5a8115f94f8da4ab506089} (Thu Jan 01 00:00:00 1970 +0000) {'operation': 'rebase', 'user': 'test'}
- 5fddd98957c8a54a4d436dfe1da9d87f21a1b97b 5ae4c968c6aca831df823664e706c9d4aa34473d 0 (Thu Jan 01 00:00:00 1970 +0000) {'operation': 'rebase', 'user': 'test'}
- 32af7686d403cf45b5d95f2d70cebea587ac806a 0 {5fddd98957c8a54a4d436dfe1da9d87f21a1b97b} (Thu Jan 01 00:00:00 1970 +0000) {'operation': 'rebase', 'user': 'test'}
+ 42ccdea3bb16d28e1848c95fe2e44c000f3f21b1 0 {cd010b8cd998f3981a5a8115f94f8da4ab506089} (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '0', 'operation': 'rebase', 'user': 'test'}
+ 5fddd98957c8a54a4d436dfe1da9d87f21a1b97b 5ae4c968c6aca831df823664e706c9d4aa34473d 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '4', 'operation': 'rebase', 'user': 'test'}
+ 32af7686d403cf45b5d95f2d70cebea587ac806a 0 {5fddd98957c8a54a4d436dfe1da9d87f21a1b97b} (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '0', 'operation': 'rebase', 'user': 'test'}
More complex case where part of the rebase set were already rebased
$ hg rebase --rev 'desc(D)' --dest 'desc(H)'
rebasing 9:08483444fef9 "D"
+ 1 new orphan changesets
$ hg debugobsolete
- 42ccdea3bb16d28e1848c95fe2e44c000f3f21b1 0 {cd010b8cd998f3981a5a8115f94f8da4ab506089} (Thu Jan 01 00:00:00 1970 +0000) {'operation': 'rebase', 'user': 'test'}
- 5fddd98957c8a54a4d436dfe1da9d87f21a1b97b 5ae4c968c6aca831df823664e706c9d4aa34473d 0 (Thu Jan 01 00:00:00 1970 +0000) {'operation': 'rebase', 'user': 'test'}
- 32af7686d403cf45b5d95f2d70cebea587ac806a 0 {5fddd98957c8a54a4d436dfe1da9d87f21a1b97b} (Thu Jan 01 00:00:00 1970 +0000) {'operation': 'rebase', 'user': 'test'}
- 08483444fef91d6224f6655ee586a65d263ad34c 4596109a6a4328c398bde3a4a3b6737cfade3003 0 (Thu Jan 01 00:00:00 1970 +0000) {'operation': 'rebase', 'user': 'test'}
+ 42ccdea3bb16d28e1848c95fe2e44c000f3f21b1 0 {cd010b8cd998f3981a5a8115f94f8da4ab506089} (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '0', 'operation': 'rebase', 'user': 'test'}
+ 5fddd98957c8a54a4d436dfe1da9d87f21a1b97b 5ae4c968c6aca831df823664e706c9d4aa34473d 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '4', 'operation': 'rebase', 'user': 'test'}
+ 32af7686d403cf45b5d95f2d70cebea587ac806a 0 {5fddd98957c8a54a4d436dfe1da9d87f21a1b97b} (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '0', 'operation': 'rebase', 'user': 'test'}
+ 08483444fef91d6224f6655ee586a65d263ad34c 4596109a6a4328c398bde3a4a3b6737cfade3003 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '4', 'operation': 'rebase', 'user': 'test'}
$ hg log -G
@ 11:4596109a6a43 D
|
- | o 10:5ae4c968c6ac C
+ | * 10:5ae4c968c6ac C
| |
- | x 9:08483444fef9 D
+ | x 9:08483444fef9 D (rewritten using rebase as 11:4596109a6a43)
| |
| o 8:8877864f1edb B
| |
@@ -211,12 +212,12 @@
note: not rebasing 9:08483444fef9 "D", already in destination as 11:4596109a6a43 "D" (tip)
rebasing 10:5ae4c968c6ac "C"
$ hg debugobsolete
- 42ccdea3bb16d28e1848c95fe2e44c000f3f21b1 0 {cd010b8cd998f3981a5a8115f94f8da4ab506089} (Thu Jan 01 00:00:00 1970 +0000) {'operation': 'rebase', 'user': 'test'}
- 5fddd98957c8a54a4d436dfe1da9d87f21a1b97b 5ae4c968c6aca831df823664e706c9d4aa34473d 0 (Thu Jan 01 00:00:00 1970 +0000) {'operation': 'rebase', 'user': 'test'}
- 32af7686d403cf45b5d95f2d70cebea587ac806a 0 {5fddd98957c8a54a4d436dfe1da9d87f21a1b97b} (Thu Jan 01 00:00:00 1970 +0000) {'operation': 'rebase', 'user': 'test'}
- 08483444fef91d6224f6655ee586a65d263ad34c 4596109a6a4328c398bde3a4a3b6737cfade3003 0 (Thu Jan 01 00:00:00 1970 +0000) {'operation': 'rebase', 'user': 'test'}
- 8877864f1edb05d0e07dc4ba77b67a80a7b86672 462a34d07e599b87ea08676a449373fe4e2e1347 0 (Thu Jan 01 00:00:00 1970 +0000) {'operation': 'rebase', 'user': 'test'}
- 5ae4c968c6aca831df823664e706c9d4aa34473d 98f6af4ee9539e14da4465128f894c274900b6e5 0 (Thu Jan 01 00:00:00 1970 +0000) {'operation': 'rebase', 'user': 'test'}
+ 42ccdea3bb16d28e1848c95fe2e44c000f3f21b1 0 {cd010b8cd998f3981a5a8115f94f8da4ab506089} (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '0', 'operation': 'rebase', 'user': 'test'}
+ 5fddd98957c8a54a4d436dfe1da9d87f21a1b97b 5ae4c968c6aca831df823664e706c9d4aa34473d 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '4', 'operation': 'rebase', 'user': 'test'}
+ 32af7686d403cf45b5d95f2d70cebea587ac806a 0 {5fddd98957c8a54a4d436dfe1da9d87f21a1b97b} (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '0', 'operation': 'rebase', 'user': 'test'}
+ 08483444fef91d6224f6655ee586a65d263ad34c 4596109a6a4328c398bde3a4a3b6737cfade3003 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '4', 'operation': 'rebase', 'user': 'test'}
+ 8877864f1edb05d0e07dc4ba77b67a80a7b86672 462a34d07e599b87ea08676a449373fe4e2e1347 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '4', 'operation': 'rebase', 'user': 'test'}
+ 5ae4c968c6aca831df823664e706c9d4aa34473d 98f6af4ee9539e14da4465128f894c274900b6e5 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '4', 'operation': 'rebase', 'user': 'test'}
$ hg log --rev 'contentdivergent()'
$ hg log -G
o 13:98f6af4ee953 C
@@ -280,6 +281,8 @@
even though it is hidden (until we're moved there).
$ hg --hidden up -qr 'first(hidden())'
+ updating to a hidden changeset 42ccdea3bb16
+ (hidden revision '42ccdea3bb16' is pruned)
$ hg rebase --rev 13 --dest 15
rebasing 13:98f6af4ee953 "C"
$ hg log -G
@@ -299,7 +302,7 @@
| |
o | 4:9520eea781bc E
|/
- | @ 1:42ccdea3bb16 B
+ | @ 1:42ccdea3bb16 B (pruned using rebase)
|/
o 0:cd010b8cd998 A
@@ -341,36 +344,70 @@
| |
o | 4:9520eea781bc E
|/
- | x 3:32af7686d403 D
+ | x 3:32af7686d403 D (rewritten using rebase as 8:4dc2197e807b)
| |
- | x 2:5fddd98957c8 C
+ | x 2:5fddd98957c8 C (rewritten using rebase as 8:4dc2197e807b)
| |
- | x 1:42ccdea3bb16 B
+ | x 1:42ccdea3bb16 B (rewritten using rebase as 8:4dc2197e807b)
|/
o 0:cd010b8cd998 A
$ hg id --debug -r tip
4dc2197e807bae9817f09905b50ab288be2dbbcf tip
$ hg debugobsolete
- 42ccdea3bb16d28e1848c95fe2e44c000f3f21b1 4dc2197e807bae9817f09905b50ab288be2dbbcf 0 (Thu Jan 01 00:00:00 1970 +0000) {'operation': 'rebase', 'user': 'test'}
- 5fddd98957c8a54a4d436dfe1da9d87f21a1b97b 4dc2197e807bae9817f09905b50ab288be2dbbcf 0 (Thu Jan 01 00:00:00 1970 +0000) {'operation': 'rebase', 'user': 'test'}
- 32af7686d403cf45b5d95f2d70cebea587ac806a 4dc2197e807bae9817f09905b50ab288be2dbbcf 0 (Thu Jan 01 00:00:00 1970 +0000) {'operation': 'rebase', 'user': 'test'}
+ 42ccdea3bb16d28e1848c95fe2e44c000f3f21b1 4dc2197e807bae9817f09905b50ab288be2dbbcf 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '13', 'operation': 'rebase', 'user': 'test'}
+ 5fddd98957c8a54a4d436dfe1da9d87f21a1b97b 4dc2197e807bae9817f09905b50ab288be2dbbcf 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '13', 'operation': 'rebase', 'user': 'test'}
+ 32af7686d403cf45b5d95f2d70cebea587ac806a 4dc2197e807bae9817f09905b50ab288be2dbbcf 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '13', 'operation': 'rebase', 'user': 'test'}
$ cd ..
Rebase set has hidden descendants
---------------------------------
-We rebase a changeset which has a hidden changeset. The hidden changeset must
-not be rebased.
+We rebase a changeset which has hidden descendants. Hidden changesets must not
+be rebased.
$ hg clone base hidden
updating to branch default
3 files updated, 0 files merged, 0 files removed, 0 files unresolved
$ cd hidden
+ $ hg log -G
+ @ 7:02de42196ebe H
+ |
+ | o 6:eea13746799a G
+ |/|
+ o | 5:24b6387c8c8c F
+ | |
+ | o 4:9520eea781bc E
+ |/
+ | o 3:32af7686d403 D
+ | |
+ | o 2:5fddd98957c8 C
+ | |
+ | o 1:42ccdea3bb16 B
+ |/
+ o 0:cd010b8cd998 A
+
$ hg rebase -s 5fddd98957c8 -d eea13746799a
rebasing 2:5fddd98957c8 "C"
rebasing 3:32af7686d403 "D"
+ $ hg log -G
+ o 9:cf44d2f5a9f4 D
+ |
+ o 8:e273c5e7d2d2 C
+ |
+ | @ 7:02de42196ebe H
+ | |
+ o | 6:eea13746799a G
+ |\|
+ | o 5:24b6387c8c8c F
+ | |
+ o | 4:9520eea781bc E
+ |/
+ | o 1:42ccdea3bb16 B
+ |/
+ o 0:cd010b8cd998 A
+
$ hg rebase -s 42ccdea3bb16 -d 02de42196ebe
rebasing 1:42ccdea3bb16 "B"
$ hg log -G
@@ -405,18 +442,18 @@
| |
| o 4:9520eea781bc E
|/
- | x 3:32af7686d403 D
+ | x 3:32af7686d403 D (rewritten using rebase as 9:cf44d2f5a9f4)
| |
- | x 2:5fddd98957c8 C
+ | x 2:5fddd98957c8 C (rewritten using rebase as 8:e273c5e7d2d2)
| |
- | x 1:42ccdea3bb16 B
+ | x 1:42ccdea3bb16 B (rewritten using rebase as 10:7c6027df6a99)
|/
o 0:cd010b8cd998 A
$ hg debugobsolete
- 5fddd98957c8a54a4d436dfe1da9d87f21a1b97b e273c5e7d2d29df783dce9f9eaa3ac4adc69c15d 0 (Thu Jan 01 00:00:00 1970 +0000) {'operation': 'rebase', 'user': 'test'}
- 32af7686d403cf45b5d95f2d70cebea587ac806a cf44d2f5a9f4297a62be94cbdd3dff7c7dc54258 0 (Thu Jan 01 00:00:00 1970 +0000) {'operation': 'rebase', 'user': 'test'}
- 42ccdea3bb16d28e1848c95fe2e44c000f3f21b1 7c6027df6a99d93f461868e5433f63bde20b6dfb 0 (Thu Jan 01 00:00:00 1970 +0000) {'operation': 'rebase', 'user': 'test'}
+ 5fddd98957c8a54a4d436dfe1da9d87f21a1b97b e273c5e7d2d29df783dce9f9eaa3ac4adc69c15d 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '4', 'operation': 'rebase', 'user': 'test'}
+ 32af7686d403cf45b5d95f2d70cebea587ac806a cf44d2f5a9f4297a62be94cbdd3dff7c7dc54258 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '4', 'operation': 'rebase', 'user': 'test'}
+ 42ccdea3bb16d28e1848c95fe2e44c000f3f21b1 7c6027df6a99d93f461868e5433f63bde20b6dfb 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '4', 'operation': 'rebase', 'user': 'test'}
Test that rewriting leaving instability behind is allowed
---------------------------------------------------------------------
@@ -425,14 +462,15 @@
9:cf44d2f5a9f4 D (no-eol)
$ hg rebase -r 8
rebasing 8:e273c5e7d2d2 "C"
+ 1 new orphan changesets
$ hg log -G
o 11:0d8f238b634c C
|
o 10:7c6027df6a99 B
|
- | o 9:cf44d2f5a9f4 D
+ | * 9:cf44d2f5a9f4 D
| |
- | x 8:e273c5e7d2d2 C
+ | x 8:e273c5e7d2d2 C (rewritten using rebase as 11:0d8f238b634c)
| |
@ | 7:02de42196ebe H
| |
@@ -460,9 +498,9 @@
|
| o 12:102b4c1d889b D
|/
- | o 10:7c6027df6a99 B
+ | * 10:7c6027df6a99 B
| |
- | x 7:02de42196ebe H
+ | x 7:02de42196ebe H (rewritten using rebase as 13:bfe264faf697)
| |
+---o 6:eea13746799a G
| |/
@@ -547,6 +585,7 @@
rebasing 3:32af7686d403 "D"
rebasing 7:02de42196ebe "H"
rebasing 9:4bde274eefcf "I" (tip)
+ 1 new orphan changesets
$ hg log -G
@ 12:acd174b7ab39 I
|
@@ -554,9 +593,9 @@
|
| o 10:b5313c85b22e D
|/
- | o 8:53a6a128b2b7 M
+ | * 8:53a6a128b2b7 M
| |\
- | | x 7:02de42196ebe H
+ | | x 7:02de42196ebe H (rewritten using rebase as 11:6c11a6218c97)
| | |
o---+ 6:eea13746799a G
| | |
@@ -564,7 +603,7 @@
| | |
o---+ 4:9520eea781bc E
/ /
- x | 3:32af7686d403 D
+ x | 3:32af7686d403 D (rewritten using rebase as 10:b5313c85b22e)
| |
o | 2:5fddd98957c8 C
| |
@@ -576,30 +615,34 @@
Test hidden changesets in the rebase set (issue4504)
$ hg up --hidden 9
+ updating to a hidden changeset 4bde274eefcf
+ (hidden revision '4bde274eefcf' was rewritten as: acd174b7ab39)
3 files updated, 0 files merged, 1 files removed, 0 files unresolved
$ echo J > J
$ hg add J
$ hg commit -m J
+ 1 new orphan changesets
$ hg debugobsolete `hg log --rev . -T '{node}'`
obsoleted 1 changesets
$ hg rebase --rev .~1::. --dest 'max(desc(D))' --traceback --config experimental.rebaseskipobsolete=off
rebasing 9:4bde274eefcf "I"
rebasing 13:06edfc82198f "J" (tip)
+ 2 new content-divergent changesets
$ hg log -G
@ 15:5ae8a643467b J
|
- o 14:9ad579b4a5de I
+ * 14:9ad579b4a5de I
|
- | o 12:acd174b7ab39 I
+ | * 12:acd174b7ab39 I
| |
| o 11:6c11a6218c97 H
| |
o | 10:b5313c85b22e D
|/
- | o 8:53a6a128b2b7 M
+ | * 8:53a6a128b2b7 M
| |\
- | | x 7:02de42196ebe H
+ | | x 7:02de42196ebe H (rewritten using rebase as 11:6c11a6218c97)
| | |
o---+ 6:eea13746799a G
| | |
@@ -607,7 +650,7 @@
| | |
o---+ 4:9520eea781bc E
/ /
- x | 3:32af7686d403 D
+ x | 3:32af7686d403 D (rewritten using rebase as 10:b5313c85b22e)
| |
o | 2:5fddd98957c8 C
| |
@@ -620,6 +663,7 @@
$ echo "K" > K
$ hg add K
$ hg commit --amend -m "K"
+ 1 new orphan changesets
$ echo "L" > L
$ hg add L
$ hg commit -m "L"
@@ -628,26 +672,27 @@
$ echo "M" > M
$ hg add M
$ hg commit --amend -m "M"
+ 1 new orphan changesets
$ hg log -G
@ 18:bfaedf8eb73b M
|
- | o 17:97219452e4bd L
+ | * 17:97219452e4bd L
| |
- | x 16:fc37a630c901 K
+ | x 16:fc37a630c901 K (rewritten using amend as 18:bfaedf8eb73b)
|/
- | o 15:5ae8a643467b J
+ | * 15:5ae8a643467b J
| |
- | x 14:9ad579b4a5de I
+ | x 14:9ad579b4a5de I (rewritten using amend as 16:fc37a630c901)
|/
- | o 12:acd174b7ab39 I
+ | * 12:acd174b7ab39 I
| |
| o 11:6c11a6218c97 H
| |
o | 10:b5313c85b22e D
|/
- | o 8:53a6a128b2b7 M
+ | * 8:53a6a128b2b7 M
| |\
- | | x 7:02de42196ebe H
+ | | x 7:02de42196ebe H (rewritten using rebase as 11:6c11a6218c97)
| | |
o---+ 6:eea13746799a G
| | |
@@ -655,7 +700,7 @@
| | |
o---+ 4:9520eea781bc E
/ /
- x | 3:32af7686d403 D
+ x | 3:32af7686d403 D (rewritten using rebase as 10:b5313c85b22e)
| |
o | 2:5fddd98957c8 C
| |
@@ -666,6 +711,7 @@
$ hg rebase -s 14 -d 17 --config experimental.rebaseskipobsolete=True
note: not rebasing 14:9ad579b4a5de "I", already in destination as 16:fc37a630c901 "K"
rebasing 15:5ae8a643467b "J"
+ 1 new orphan changesets
$ cd ..
@@ -691,10 +737,22 @@
$ hg commit --amend -m B1
$ hg commit --amend -m B2
$ hg up --hidden 'desc(B0)'
+ updating to a hidden changeset a8b11f55fb19
+ (hidden revision 'a8b11f55fb19' was rewritten as: 261e70097290)
0 files updated, 0 files merged, 0 files removed, 0 files unresolved
$ echo C > C
$ hg add C
$ hg commit -m C
+ 1 new orphan changesets
+ $ hg log -G
+ @ 4:212cb178bcbb C
+ |
+ | o 3:261e70097290 B2
+ | |
+ x | 1:a8b11f55fb19 B0 (rewritten using amend as 3:261e70097290)
+ |/
+ o 0:4a2df7238c3b A
+
Rebase finds its way in a chain of marker
@@ -705,12 +763,27 @@
Even when the chain include missing node
$ hg up --hidden 'desc(B0)'
+ updating to a hidden changeset a8b11f55fb19
+ (hidden revision 'a8b11f55fb19' was rewritten as: 261e70097290)
0 files updated, 0 files merged, 1 files removed, 0 files unresolved
$ echo D > D
$ hg add D
$ hg commit -m D
+ 1 new orphan changesets
$ hg --hidden strip -r 'desc(B1)'
- saved backup bundle to $TESTTMP/obsskip/.hg/strip-backup/86f6414ccda7-b1c452ee-backup.hg (glob)
+ saved backup bundle to $TESTTMP/obsskip/.hg/strip-backup/86f6414ccda7-b1c452ee-backup.hg
+ 1 new orphan changesets
+ $ hg log -G
+ @ 5:1a79b7535141 D
+ |
+ | o 4:ff2c4d47b71d C
+ | |
+ | o 2:261e70097290 B2
+ | |
+ x | 1:a8b11f55fb19 B0 (rewritten using amend as 2:261e70097290)
+ |/
+ o 0:4a2df7238c3b A
+
$ hg rebase -d 'desc(B2)'
note: not rebasing 1:a8b11f55fb19 "B0", already in destination as 2:261e70097290 "B2"
@@ -738,6 +811,7 @@
$ hg debugobsolete `hg log -r 7 -T '{node}\n'` --config experimental.evolution=true
obsoleted 1 changesets
+ 1 new orphan changesets
$ hg rebase -d 6 -r "4::"
rebasing 4:ff2c4d47b71d "C"
note: not rebasing 7:360bbaa7d3ce "O", it has no successor
@@ -766,6 +840,19 @@
created new head
$ hg debugobsolete `hg log -r 11 -T '{node}\n'` --config experimental.evolution=true
obsoleted 1 changesets
+ $ hg log -G
+ @ 11:f44da1f4954c nonrelevant (pruned)
+ |
+ | o 10:121d9e3bc4c6 P
+ |/
+ o 9:4be60e099a77 C
+ |
+ o 6:9c48361117de D
+ |
+ o 2:261e70097290 B2
+ |
+ o 0:4a2df7238c3b A
+
$ hg rebase -r . -d 10
note: not rebasing 11:f44da1f4954c "nonrelevant" (tip), it has no successor
@@ -795,10 +882,13 @@
$ hg add bar
$ hg commit --amend -m "10'"
$ hg up 10 --hidden
+ updating to a hidden changeset 121d9e3bc4c6
+ (hidden revision '121d9e3bc4c6' was rewritten as: 77d874d096a2)
0 files updated, 0 files merged, 1 files removed, 0 files unresolved
$ echo "bar" > foo
$ hg add foo
$ hg commit -m "bar foo"
+ 1 new orphan changesets
$ hg log -G
@ 14:73568ab6879d bar foo
|
@@ -806,7 +896,7 @@
| |
| | o 12:3eb461388009 john doe
| |/
- x | 10:121d9e3bc4c6 P
+ x | 10:121d9e3bc4c6 P (rewritten using amend as 13:77d874d096a2)
|/
o 9:4be60e099a77 C
|
@@ -835,7 +925,7 @@
| |
| | o 12:3eb461388009 john doe
| |/
- x | 10:121d9e3bc4c6 P
+ x | 10:121d9e3bc4c6 P (rewritten using amend as 13:77d874d096a2)
|/
o 9:4be60e099a77 C
|
@@ -850,6 +940,7 @@
$ hg rebase -s 10 -d 12 --config experimental.evolution.allowdivergence=True
rebasing 10:121d9e3bc4c6 "P"
rebasing 14:73568ab6879d "bar foo" (tip)
+ 2 new content-divergent changesets
$ hg summary
parent: 16:61bd55f69bc4 tip
bar foo
@@ -889,11 +980,12 @@
$ hg commit -m "dummy change"
$ hg debugobsolete `hg log -r ".^" -T '{node}'` `hg log -r 18 -T '{node}'` --config experimental.evolution=true
obsoleted 1 changesets
+ 1 new orphan changesets
$ hg log -G -r 16::
@ 21:7bdc8a87673d dummy change
|
- x 20:8b31da3c4919 dummy change
+ x 20:8b31da3c4919 dummy change (rewritten as 18:601db7a18f51)
|
o 19:b82fb57ea638 willconflict second version
|
@@ -920,6 +1012,214 @@
rebasing 21:7bdc8a87673d "dummy change" (tip)
$ cd ..
+Divergence cases due to obsolete changesets
+-------------------------------------------
+
+We should ignore branches with unstable changesets when they are based on an
+obsolete changeset whose successor is in the rebase set.
+
+ $ hg init divergence
+ $ cd divergence
+ $ cat >> .hg/hgrc << EOF
+ > [extensions]
+ > strip =
+ > [alias]
+ > strip = strip --no-backup --quiet
+ > [templates]
+ > instabilities = '{rev}:{node|short} {desc|firstline}{if(instabilities," ({instabilities})")}\n'
+ > EOF
+
+ $ hg debugdrawdag <<EOF
+ > e f
+ > | |
+ > d' d # replace: d -> d'
+ > \ /
+ > c
+ > |
+ > x b
+ > \|
+ > a
+ > EOF
+ 1 new orphan changesets
+ $ hg log -G -r 'a'::
+ * 7:1143e9adc121 f
+ |
+ | o 6:d60ebfa0f1cb e
+ | |
+ | o 5:027ad6c5830d d'
+ | |
+ x | 4:76be324c128b d (rewritten using replace as 5:027ad6c5830d)
+ |/
+ o 3:a82ac2b38757 c
+ |
+ | o 2:630d7c95eff7 x
+ | |
+ o | 1:488e1b7e7341 b
+ |/
+ o 0:b173517d0057 a
+
+
+Changeset d and its descendants are excluded to avoid divergence of d, which
+would occur because the successor of d (d') is also in the rebase set. As a
+consequence, f (a descendant of d) is left behind.
+
+ $ hg rebase -b 'e' -d 'x'
+ rebasing 1:488e1b7e7341 "b" (b)
+ rebasing 3:a82ac2b38757 "c" (c)
+ rebasing 5:027ad6c5830d "d'" (d')
+ rebasing 6:d60ebfa0f1cb "e" (e)
+ note: not rebasing 4:76be324c128b "d" (d) and its descendants as this would cause divergence
+ $ hg log -G -r 'a'::
+ o 11:eb6d63fc4ed5 e
+ |
+ o 10:44d8c724a70c d'
+ |
+ o 9:d008e6b4d3fd c
+ |
+ o 8:67e8f4a16c49 b
+ |
+ | * 7:1143e9adc121 f
+ | |
+ | | x 6:d60ebfa0f1cb e (rewritten using rebase as 11:eb6d63fc4ed5)
+ | | |
+ | | x 5:027ad6c5830d d' (rewritten using rebase as 10:44d8c724a70c)
+ | | |
+ | x | 4:76be324c128b d (rewritten using replace as 5:027ad6c5830d)
+ | |/
+ | x 3:a82ac2b38757 c (rewritten using rebase as 9:d008e6b4d3fd)
+ | |
+ o | 2:630d7c95eff7 x
+ | |
+ | x 1:488e1b7e7341 b (rewritten using rebase as 8:67e8f4a16c49)
+ |/
+ o 0:b173517d0057 a
+
+ $ hg strip -r 8:
+
+If the rebase set has an obsolete changeset (d) with a successor (d') outside
+the rebase set and none in the destination, we still get the divergence
+warning. By allowing divergence, we can perform the rebase.
+
+ $ hg rebase -r 'c'::'f' -d 'x'
+ abort: this rebase will cause divergences from: 76be324c128b
+ (to force the rebase please set experimental.evolution.allowdivergence=True)
+ [255]
+ $ hg rebase --config experimental.evolution.allowdivergence=true -r 'c'::'f' -d 'x'
+ rebasing 3:a82ac2b38757 "c" (c)
+ rebasing 4:76be324c128b "d" (d)
+ rebasing 7:1143e9adc121 "f" (f tip)
+ 1 new orphan changesets
+ 2 new content-divergent changesets
+ $ hg log -G -r 'a':: -T instabilities
+ o 10:e1744ea07510 f
+ |
+ * 9:e2b36ea9a0a0 d (content-divergent)
+ |
+ o 8:6a0376de376e c
+ |
+ | x 7:1143e9adc121 f
+ | |
+ | | * 6:d60ebfa0f1cb e (orphan)
+ | | |
+ | | * 5:027ad6c5830d d' (orphan content-divergent)
+ | | |
+ | x | 4:76be324c128b d
+ | |/
+ | x 3:a82ac2b38757 c
+ | |
+ o | 2:630d7c95eff7 x
+ | |
+ | o 1:488e1b7e7341 b
+ |/
+ o 0:b173517d0057 a
+
+ $ hg strip -r 8:
+
+(Not skipping obsoletes means that divergence is allowed.)
+
+ $ hg rebase --config experimental.rebaseskipobsolete=false -r 'c'::'f' -d 'x'
+ rebasing 3:a82ac2b38757 "c" (c)
+ rebasing 4:76be324c128b "d" (d)
+ rebasing 7:1143e9adc121 "f" (f tip)
+ 1 new orphan changesets
+ 2 new content-divergent changesets
+
+ $ hg strip -r 0:
+
+Similar test on a more complex graph
+
+ $ hg debugdrawdag <<EOF
+ > g
+ > |
+ > f e
+ > | |
+ > e' d # replace: e -> e'
+ > \ /
+ > c
+ > |
+ > x b
+ > \|
+ > a
+ > EOF
+ 1 new orphan changesets
+ $ hg log -G -r 'a':
+ * 8:2876ce66c6eb g
+ |
+ | o 7:3ffec603ab53 f
+ | |
+ x | 6:e36fae928aec e (rewritten using replace as 5:63324dc512ea)
+ | |
+ | o 5:63324dc512ea e'
+ | |
+ o | 4:76be324c128b d
+ |/
+ o 3:a82ac2b38757 c
+ |
+ | o 2:630d7c95eff7 x
+ | |
+ o | 1:488e1b7e7341 b
+ |/
+ o 0:b173517d0057 a
+
+ $ hg rebase -b 'f' -d 'x'
+ rebasing 1:488e1b7e7341 "b" (b)
+ rebasing 3:a82ac2b38757 "c" (c)
+ rebasing 5:63324dc512ea "e'" (e')
+ rebasing 7:3ffec603ab53 "f" (f)
+ rebasing 4:76be324c128b "d" (d)
+ note: not rebasing 6:e36fae928aec "e" (e) and its descendants as this would cause divergence
+ $ hg log -G -r 'a':
+ o 13:a1707a5b7c2c d
+ |
+ | o 12:ef6251596616 f
+ | |
+ | o 11:b6f172e64af9 e'
+ |/
+ o 10:d008e6b4d3fd c
+ |
+ o 9:67e8f4a16c49 b
+ |
+ | * 8:2876ce66c6eb g
+ | |
+ | | x 7:3ffec603ab53 f (rewritten using rebase as 12:ef6251596616)
+ | | |
+ | x | 6:e36fae928aec e (rewritten using replace as 5:63324dc512ea)
+ | | |
+ | | x 5:63324dc512ea e' (rewritten using rebase as 11:b6f172e64af9)
+ | | |
+ | x | 4:76be324c128b d (rewritten using rebase as 13:a1707a5b7c2c)
+ | |/
+ | x 3:a82ac2b38757 c (rewritten using rebase as 10:d008e6b4d3fd)
+ | |
+ o | 2:630d7c95eff7 x
+ | |
+ | x 1:488e1b7e7341 b (rewritten using rebase as 9:67e8f4a16c49)
+ |/
+ o 0:b173517d0057 a
+
+
+ $ cd ..
+
Rebase merge where successor of one parent is equal to destination (issue5198)
$ hg init p1-succ-is-dest
@@ -932,6 +1232,7 @@
> \|/
> A
> EOF
+ 1 new orphan changesets
$ hg rebase -d B -s D
note: not rebasing 2:b18e25de2cf5 "D" (D), already in destination as 1:112478962961 "B" (B)
@@ -939,11 +1240,11 @@
$ hg log -G
o 5:50e9d60b99c6 F
|\
- | | x 4:66f1a38021c9 F
+ | | x 4:66f1a38021c9 F (rewritten using rebase as 5:50e9d60b99c6)
| |/|
| o | 3:7fb047a69f22 E
| | |
- | | x 2:b18e25de2cf5 D
+ | | x 2:b18e25de2cf5 D (rewritten using replace as 1:112478962961)
| |/
o | 1:112478962961 B
|/
@@ -963,6 +1264,7 @@
> \|/
> A
> EOF
+ 1 new orphan changesets
$ hg rebase -d B -s E
note: not rebasing 3:7fb047a69f22 "E" (E), already in destination as 1:112478962961 "B" (B)
@@ -970,9 +1272,9 @@
$ hg log -G
o 5:aae1787dacee F
|\
- | | x 4:66f1a38021c9 F
+ | | x 4:66f1a38021c9 F (rewritten using rebase as 5:aae1787dacee)
| |/|
- | | x 3:7fb047a69f22 E
+ | | x 3:7fb047a69f22 E (rewritten using replace as 1:112478962961)
| | |
| o | 2:b18e25de2cf5 D
| |/
@@ -994,6 +1296,7 @@
> \|/
> A
> EOF
+ 1 new orphan changesets
$ hg rebase -d C -s D
note: not rebasing 2:b18e25de2cf5 "D" (D), already in destination as 1:112478962961 "B" (B)
@@ -1002,13 +1305,13 @@
$ hg log -G
o 6:0913febf6439 F
|\
- +---x 5:66f1a38021c9 F
+ +---x 5:66f1a38021c9 F (rewritten using rebase as 6:0913febf6439)
| | |
| o | 4:26805aba1e60 C
| | |
o | | 3:7fb047a69f22 E
| | |
- +---x 2:b18e25de2cf5 D
+ +---x 2:b18e25de2cf5 D (rewritten using replace as 1:112478962961)
| |
| o 1:112478962961 B
|/
@@ -1028,6 +1331,7 @@
> \|/
> A
> EOF
+ 1 new orphan changesets
$ hg rebase -d C -s E
note: not rebasing 3:7fb047a69f22 "E" (E), already in destination as 1:112478962961 "B" (B)
@@ -1035,11 +1339,11 @@
$ hg log -G
o 6:c6ab0cc6d220 F
|\
- +---x 5:66f1a38021c9 F
+ +---x 5:66f1a38021c9 F (rewritten using rebase as 6:c6ab0cc6d220)
| | |
| o | 4:26805aba1e60 C
| | |
- | | x 3:7fb047a69f22 E
+ | | x 3:7fb047a69f22 E (rewritten using replace as 1:112478962961)
| | |
o---+ 2:b18e25de2cf5 D
/ /
@@ -1061,6 +1365,7 @@
> \|/
> A
> EOF
+ 1 new orphan changesets
$ hg rebase -d C -b F
rebasing 2:b18e25de2cf5 "D" (D)
@@ -1070,13 +1375,13 @@
$ hg log -G
o 6:8f47515dda15 D
|
- | x 5:66f1a38021c9 F
+ | x 5:66f1a38021c9 F (pruned using rebase)
| |\
o | | 4:26805aba1e60 C
| | |
- | | x 3:7fb047a69f22 E
+ | | x 3:7fb047a69f22 E (rewritten using replace as 1:112478962961)
| | |
- | x | 2:b18e25de2cf5 D
+ | x | 2:b18e25de2cf5 D (rewritten using rebase as 6:8f47515dda15)
| |/
o / 1:112478962961 B
|/
@@ -1096,6 +1401,7 @@
> \|/
> A
> EOF
+ 1 new orphan changesets
$ hg rebase -d C -b F
note: not rebasing 2:b18e25de2cf5 "D" (D), already in destination as 1:112478962961 "B" (B)
@@ -1106,13 +1412,13 @@
$ hg log -G
o 6:533690786a86 E
|
- | x 5:66f1a38021c9 F
+ | x 5:66f1a38021c9 F (pruned using rebase)
| |\
o | | 4:26805aba1e60 C
| | |
- | | x 3:7fb047a69f22 E
+ | | x 3:7fb047a69f22 E (rewritten using rebase as 6:533690786a86)
| | |
- | x | 2:b18e25de2cf5 D
+ | x | 2:b18e25de2cf5 D (rewritten using replace as 1:112478962961)
| |/
o / 1:112478962961 B
|/
@@ -1131,6 +1437,7 @@
> | |
> X Y
> EOS
+ 1 new orphan changesets
$ hg rebase -r A+B+E -d F
note: not rebasing 4:a3d17304151f "A" (A), already in destination as 0:96cc3511f894 "C" (C)
note: not rebasing 5:b23a2cc00842 "B" (B), already in destination as 1:058c1e1fb10a "D" (D)
@@ -1149,6 +1456,7 @@
> /| | # replace: A -> C
> A B C # D/D = D
> EOS
+ 1 new orphan changesets
$ hg rebase -r A+B+D -d Z
note: not rebasing 0:426bada5c675 "A" (A), already in destination as 2:96cc3511f894 "C" (C)
rebasing 1:fc2b737bb2e5 "B" (B)
@@ -1179,6 +1487,7 @@
> /| | # replace: B -> C
> A B C # D/D = D
> EOS
+ 1 new orphan changesets
$ hg rebase -r B+A+D -d Z
rebasing 0:426bada5c675 "A" (A)
note: not rebasing 1:fc2b737bb2e5 "B" (B), already in destination as 2:96cc3511f894 "C" (C)
@@ -1222,9 +1531,11 @@
$ hg log -r . # working dir is at rev 3 (successor of 2)
3:be1832deae9a b (no-eol)
$ hg book -r 2 mybook --hidden # rev 2 has a bookmark on it now
+ bookmarking hidden changeset 1e9a3c00cbe9
+ (hidden revision '1e9a3c00cbe9' was rewritten as: be1832deae9a)
$ hg up 2 && hg log -r . # working dir is at rev 2 again
0 files updated, 0 files merged, 1 files removed, 0 files unresolved
- 2:1e9a3c00cbe9 b (no-eol)
+ 2:1e9a3c00cbe9 b (rewritten using rebase as 3:be1832deae9a) (no-eol)
$ hg rebase -r 2 -d 3 --config experimental.evolution.track-operation=1
note: not rebasing 2:1e9a3c00cbe9 "b" (mybook), already in destination as 3:be1832deae9a "b" (tip)
Check that working directory and bookmark was updated to rev 3 although rev 2
@@ -1234,7 +1545,7 @@
$ hg bookmarks
mybook 3:be1832deae9a
$ hg debugobsolete --rev tip
- 1e9a3c00cbe90d236ac05ef61efcc5e40b7412bc be1832deae9ac531caa7438b8dcf6055a122cd8e 0 (Thu Jan 01 00:00:00 1970 +0000) {'operation': 'rebase', 'user': 'test'}
+ 1e9a3c00cbe90d236ac05ef61efcc5e40b7412bc be1832deae9ac531caa7438b8dcf6055a122cd8e 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '4', 'operation': 'rebase', 'user': 'test'}
Obsoleted working parent and bookmark could be moved if an ancestor of working
parent gets moved:
@@ -1255,6 +1566,7 @@
$ hg rebase -r B+D1 -d E
rebasing 1:112478962961 "B" (B)
note: not rebasing 5:15ecf15e0114 "D1" (book D1 tip), already in destination as 2:0807738e0be9 "D2" (D2)
+ 1 new orphan changesets
$ hg log -G -T '{desc} {bookmarks}'
@ B book
|
@@ -1262,7 +1574,7 @@
| |
o | E
| |
- | o C
+ | * C
| |
o | D2
| |
@@ -1286,6 +1598,7 @@
> |/
> A
> EOS
+ 1 new orphan changesets
$ eval `hg tags -T '{tag}={node}\n'`
$ rm .hg/localtags
@@ -1326,6 +1639,8 @@
$ rm .hg/localtags
$ hg update -q $C --hidden
+ updating to a hidden changeset 7829726be4dc
+ (hidden revision '7829726be4dc' is pruned)
$ hg rebase -s $B -d $D
rebasing 1:2ec65233581b "B"
merging D
@@ -1338,7 +1653,7 @@
$ hg log -G
@ 2:b18e25de2cf5 D
|
- | @ 1:2ec65233581b B
+ | @ 1:2ec65233581b B (pruned using prune)
|/
o 0:426bada5c675 A
--- a/tests/test-rebase-parameters.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-rebase-parameters.t Mon Jan 22 17:53:02 2018 -0500
@@ -6,7 +6,7 @@
> publish=False
>
> [alias]
- > tglog = log -G --template "{rev}: '{desc}' {branches}\n"
+ > tglog = log -G --template "{rev}: {node|short} '{desc}' {branches}\n"
> EOF
@@ -27,23 +27,23 @@
adding I
$ hg tglog
- @ 8: 'I'
+ @ 8: e7ec4e813ba6 'I'
|
- o 7: 'H'
+ o 7: 02de42196ebe 'H'
|
- | o 6: 'G'
+ | o 6: eea13746799a 'G'
|/|
- o | 5: 'F'
+ o | 5: 24b6387c8c8c 'F'
| |
- | o 4: 'E'
+ | o 4: 9520eea781bc 'E'
|/
- | o 3: 'D'
+ | o 3: 32af7686d403 'D'
| |
- | o 2: 'C'
+ | o 2: 5fddd98957c8 'C'
| |
- | o 1: 'B'
+ | o 1: 42ccdea3bb16 'B'
|/
- o 0: 'A'
+ o 0: cd010b8cd998 'A'
$ cd ..
@@ -135,22 +135,22 @@
rebasing 1:42ccdea3bb16 "B"
rebasing 2:5fddd98957c8 "C"
rebasing 3:32af7686d403 "D"
- saved backup bundle to $TESTTMP/a1/.hg/strip-backup/42ccdea3bb16-3cb021d3-rebase.hg (glob)
+ saved backup bundle to $TESTTMP/a1/.hg/strip-backup/42ccdea3bb16-3cb021d3-rebase.hg
$ hg tglog
- @ 6: 'D'
+ @ 6: ed65089c18f8 'D'
|
- o 5: 'C'
+ o 5: 7621bf1a2f17 'C'
|
- o 4: 'B'
+ o 4: 9430a62369c6 'B'
|
- o 3: 'I'
+ o 3: e7ec4e813ba6 'I'
|
- o 2: 'H'
+ o 2: 02de42196ebe 'H'
|
- o 1: 'F'
+ o 1: 24b6387c8c8c 'F'
|
- o 0: 'A'
+ o 0: cd010b8cd998 'A'
Try to rollback after a rebase (fail):
@@ -169,22 +169,22 @@
rebasing 1:42ccdea3bb16 "B"
rebasing 2:5fddd98957c8 "C"
rebasing 3:32af7686d403 "D"
- saved backup bundle to $TESTTMP/a2/.hg/strip-backup/42ccdea3bb16-3cb021d3-rebase.hg (glob)
+ saved backup bundle to $TESTTMP/a2/.hg/strip-backup/42ccdea3bb16-3cb021d3-rebase.hg
$ hg tglog
- @ 6: 'D'
+ @ 6: ed65089c18f8 'D'
|
- o 5: 'C'
+ o 5: 7621bf1a2f17 'C'
|
- o 4: 'B'
+ o 4: 9430a62369c6 'B'
|
- o 3: 'I'
+ o 3: e7ec4e813ba6 'I'
|
- o 2: 'H'
+ o 2: 02de42196ebe 'H'
|
- o 1: 'F'
+ o 1: 24b6387c8c8c 'F'
|
- o 0: 'A'
+ o 0: cd010b8cd998 'A'
$ cd ..
@@ -198,26 +198,26 @@
rebasing 1:42ccdea3bb16 "B"
rebasing 2:5fddd98957c8 "C"
rebasing 3:32af7686d403 "D"
- saved backup bundle to $TESTTMP/a3/.hg/strip-backup/42ccdea3bb16-3cb021d3-rebase.hg (glob)
+ saved backup bundle to $TESTTMP/a3/.hg/strip-backup/42ccdea3bb16-3cb021d3-rebase.hg
$ hg tglog
- @ 8: 'D'
+ @ 8: ed65089c18f8 'D'
|
- o 7: 'C'
+ o 7: 7621bf1a2f17 'C'
|
- o 6: 'B'
+ o 6: 9430a62369c6 'B'
|
- o 5: 'I'
+ o 5: e7ec4e813ba6 'I'
|
- o 4: 'H'
+ o 4: 02de42196ebe 'H'
|
- | o 3: 'G'
+ | o 3: eea13746799a 'G'
|/|
- o | 2: 'F'
+ o | 2: 24b6387c8c8c 'F'
| |
- | o 1: 'E'
+ | o 1: 9520eea781bc 'E'
|/
- o 0: 'A'
+ o 0: cd010b8cd998 'A'
$ cd ..
@@ -230,22 +230,22 @@
$ hg rebase --source 'desc("C")'
rebasing 2:5fddd98957c8 "C"
rebasing 3:32af7686d403 "D"
- saved backup bundle to $TESTTMP/a4/.hg/strip-backup/5fddd98957c8-f9244fa1-rebase.hg (glob)
+ saved backup bundle to $TESTTMP/a4/.hg/strip-backup/5fddd98957c8-f9244fa1-rebase.hg
$ hg tglog
- o 6: 'D'
+ o 6: 7726e9fd58f7 'D'
|
- o 5: 'C'
+ o 5: 72c8333623d0 'C'
|
- @ 4: 'I'
+ @ 4: e7ec4e813ba6 'I'
|
- o 3: 'H'
+ o 3: 02de42196ebe 'H'
|
- o 2: 'F'
+ o 2: 24b6387c8c8c 'F'
|
- | o 1: 'B'
+ | o 1: 42ccdea3bb16 'B'
|/
- o 0: 'A'
+ o 0: cd010b8cd998 'A'
$ cd ..
@@ -259,26 +259,26 @@
rebasing 1:42ccdea3bb16 "B"
rebasing 2:5fddd98957c8 "C"
rebasing 3:32af7686d403 "D"
- saved backup bundle to $TESTTMP/a5/.hg/strip-backup/42ccdea3bb16-3cb021d3-rebase.hg (glob)
+ saved backup bundle to $TESTTMP/a5/.hg/strip-backup/42ccdea3bb16-3cb021d3-rebase.hg
$ hg tglog
- @ 8: 'D'
+ @ 8: 8eeb3c33ad33 'D'
|
- o 7: 'C'
+ o 7: 2327fea05063 'C'
|
- o 6: 'B'
+ o 6: e4e5be0395b2 'B'
|
- | o 5: 'I'
+ | o 5: e7ec4e813ba6 'I'
| |
- | o 4: 'H'
+ | o 4: 02de42196ebe 'H'
| |
- o | 3: 'G'
+ o | 3: eea13746799a 'G'
|\|
- | o 2: 'F'
+ | o 2: 24b6387c8c8c 'F'
| |
- o | 1: 'E'
+ o | 1: 9520eea781bc 'E'
|/
- o 0: 'A'
+ o 0: cd010b8cd998 'A'
$ cd ..
@@ -292,22 +292,22 @@
rebasing 1:42ccdea3bb16 "B"
rebasing 2:5fddd98957c8 "C"
rebasing 3:32af7686d403 "D"
- saved backup bundle to $TESTTMP/a6/.hg/strip-backup/42ccdea3bb16-3cb021d3-rebase.hg (glob)
+ saved backup bundle to $TESTTMP/a6/.hg/strip-backup/42ccdea3bb16-3cb021d3-rebase.hg
$ hg tglog
- o 6: 'D'
+ o 6: ed65089c18f8 'D'
|
- o 5: 'C'
+ o 5: 7621bf1a2f17 'C'
|
- o 4: 'B'
+ o 4: 9430a62369c6 'B'
|
- @ 3: 'I'
+ @ 3: e7ec4e813ba6 'I'
|
- o 2: 'H'
+ o 2: 02de42196ebe 'H'
|
- o 1: 'F'
+ o 1: 24b6387c8c8c 'F'
|
- o 0: 'A'
+ o 0: cd010b8cd998 'A'
$ cd ..
@@ -320,26 +320,26 @@
$ hg rebase --source 2 --dest 7
rebasing 2:5fddd98957c8 "C"
rebasing 3:32af7686d403 "D"
- saved backup bundle to $TESTTMP/a7/.hg/strip-backup/5fddd98957c8-f9244fa1-rebase.hg (glob)
+ saved backup bundle to $TESTTMP/a7/.hg/strip-backup/5fddd98957c8-f9244fa1-rebase.hg
$ hg tglog
- o 8: 'D'
+ o 8: 668acadedd30 'D'
|
- o 7: 'C'
+ o 7: 09eb682ba906 'C'
|
- | @ 6: 'I'
+ | @ 6: e7ec4e813ba6 'I'
|/
- o 5: 'H'
+ o 5: 02de42196ebe 'H'
|
- | o 4: 'G'
+ | o 4: eea13746799a 'G'
|/|
- o | 3: 'F'
+ o | 3: 24b6387c8c8c 'F'
| |
- | o 2: 'E'
+ | o 2: 9520eea781bc 'E'
|/
- | o 1: 'B'
+ | o 1: 42ccdea3bb16 'B'
|/
- o 0: 'A'
+ o 0: cd010b8cd998 'A'
$ cd ..
@@ -353,26 +353,26 @@
rebasing 1:42ccdea3bb16 "B"
rebasing 2:5fddd98957c8 "C"
rebasing 3:32af7686d403 "D"
- saved backup bundle to $TESTTMP/a8/.hg/strip-backup/42ccdea3bb16-3cb021d3-rebase.hg (glob)
+ saved backup bundle to $TESTTMP/a8/.hg/strip-backup/42ccdea3bb16-3cb021d3-rebase.hg
$ hg tglog
- o 8: 'D'
+ o 8: 287cc92ba5a4 'D'
|
- o 7: 'C'
+ o 7: 6824f610a250 'C'
|
- o 6: 'B'
+ o 6: 7c6027df6a99 'B'
|
- | @ 5: 'I'
+ | @ 5: e7ec4e813ba6 'I'
|/
- o 4: 'H'
+ o 4: 02de42196ebe 'H'
|
- | o 3: 'G'
+ | o 3: eea13746799a 'G'
|/|
- o | 2: 'F'
+ o | 2: 24b6387c8c8c 'F'
| |
- | o 1: 'E'
+ | o 1: 9520eea781bc 'E'
|/
- o 0: 'A'
+ o 0: cd010b8cd998 'A'
$ cd ..
@@ -385,22 +385,22 @@
$ hg rebase --rev 'desc("C")::'
rebasing 2:5fddd98957c8 "C"
rebasing 3:32af7686d403 "D"
- saved backup bundle to $TESTTMP/a9/.hg/strip-backup/5fddd98957c8-f9244fa1-rebase.hg (glob)
+ saved backup bundle to $TESTTMP/a9/.hg/strip-backup/5fddd98957c8-f9244fa1-rebase.hg
$ hg tglog
- o 6: 'D'
+ o 6: 7726e9fd58f7 'D'
|
- o 5: 'C'
+ o 5: 72c8333623d0 'C'
|
- @ 4: 'I'
+ @ 4: e7ec4e813ba6 'I'
|
- o 3: 'H'
+ o 3: 02de42196ebe 'H'
|
- o 2: 'F'
+ o 2: 24b6387c8c8c 'F'
|
- | o 1: 'B'
+ | o 1: 42ccdea3bb16 'B'
|/
- o 0: 'A'
+ o 0: cd010b8cd998 'A'
$ cd ..
@@ -411,7 +411,7 @@
$ hg rebase -r 3 -r 6 --dest 8
rebasing 3:32af7686d403 "D"
rebasing 6:eea13746799a "G"
- saved backup bundle to $TESTTMP/aX/.hg/strip-backup/eea13746799a-ad273fd6-rebase.hg (glob)
+ saved backup bundle to $TESTTMP/aX/.hg/strip-backup/eea13746799a-ad273fd6-rebase.hg
$ cd ..
Test --tool parameter:
@@ -441,7 +441,7 @@
$ hg rebase -s 2 -d 1 --tool internal:local
rebasing 2:e4e3f3546619 "c2b" (tip)
note: rebase of 2:e4e3f3546619 created no changes to commit
- saved backup bundle to $TESTTMP/b1/.hg/strip-backup/e4e3f3546619-b0841178-rebase.hg (glob)
+ saved backup bundle to $TESTTMP/b1/.hg/strip-backup/e4e3f3546619-b0841178-rebase.hg
$ hg cat c2
c2
@@ -454,7 +454,7 @@
$ hg rebase -s 2 -d 1 --tool internal:other
rebasing 2:e4e3f3546619 "c2b" (tip)
- saved backup bundle to $TESTTMP/b2/.hg/strip-backup/e4e3f3546619-b0841178-rebase.hg (glob)
+ saved backup bundle to $TESTTMP/b2/.hg/strip-backup/e4e3f3546619-b0841178-rebase.hg
$ hg cat c2
c2b
@@ -494,7 +494,7 @@
$ hg rebase -c --tool internal:fail
rebasing 2:e4e3f3546619 "c2b" (tip)
note: rebase of 2:e4e3f3546619 created no changes to commit
- saved backup bundle to $TESTTMP/b3/.hg/strip-backup/e4e3f3546619-b0841178-rebase.hg (glob)
+ saved backup bundle to $TESTTMP/b3/.hg/strip-backup/e4e3f3546619-b0841178-rebase.hg
$ hg rebase -i
abort: interactive history editing is supported by the 'histedit' extension (see "hg --config extensions.histedit= help -e histedit")
--- a/tests/test-rebase-partial.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-rebase-partial.t Mon Jan 22 17:53:02 2018 -0500
@@ -11,7 +11,7 @@
> evolution.allowunstable=True
>
> [alias]
- > tglog = log -G --template "{rev}: {desc}"
+ > tglog = log -G --template "{rev}: {node|short} {desc}"
> EOF
$ rebasewithdag() {
@@ -37,15 +37,15 @@
> EOF
rebasing 2:b18e25de2cf5 "D" (D)
already rebased 3:26805aba1e60 "C" (C tip)
- o 4: D
+ o 4: fe3b4c6498fa D
|
- | o 3: C
+ | o 3: 26805aba1e60 C
|/
- | x 2: D
+ | x 2: b18e25de2cf5 D
| |
- o | 1: B
+ o | 1: 112478962961 B
|/
- o 0: A
+ o 0: 426bada5c675 A
Can collapse commits even if one is already in the right place
@@ -58,16 +58,16 @@
> EOF
rebasing 2:b18e25de2cf5 "D" (D)
rebasing 3:26805aba1e60 "C" (C tip)
- o 4: Collapsed revision
+ o 4: a2493f4ace65 Collapsed revision
| * D
| * C
- | x 3: C
+ | x 3: 26805aba1e60 C
|/
- | x 2: D
+ | x 2: b18e25de2cf5 D
| |
- o | 1: B
+ o | 1: 112478962961 B
|/
- o 0: A
+ o 0: 426bada5c675 A
Rebase with "holes". The commits after the hole should end up on the parent of
the hole (B below), not on top of the destination (A).
@@ -83,13 +83,13 @@
> EOF
already rebased 1:112478962961 "B" (B)
rebasing 3:f585351a92f8 "D" (D tip)
- o 4: D
+ o 4: 1e6da8103bc7 D
|
- | x 3: D
+ | x 3: f585351a92f8 D
| |
- | o 2: C
+ | o 2: 26805aba1e60 C
|/
- o 1: B
+ o 1: 112478962961 B
|
- o 0: A
+ o 0: 426bada5c675 A
--- a/tests/test-rebase-pull.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-rebase-pull.t Mon Jan 22 17:53:02 2018 -0500
@@ -4,7 +4,7 @@
> histedit=
>
> [alias]
- > tglog = log -G --template "{rev}: '{desc}' {branches}\n"
+ > tglog = log -G --template "{rev}: {node|short} '{desc}' {branches}\n"
> EOF
@@ -48,7 +48,7 @@
Now b has one revision to be pulled from a:
$ hg pull --rebase
- pulling from $TESTTMP/a (glob)
+ pulling from $TESTTMP/a
searching for changes
adding changesets
adding manifests
@@ -56,21 +56,21 @@
added 1 changesets with 1 changes to 1 files (+1 heads)
new changesets 77ae9631bcca
rebasing 2:ff8d69a621f9 "L1"
- saved backup bundle to $TESTTMP/b/.hg/strip-backup/ff8d69a621f9-160fa373-rebase.hg (glob)
+ saved backup bundle to $TESTTMP/b/.hg/strip-backup/ff8d69a621f9-160fa373-rebase.hg
$ hg tglog
- @ 3: 'L1'
+ @ 3: d80cc2da061e 'L1'
|
- o 2: 'R1'
+ o 2: 77ae9631bcca 'R1'
|
- o 1: 'C2'
+ o 1: 783333faa078 'C2'
|
- o 0: 'C1'
+ o 0: 05d58a0c15dd 'C1'
Re-run:
$ hg pull --rebase
- pulling from $TESTTMP/a (glob)
+ pulling from $TESTTMP/a
searching for changes
no changes found
@@ -103,9 +103,9 @@
$ hg clone --noupdate c d
$ cd d
$ hg tglog
- o 1: 'C2'
+ o 1: 783333faa078 'C2'
|
- o 0: 'C1'
+ o 0: 05d58a0c15dd 'C1'
$ hg update --quiet 0
$ echo M1 > M1
@@ -138,7 +138,7 @@
$ hg book norebase
$ hg pull --rebase
- pulling from $TESTTMP/a (glob)
+ pulling from $TESTTMP/a
searching for changes
adding changesets
adding manifests
@@ -150,14 +150,14 @@
updating bookmark norebase
$ hg tglog -l 1
- @ 2: 'R1'
+ @ 2: 77ae9631bcca 'R1'
|
~
pull --rebase --update should ignore --update:
$ hg pull --rebase --update
- pulling from $TESTTMP/a (glob)
+ pulling from $TESTTMP/a
searching for changes
no changes found
@@ -166,12 +166,12 @@
$ hg up -q 1
$ hg pull --rebase
- pulling from $TESTTMP/a (glob)
+ pulling from $TESTTMP/a
searching for changes
no changes found
$ hg tglog -l 1
- o 2: 'R1'
+ o 2: 77ae9631bcca 'R1'
|
~
@@ -181,11 +181,11 @@
$ cd a
$ hg tglog
- @ 2: 'R1'
+ @ 2: 77ae9631bcca 'R1'
|
- o 1: 'C2'
+ o 1: 783333faa078 'C2'
|
- o 0: 'C1'
+ o 0: 05d58a0c15dd 'C1'
$ echo R2 > R2
$ hg ci -Am R2
@@ -195,18 +195,18 @@
adding R3
$ cd ../c
$ hg tglog
- o 2: 'R1'
+ o 2: 77ae9631bcca 'R1'
|
- @ 1: 'C2'
+ @ 1: 783333faa078 'C2'
|
- o 0: 'C1'
+ o 0: 05d58a0c15dd 'C1'
$ echo L1 > L1
$ hg ci -Am L1
adding L1
created new head
$ hg pull --rev tip --rebase
- pulling from $TESTTMP/a (glob)
+ pulling from $TESTTMP/a
searching for changes
adding changesets
adding manifests
@@ -214,19 +214,19 @@
added 2 changesets with 2 changes to 2 files
new changesets 31cd3a05214e:770a61882ace
rebasing 3:ff8d69a621f9 "L1"
- saved backup bundle to $TESTTMP/c/.hg/strip-backup/ff8d69a621f9-160fa373-rebase.hg (glob)
+ saved backup bundle to $TESTTMP/c/.hg/strip-backup/ff8d69a621f9-160fa373-rebase.hg
$ hg tglog
- @ 5: 'L1'
+ @ 5: 518d153c0ba3 'L1'
|
- o 4: 'R3'
+ o 4: 770a61882ace 'R3'
|
- o 3: 'R2'
+ o 3: 31cd3a05214e 'R2'
|
- o 2: 'R1'
+ o 2: 77ae9631bcca 'R1'
|
- o 1: 'C2'
+ o 1: 783333faa078 'C2'
|
- o 0: 'C1'
+ o 0: 05d58a0c15dd 'C1'
pull --rebase works with bundle2 turned on
@@ -235,21 +235,21 @@
$ hg ci -Am R4
adding R4
$ hg tglog
- @ 5: 'R4'
+ @ 5: 00e3b7781125 'R4'
|
- o 4: 'R3'
+ o 4: 770a61882ace 'R3'
|
- o 3: 'R2'
+ o 3: 31cd3a05214e 'R2'
|
- o 2: 'R1'
+ o 2: 77ae9631bcca 'R1'
|
- o 1: 'C2'
+ o 1: 783333faa078 'C2'
|
- o 0: 'C1'
+ o 0: 05d58a0c15dd 'C1'
$ cd ../c
$ hg pull --rebase
- pulling from $TESTTMP/a (glob)
+ pulling from $TESTTMP/a
searching for changes
adding changesets
adding manifests
@@ -257,21 +257,21 @@
added 1 changesets with 1 changes to 1 files (+1 heads)
new changesets 00e3b7781125
rebasing 5:518d153c0ba3 "L1"
- saved backup bundle to $TESTTMP/c/.hg/strip-backup/518d153c0ba3-73407f14-rebase.hg (glob)
+ saved backup bundle to $TESTTMP/c/.hg/strip-backup/518d153c0ba3-73407f14-rebase.hg
$ hg tglog
- @ 6: 'L1'
+ @ 6: 0d0727eb7ce0 'L1'
|
- o 5: 'R4'
+ o 5: 00e3b7781125 'R4'
|
- o 4: 'R3'
+ o 4: 770a61882ace 'R3'
|
- o 3: 'R2'
+ o 3: 31cd3a05214e 'R2'
|
- o 2: 'R1'
+ o 2: 77ae9631bcca 'R1'
|
- o 1: 'C2'
+ o 1: 783333faa078 'C2'
|
- o 0: 'C1'
+ o 0: 05d58a0c15dd 'C1'
pull --rebase only update if there is nothing to rebase
@@ -281,19 +281,19 @@
$ hg ci -Am R5
adding R5
$ hg tglog
- @ 6: 'R5'
+ @ 6: 88dd24261747 'R5'
|
- o 5: 'R4'
+ o 5: 00e3b7781125 'R4'
|
- o 4: 'R3'
+ o 4: 770a61882ace 'R3'
|
- o 3: 'R2'
+ o 3: 31cd3a05214e 'R2'
|
- o 2: 'R1'
+ o 2: 77ae9631bcca 'R1'
|
- o 1: 'C2'
+ o 1: 783333faa078 'C2'
|
- o 0: 'C1'
+ o 0: 05d58a0c15dd 'C1'
$ cd ../c
$ echo L2 > L2
@@ -302,7 +302,7 @@
$ hg up 'desc(L1)'
0 files updated, 0 files merged, 1 files removed, 0 files unresolved
$ hg pull --rebase
- pulling from $TESTTMP/a (glob)
+ pulling from $TESTTMP/a
searching for changes
adding changesets
adding manifests
@@ -311,25 +311,25 @@
new changesets 88dd24261747
rebasing 6:0d0727eb7ce0 "L1"
rebasing 7:c1f58876e3bf "L2"
- saved backup bundle to $TESTTMP/c/.hg/strip-backup/0d0727eb7ce0-ef61ccb2-rebase.hg (glob)
+ saved backup bundle to $TESTTMP/c/.hg/strip-backup/0d0727eb7ce0-ef61ccb2-rebase.hg
$ hg tglog
- o 8: 'L2'
+ o 8: 6dc0ea5dcf55 'L2'
|
- @ 7: 'L1'
+ @ 7: 864e0a2d2614 'L1'
|
- o 6: 'R5'
+ o 6: 88dd24261747 'R5'
|
- o 5: 'R4'
+ o 5: 00e3b7781125 'R4'
|
- o 4: 'R3'
+ o 4: 770a61882ace 'R3'
|
- o 3: 'R2'
+ o 3: 31cd3a05214e 'R2'
|
- o 2: 'R1'
+ o 2: 77ae9631bcca 'R1'
|
- o 1: 'C2'
+ o 1: 783333faa078 'C2'
|
- o 0: 'C1'
+ o 0: 05d58a0c15dd 'C1'
pull --rebase update (no rebase) use proper update:
@@ -344,7 +344,7 @@
$ hg up 'desc(R5)'
0 files updated, 0 files merged, 1 files removed, 0 files unresolved
$ hg pull --rebase
- pulling from $TESTTMP/a (glob)
+ pulling from $TESTTMP/a
searching for changes
adding changesets
adding manifests
@@ -356,25 +356,25 @@
updated to "65bc164c1d9b: R6"
1 other heads for branch "default"
$ hg tglog
- @ 9: 'R6'
+ @ 9: 65bc164c1d9b 'R6'
|
- | o 8: 'L2'
+ | o 8: 6dc0ea5dcf55 'L2'
| |
- | o 7: 'L1'
+ | o 7: 864e0a2d2614 'L1'
|/
- o 6: 'R5'
+ o 6: 88dd24261747 'R5'
|
- o 5: 'R4'
+ o 5: 00e3b7781125 'R4'
|
- o 4: 'R3'
+ o 4: 770a61882ace 'R3'
|
- o 3: 'R2'
+ o 3: 31cd3a05214e 'R2'
|
- o 2: 'R1'
+ o 2: 77ae9631bcca 'R1'
|
- o 1: 'C2'
+ o 1: 783333faa078 'C2'
|
- o 0: 'C1'
+ o 0: 05d58a0c15dd 'C1'
Multiple pre-existing heads on the branch
@@ -394,7 +394,7 @@
$ hg up 'desc(L2)'
2 files updated, 0 files merged, 1 files removed, 0 files unresolved
$ hg pull --rebase
- pulling from $TESTTMP/a (glob)
+ pulling from $TESTTMP/a
searching for changes
adding changesets
adding manifests
@@ -421,7 +421,7 @@
$ hg up 'desc(L2)'
2 files updated, 0 files merged, 2 files removed, 0 files unresolved
$ hg pull --rebase
- pulling from $TESTTMP/a (glob)
+ pulling from $TESTTMP/a
searching for changes
adding changesets
adding manifests
@@ -430,31 +430,31 @@
new changesets f7d3e42052f9
rebasing 7:864e0a2d2614 "L1"
rebasing 8:6dc0ea5dcf55 "L2"
- saved backup bundle to $TESTTMP/c/.hg/strip-backup/864e0a2d2614-2f72c89c-rebase.hg (glob)
+ saved backup bundle to $TESTTMP/c/.hg/strip-backup/864e0a2d2614-2f72c89c-rebase.hg
$ hg tglog
- @ 12: 'L2'
+ @ 12: 3603a865eea0 'L2'
|
- o 11: 'L1'
+ o 11: bcc8a9cd04bf 'L1'
|
- o 10: 'R7'
+ o 10: f7d3e42052f9 'R7'
|
- | o 9: 'M1'
+ | o 9: 41fab4eef82f 'M1'
|/
- | o 8: 'B1' unrelatedbranch
+ | o 8: 39c381359968 'B1' unrelatedbranch
|/
- o 7: 'R6'
+ o 7: 65bc164c1d9b 'R6'
|
- o 6: 'R5'
+ o 6: 88dd24261747 'R5'
|
- o 5: 'R4'
+ o 5: 00e3b7781125 'R4'
|
- o 4: 'R3'
+ o 4: 770a61882ace 'R3'
|
- o 3: 'R2'
+ o 3: 31cd3a05214e 'R2'
|
- o 2: 'R1'
+ o 2: 77ae9631bcca 'R1'
|
- o 1: 'C2'
+ o 1: 783333faa078 'C2'
|
- o 0: 'C1'
+ o 0: 05d58a0c15dd 'C1'
--- a/tests/test-rebase-rename.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-rebase-rename.t Mon Jan 22 17:53:02 2018 -0500
@@ -3,7 +3,7 @@
> rebase=
>
> [alias]
- > tlog = log --template "{rev}: '{desc}' {branches}\n"
+ > tlog = log --template "{rev}: {node|short} '{desc}' {branches}\n"
> tglog = tlog --graph
> EOF
@@ -21,7 +21,7 @@
adding d/b
$ hg mv d d-renamed
- moving d/b to d-renamed/b (glob)
+ moving d/b to d-renamed/b
$ hg ci -m 'rename B'
$ hg up -q -C 1
@@ -34,19 +34,19 @@
created new head
$ hg tglog
- @ 3: 'rename A'
+ @ 3: 73a3ee40125d 'rename A'
|
- | o 2: 'rename B'
+ | o 2: 220d0626d185 'rename B'
|/
- o 1: 'B'
+ o 1: 3ab5da9a5c01 'B'
|
- o 0: 'A'
+ o 0: 1994f17a630e 'A'
Rename is tracked:
$ hg tlog -p --git -r tip
- 3: 'rename A'
+ 3: 73a3ee40125d 'rename A'
diff --git a/a b/a-renamed
rename from a
rename to a-renamed
@@ -61,22 +61,22 @@
$ hg rebase -s 3 -d 2
rebasing 3:73a3ee40125d "rename A" (tip)
- saved backup bundle to $TESTTMP/a/.hg/strip-backup/73a3ee40125d-1d78ebcf-rebase.hg (glob)
+ saved backup bundle to $TESTTMP/a/.hg/strip-backup/73a3ee40125d-1d78ebcf-rebase.hg
$ hg tglog
- @ 3: 'rename A'
+ @ 3: 032a9b75e83b 'rename A'
|
- o 2: 'rename B'
+ o 2: 220d0626d185 'rename B'
|
- o 1: 'B'
+ o 1: 3ab5da9a5c01 'B'
|
- o 0: 'A'
+ o 0: 1994f17a630e 'A'
Rename is not lost:
$ hg tlog -p --git -r tip
- 3: 'rename A'
+ 3: 032a9b75e83b 'rename A'
diff --git a/a b/a-renamed
rename from a
rename to a-renamed
@@ -132,18 +132,18 @@
created new head
$ hg tglog
- @ 3: 'copy A'
+ @ 3: 0a8162ff18a8 'copy A'
|
- | o 2: 'copy B'
+ | o 2: 39e588434882 'copy B'
|/
- o 1: 'B'
+ o 1: 6c81ed0049f8 'B'
|
- o 0: 'A'
+ o 0: 1994f17a630e 'A'
Copy is tracked:
$ hg tlog -p --git -r tip
- 3: 'copy A'
+ 3: 0a8162ff18a8 'copy A'
diff --git a/a b/a-copied
copy from a
copy to a-copied
@@ -152,22 +152,22 @@
$ hg rebase -s 3 -d 2
rebasing 3:0a8162ff18a8 "copy A" (tip)
- saved backup bundle to $TESTTMP/b/.hg/strip-backup/0a8162ff18a8-dd06302a-rebase.hg (glob)
+ saved backup bundle to $TESTTMP/b/.hg/strip-backup/0a8162ff18a8-dd06302a-rebase.hg
$ hg tglog
- @ 3: 'copy A'
+ @ 3: 98f6e6dbf45a 'copy A'
|
- o 2: 'copy B'
+ o 2: 39e588434882 'copy B'
|
- o 1: 'B'
+ o 1: 6c81ed0049f8 'B'
|
- o 0: 'A'
+ o 0: 1994f17a630e 'A'
Copy is not lost:
$ hg tlog -p --git -r tip
- 3: 'copy A'
+ 3: 98f6e6dbf45a 'copy A'
diff --git a/a b/a-copied
copy from a
copy to a-copied
@@ -223,20 +223,20 @@
created new head
$ hg tglog
- @ 4: 'Another unrelated change'
+ @ 4: b918d683b091 'Another unrelated change'
|
- | o 3: 'Rename file2 back to file1'
+ | o 3: 1ac17e43d8aa 'Rename file2 back to file1'
|/
- o 2: 'Unrelated change'
+ o 2: 480101d66d8d 'Unrelated change'
|
- o 1: 'Rename file1 to file2'
+ o 1: be44c61debd2 'Rename file1 to file2'
|
- o 0: 'Adding file1'
+ o 0: 8ce9a346991d 'Adding file1'
$ hg rebase -s 4 -d 3
rebasing 4:b918d683b091 "Another unrelated change" (tip)
- saved backup bundle to $TESTTMP/repo/.hg/strip-backup/b918d683b091-3024bc57-rebase.hg (glob)
+ saved backup bundle to $TESTTMP/repo/.hg/strip-backup/b918d683b091-3024bc57-rebase.hg
$ hg diff --stat -c .
unrelated.txt | 1 +
@@ -263,13 +263,13 @@
Note that there are four entries in the log for d
$ hg tglog --follow d
- @ 3: 'File d created as copy of c and modified'
+ @ 3: 421b7e82bb85 'File d created as copy of c and modified'
|
- o 2: 'File c created as copy of b and modified'
+ o 2: 327f772bc074 'File c created as copy of b and modified'
|
- o 1: 'File b created as copy of a and modified'
+ o 1: 79d255d24ad2 'File b created as copy of a and modified'
|
- o 0: 'File a created'
+ o 0: b220cd6d2326 'File a created'
Update back to before we performed copies, and inject an unrelated change.
$ hg update 0
@@ -287,19 +287,19 @@
rebasing 1:79d255d24ad2 "File b created as copy of a and modified"
rebasing 2:327f772bc074 "File c created as copy of b and modified"
rebasing 3:421b7e82bb85 "File d created as copy of c and modified"
- saved backup bundle to $TESTTMP/copy-gets-preserved/.hg/strip-backup/79d255d24ad2-a2265555-rebase.hg (glob)
+ saved backup bundle to $TESTTMP/copy-gets-preserved/.hg/strip-backup/79d255d24ad2-a2265555-rebase.hg
$ hg update 4
3 files updated, 0 files merged, 0 files removed, 0 files unresolved
There should still be four entries in the log for d
$ hg tglog --follow d
- @ 4: 'File d created as copy of c and modified'
+ @ 4: dbb9ba033561 'File d created as copy of c and modified'
|
- o 3: 'File c created as copy of b and modified'
+ o 3: af74b229bc02 'File c created as copy of b and modified'
|
- o 2: 'File b created as copy of a and modified'
+ o 2: 68bf06433839 'File b created as copy of a and modified'
:
- o 0: 'File a created'
+ o 0: b220cd6d2326 'File a created'
Same steps as above, but with --collapse on rebase to make sure the
copy records collapse correctly.
@@ -314,7 +314,7 @@
merging b and c to c
rebasing 4:dbb9ba033561 "File d created as copy of c and modified"
merging c and d to d
- saved backup bundle to $TESTTMP/copy-gets-preserved/.hg/strip-backup/68bf06433839-dde37595-rebase.hg (glob)
+ saved backup bundle to $TESTTMP/copy-gets-preserved/.hg/strip-backup/68bf06433839-dde37595-rebase.hg
$ hg co tip
3 files updated, 0 files merged, 0 files removed, 0 files unresolved
@@ -322,11 +322,11 @@
copy of 'a'.
$ hg tglog --follow d
- @ 3: 'Collapsed revision
+ @ 3: 5a46b94210e5 'Collapsed revision
: * File b created as copy of a and modified
: * File c created as copy of b and modified
: * File d created as copy of c and modified'
- o 0: 'File a created'
+ o 0: b220cd6d2326 'File a created'
$ cd ..
--- a/tests/test-rebase-scenario-global.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-rebase-scenario-global.t Mon Jan 22 17:53:02 2018 -0500
@@ -7,7 +7,7 @@
> publish=False
>
> [alias]
- > tglog = log -G --template "{rev}: '{desc}' {branches}\n"
+ > tglog = log -G --template "{rev}: {node|short} '{desc}' {branches}\n"
> EOF
@@ -34,21 +34,21 @@
$ cd a1
$ hg tglog
- @ 7: 'H'
+ @ 7: 02de42196ebe 'H'
|
- | o 6: 'G'
+ | o 6: eea13746799a 'G'
|/|
- o | 5: 'F'
+ o | 5: 24b6387c8c8c 'F'
| |
- | o 4: 'E'
+ | o 4: 9520eea781bc 'E'
|/
- | o 3: 'D'
+ | o 3: 32af7686d403 'D'
| |
- | o 2: 'C'
+ | o 2: 5fddd98957c8 'C'
| |
- | o 1: 'B'
+ | o 1: 42ccdea3bb16 'B'
|/
- o 0: 'A'
+ o 0: cd010b8cd998 'A'
$ hg status --rev "3^1" --rev 3
@@ -66,27 +66,27 @@
HG: user: Nicolas Dumazet <nicdumz.commits@gmail.com>
HG: branch 'default'
HG: added D
- saved backup bundle to $TESTTMP/a1/.hg/strip-backup/32af7686d403-6f7dface-rebase.hg (glob)
+ saved backup bundle to $TESTTMP/a1/.hg/strip-backup/32af7686d403-6f7dface-rebase.hg
$ cat D.orig
collide
$ rm D.orig
$ hg tglog
- o 7: 'D'
+ o 7: 1619f02ff7dd 'D'
|
- @ 6: 'H'
+ @ 6: 02de42196ebe 'H'
|
- | o 5: 'G'
+ | o 5: eea13746799a 'G'
|/|
- o | 4: 'F'
+ o | 4: 24b6387c8c8c 'F'
| |
- | o 3: 'E'
+ | o 3: 9520eea781bc 'E'
|/
- | o 2: 'C'
+ | o 2: 5fddd98957c8 'C'
| |
- | o 1: 'B'
+ | o 1: 42ccdea3bb16 'B'
|/
- o 0: 'A'
+ o 0: cd010b8cd998 'A'
$ cd ..
@@ -101,27 +101,27 @@
$ HGEDITOR=cat hg rebase -s 3 -d 5 --config merge.checkunknown=ignore
rebasing 3:32af7686d403 "D"
- saved backup bundle to $TESTTMP/a2/.hg/strip-backup/32af7686d403-6f7dface-rebase.hg (glob)
+ saved backup bundle to $TESTTMP/a2/.hg/strip-backup/32af7686d403-6f7dface-rebase.hg
$ cat D.orig
collide
$ rm D.orig
$ hg tglog
- o 7: 'D'
+ o 7: 2107530e74ab 'D'
|
- | @ 6: 'H'
+ | @ 6: 02de42196ebe 'H'
|/
- | o 5: 'G'
+ | o 5: eea13746799a 'G'
|/|
- o | 4: 'F'
+ o | 4: 24b6387c8c8c 'F'
| |
- | o 3: 'E'
+ | o 3: 9520eea781bc 'E'
|/
- | o 2: 'C'
+ | o 2: 5fddd98957c8 'C'
| |
- | o 1: 'B'
+ | o 1: 42ccdea3bb16 'B'
|/
- o 0: 'A'
+ o 0: cd010b8cd998 'A'
$ cd ..
@@ -139,24 +139,24 @@
rebasing 4:9520eea781bc "E"
rebasing 6:eea13746799a "G"
note: rebase of 6:eea13746799a created no changes to commit
- saved backup bundle to $TESTTMP/a3/.hg/strip-backup/9520eea781bc-fcd8edd4-rebase.hg (glob)
+ saved backup bundle to $TESTTMP/a3/.hg/strip-backup/9520eea781bc-fcd8edd4-rebase.hg
$ f E.orig
E.orig: file not found
$ hg tglog
- o 6: 'E'
+ o 6: 9f8b8ec77260 'E'
|
- @ 5: 'H'
+ @ 5: 02de42196ebe 'H'
|
- o 4: 'F'
+ o 4: 24b6387c8c8c 'F'
|
- | o 3: 'D'
+ | o 3: 32af7686d403 'D'
| |
- | o 2: 'C'
+ | o 2: 5fddd98957c8 'C'
| |
- | o 1: 'B'
+ | o 1: 42ccdea3bb16 'B'
|/
- o 0: 'A'
+ o 0: cd010b8cd998 'A'
$ cd ..
@@ -171,22 +171,22 @@
rebasing 6:eea13746799a "G"
note: rebase of 6:eea13746799a created no changes to commit
rebasing 7:02de42196ebe "H" (tip)
- saved backup bundle to $TESTTMP/a4/.hg/strip-backup/24b6387c8c8c-c3fe765d-rebase.hg (glob)
+ saved backup bundle to $TESTTMP/a4/.hg/strip-backup/24b6387c8c8c-c3fe765d-rebase.hg
$ hg tglog
- @ 6: 'H'
+ @ 6: e9240aeaa6ad 'H'
|
- o 5: 'F'
+ o 5: 5d0ccadb6e3e 'F'
|
- o 4: 'E'
+ o 4: 9520eea781bc 'E'
|
- | o 3: 'D'
+ | o 3: 32af7686d403 'D'
| |
- | o 2: 'C'
+ | o 2: 5fddd98957c8 'C'
| |
- | o 1: 'B'
+ | o 1: 42ccdea3bb16 'B'
|/
- o 0: 'A'
+ o 0: cd010b8cd998 'A'
$ cd ..
@@ -198,24 +198,24 @@
$ hg rebase -s 6 -d 7
rebasing 6:eea13746799a "G"
- saved backup bundle to $TESTTMP/a5/.hg/strip-backup/eea13746799a-883828ed-rebase.hg (glob)
+ saved backup bundle to $TESTTMP/a5/.hg/strip-backup/eea13746799a-883828ed-rebase.hg
$ hg tglog
- o 7: 'G'
+ o 7: 397834907a90 'G'
|\
- | @ 6: 'H'
+ | @ 6: 02de42196ebe 'H'
| |
- | o 5: 'F'
+ | o 5: 24b6387c8c8c 'F'
| |
- o | 4: 'E'
+ o | 4: 9520eea781bc 'E'
|/
- | o 3: 'D'
+ | o 3: 32af7686d403 'D'
| |
- | o 2: 'C'
+ | o 2: 5fddd98957c8 'C'
| |
- | o 1: 'B'
+ | o 1: 42ccdea3bb16 'B'
|/
- o 0: 'A'
+ o 0: cd010b8cd998 'A'
$ cd ..
@@ -229,24 +229,24 @@
rebasing 5:24b6387c8c8c "F"
rebasing 6:eea13746799a "G"
rebasing 7:02de42196ebe "H" (tip)
- saved backup bundle to $TESTTMP/a6/.hg/strip-backup/24b6387c8c8c-c3fe765d-rebase.hg (glob)
+ saved backup bundle to $TESTTMP/a6/.hg/strip-backup/24b6387c8c8c-c3fe765d-rebase.hg
$ hg tglog
- @ 7: 'H'
+ @ 7: c87be72f9641 'H'
|
- | o 6: 'G'
+ | o 6: 17badd73d4f1 'G'
|/|
- o | 5: 'F'
+ o | 5: 74fb9ed646c4 'F'
| |
- | o 4: 'E'
+ | o 4: 9520eea781bc 'E'
| |
- | | o 3: 'D'
+ | | o 3: 32af7686d403 'D'
| | |
- +---o 2: 'C'
+ +---o 2: 5fddd98957c8 'C'
| |
- o | 1: 'B'
+ o | 1: 42ccdea3bb16 'B'
|/
- o 0: 'A'
+ o 0: cd010b8cd998 'A'
$ cd ..
@@ -302,23 +302,23 @@
$ hg rebase -d 0 -s 2
rebasing 2:5fddd98957c8 "C"
rebasing 3:32af7686d403 "D"
- saved backup bundle to $TESTTMP/a7/.hg/strip-backup/5fddd98957c8-f9244fa1-rebase.hg (glob)
+ saved backup bundle to $TESTTMP/a7/.hg/strip-backup/5fddd98957c8-f9244fa1-rebase.hg
$ hg tglog
- o 7: 'D'
+ o 7: c9659aac0000 'D'
|
- o 6: 'C'
+ o 6: e1c4361dd923 'C'
|
- | @ 5: 'H'
+ | @ 5: 02de42196ebe 'H'
| |
- | | o 4: 'G'
+ | | o 4: eea13746799a 'G'
| |/|
- | o | 3: 'F'
+ | o | 3: 24b6387c8c8c 'F'
|/ /
- | o 2: 'E'
+ | o 2: 9520eea781bc 'E'
|/
- | o 1: 'B'
+ | o 1: 42ccdea3bb16 'B'
|/
- o 0: 'A'
+ o 0: cd010b8cd998 'A'
Check rebasing public changeset
@@ -346,31 +346,31 @@
5
$ hg rebase -s9 -d0
rebasing 9:2b23e52411f4 "D" (tip)
- saved backup bundle to $TESTTMP/a7/.hg/strip-backup/2b23e52411f4-f942decf-rebase.hg (glob)
+ saved backup bundle to $TESTTMP/a7/.hg/strip-backup/2b23e52411f4-f942decf-rebase.hg
$ hg id -n # check we updated back to parent
5
$ hg log --template "{phase}\n" -r 9
draft
$ hg rebase -s9 -d1
rebasing 9:2cb10d0cfc6c "D" (tip)
- saved backup bundle to $TESTTMP/a7/.hg/strip-backup/2cb10d0cfc6c-ddb0f256-rebase.hg (glob)
+ saved backup bundle to $TESTTMP/a7/.hg/strip-backup/2cb10d0cfc6c-ddb0f256-rebase.hg
$ hg log --template "{phase}\n" -r 9
draft
$ hg phase --force --secret 9
$ hg rebase -s9 -d0
rebasing 9:c5b12b67163a "D" (tip)
- saved backup bundle to $TESTTMP/a7/.hg/strip-backup/c5b12b67163a-4e372053-rebase.hg (glob)
+ saved backup bundle to $TESTTMP/a7/.hg/strip-backup/c5b12b67163a-4e372053-rebase.hg
$ hg log --template "{phase}\n" -r 9
secret
$ hg rebase -s9 -d1
rebasing 9:2a0524f868ac "D" (tip)
- saved backup bundle to $TESTTMP/a7/.hg/strip-backup/2a0524f868ac-cefd8574-rebase.hg (glob)
+ saved backup bundle to $TESTTMP/a7/.hg/strip-backup/2a0524f868ac-cefd8574-rebase.hg
$ hg log --template "{phase}\n" -r 9
secret
Source phase lower than destination phase: new changeset get the phase of destination:
$ hg rebase -s8 -d9
rebasing 8:6d4f22462821 "C"
- saved backup bundle to $TESTTMP/a7/.hg/strip-backup/6d4f22462821-3441f70b-rebase.hg (glob)
+ saved backup bundle to $TESTTMP/a7/.hg/strip-backup/6d4f22462821-3441f70b-rebase.hg
$ hg log --template "{phase}\n" -r 'rev(9)'
secret
@@ -399,7 +399,7 @@
$ hg rebase -s 1 -d 2
rebasing 1:d2ae7f538514 "b"
- saved backup bundle to $TESTTMP/issue5678/.hg/strip-backup/d2ae7f538514-2953539b-rebase.hg (glob)
+ saved backup bundle to $TESTTMP/issue5678/.hg/strip-backup/d2ae7f538514-2953539b-rebase.hg
$ hg log -G -T '{rev}:{node|shortest} {phase} {desc}\n'
o 2:c882 draft b
|
@@ -424,23 +424,23 @@
new changesets 9ae2ed22e576:479ddb54a924
(run 'hg heads' to see heads, 'hg merge' to merge)
$ hg tglog
- o 8: 'I'
+ o 8: 479ddb54a924 'I'
|
- o 7: 'H'
+ o 7: 72434a4e60b0 'H'
|
- o 6: 'G'
+ o 6: 3d8a618087a7 'G'
|
- | o 5: 'F'
+ | o 5: 41bfcc75ed73 'F'
| |
- | o 4: 'E'
+ | o 4: c01897464e7f 'E'
|/
- o 3: 'D'
+ o 3: ffd453c31098 'D'
|
- o 2: 'C'
+ o 2: c9e50f6cdc55 'C'
|
- | o 1: 'B'
+ | o 1: 8fd0f7e49f53 'B'
|/
- o 0: 'A'
+ o 0: 9ae2ed22e576 'A'
$ cd ..
@@ -462,33 +462,33 @@
rebasing 7:72434a4e60b0 "H"
rebasing 8:479ddb54a924 "I" (tip)
$ hg tglog
- o 13: 'I'
+ o 13: 9bf1d9358a90 'I'
|
- o 12: 'H'
+ o 12: 274623a778d4 'H'
|
- o 11: 'G'
+ o 11: ab8c8617c8e8 'G'
|
- o 10: 'D'
+ o 10: c8cbf59f70da 'D'
|
- o 9: 'C'
+ o 9: 563e4faab485 'C'
|
- | o 8: 'I'
+ | o 8: 479ddb54a924 'I'
| |
- | o 7: 'H'
+ | o 7: 72434a4e60b0 'H'
| |
- | o 6: 'G'
+ | o 6: 3d8a618087a7 'G'
| |
- | | o 5: 'F'
+ | | o 5: 41bfcc75ed73 'F'
| | |
- | | o 4: 'E'
+ | | o 4: c01897464e7f 'E'
| |/
- | o 3: 'D'
+ | o 3: ffd453c31098 'D'
| |
- | o 2: 'C'
+ | o 2: c9e50f6cdc55 'C'
| |
- o | 1: 'B'
+ o | 1: 8fd0f7e49f53 'B'
|/
- o 0: 'A'
+ o 0: 9ae2ed22e576 'A'
$ cd ..
@@ -507,31 +507,31 @@
rebasing 7:72434a4e60b0 "H"
rebasing 8:479ddb54a924 "I" (tip)
$ hg tglog
- o 12: 'I'
+ o 12: 9d7da0053b1c 'I'
|
- o 11: 'H'
+ o 11: 8fbd00952cbc 'H'
|
- o 10: 'G'
+ o 10: 51d434a615ee 'G'
|
- o 9: 'D'
+ o 9: a9c125634b0b 'D'
|
- | o 8: 'I'
+ | o 8: 479ddb54a924 'I'
| |
- | o 7: 'H'
+ | o 7: 72434a4e60b0 'H'
| |
- | o 6: 'G'
+ | o 6: 3d8a618087a7 'G'
| |
- | | o 5: 'F'
+ | | o 5: 41bfcc75ed73 'F'
| | |
- | | o 4: 'E'
+ | | o 4: c01897464e7f 'E'
| |/
- | o 3: 'D'
+ | o 3: ffd453c31098 'D'
| |
- | o 2: 'C'
+ | o 2: c9e50f6cdc55 'C'
| |
- o | 1: 'B'
+ o | 1: 8fd0f7e49f53 'B'
|/
- o 0: 'A'
+ o 0: 9ae2ed22e576 'A'
$ cd ..
@@ -549,29 +549,29 @@
rebasing 6:3d8a618087a7 "G"
rebasing 7:72434a4e60b0 "H"
$ hg tglog
- o 11: 'H'
+ o 11: 8fbd00952cbc 'H'
|
- o 10: 'G'
+ o 10: 51d434a615ee 'G'
|
- o 9: 'D'
+ o 9: a9c125634b0b 'D'
|
- | o 8: 'I'
+ | o 8: 479ddb54a924 'I'
| |
- | o 7: 'H'
+ | o 7: 72434a4e60b0 'H'
| |
- | o 6: 'G'
+ | o 6: 3d8a618087a7 'G'
| |
- | | o 5: 'F'
+ | | o 5: 41bfcc75ed73 'F'
| | |
- | | o 4: 'E'
+ | | o 4: c01897464e7f 'E'
| |/
- | o 3: 'D'
+ | o 3: ffd453c31098 'D'
| |
- | o 2: 'C'
+ | o 2: c9e50f6cdc55 'C'
| |
- o | 1: 'B'
+ o | 1: 8fd0f7e49f53 'B'
|/
- o 0: 'A'
+ o 0: 9ae2ed22e576 'A'
$ cd ..
@@ -591,33 +591,33 @@
rebasing 6:3d8a618087a7 "G"
rebasing 7:72434a4e60b0 "H"
$ hg tglog
- o 13: 'H'
+ o 13: 8fbd00952cbc 'H'
|
- o 12: 'G'
+ o 12: 51d434a615ee 'G'
|
- | o 11: 'F'
+ | o 11: df23d8bda0b7 'F'
| |
- | o 10: 'E'
+ | o 10: 47b7889448ff 'E'
|/
- o 9: 'D'
+ o 9: a9c125634b0b 'D'
|
- | o 8: 'I'
+ | o 8: 479ddb54a924 'I'
| |
- | o 7: 'H'
+ | o 7: 72434a4e60b0 'H'
| |
- | o 6: 'G'
+ | o 6: 3d8a618087a7 'G'
| |
- | | o 5: 'F'
+ | | o 5: 41bfcc75ed73 'F'
| | |
- | | o 4: 'E'
+ | | o 4: c01897464e7f 'E'
| |/
- | o 3: 'D'
+ | o 3: ffd453c31098 'D'
| |
- | o 2: 'C'
+ | o 2: c9e50f6cdc55 'C'
| |
- o | 1: 'B'
+ o | 1: 8fd0f7e49f53 'B'
|/
- o 0: 'A'
+ o 0: 9ae2ed22e576 'A'
$ cd ..
@@ -632,25 +632,25 @@
rebasing 6:3d8a618087a7 "G"
rebasing 7:72434a4e60b0 "H"
rebasing 8:479ddb54a924 "I" (tip)
- saved backup bundle to $TESTTMP/ah5/.hg/strip-backup/3d8a618087a7-b4f73f31-rebase.hg (glob)
+ saved backup bundle to $TESTTMP/ah5/.hg/strip-backup/3d8a618087a7-b4f73f31-rebase.hg
$ hg tglog
- o 8: 'I'
+ o 8: fcb52e68a694 'I'
|
- o 7: 'H'
+ o 7: 77bd65cd7600 'H'
|
- o 6: 'G'
+ o 6: 12d0e738fb18 'G'
|
- | o 5: 'F'
+ | o 5: 41bfcc75ed73 'F'
| |
- | o 4: 'E'
+ | o 4: c01897464e7f 'E'
| |
- | o 3: 'D'
+ | o 3: ffd453c31098 'D'
|/
- o 2: 'C'
+ o 2: c9e50f6cdc55 'C'
|
- | o 1: 'B'
+ | o 1: 8fd0f7e49f53 'B'
|/
- o 0: 'A'
+ o 0: 9ae2ed22e576 'A'
$ cd ..
@@ -667,25 +667,25 @@
rebasing 6:3d8a618087a7 "G"
rebasing 7:72434a4e60b0 "H"
rebasing 8:479ddb54a924 "I" (tip)
- saved backup bundle to $TESTTMP/ah6/.hg/strip-backup/3d8a618087a7-aae93a24-rebase.hg (glob)
+ saved backup bundle to $TESTTMP/ah6/.hg/strip-backup/3d8a618087a7-aae93a24-rebase.hg
$ hg tglog
- o 8: 'I'
+ o 8: 9136df9a87cf 'I'
|
- o 7: 'H'
+ o 7: 23e8f30da832 'H'
|
- o 6: 'G'
+ o 6: b0efe8534e8b 'G'
|
- | o 5: 'F'
+ | o 5: 6eb5b496ab79 'F'
| |
- | o 4: 'E'
+ | o 4: d15eade9b0b1 'E'
|/
- | o 3: 'D'
+ | o 3: ffd453c31098 'D'
| |
- | o 2: 'C'
+ | o 2: c9e50f6cdc55 'C'
| |
- o | 1: 'B'
+ o | 1: 8fd0f7e49f53 'B'
|/
- o 0: 'A'
+ o 0: 9ae2ed22e576 'A'
$ cd ..
@@ -709,34 +709,34 @@
$ hg add K
$ hg commit -m K
$ hg tglog
- @ 10: 'K'
+ @ 10: 23a4ace37988 'K'
|
- o 9: 'J'
+ o 9: 1301922eeb0c 'J'
|
- | o 8: 'I'
+ | o 8: e7ec4e813ba6 'I'
| |
- | o 7: 'H'
+ | o 7: 02de42196ebe 'H'
| |
- +---o 6: 'G'
+ +---o 6: eea13746799a 'G'
| |/
- | o 5: 'F'
+ | o 5: 24b6387c8c8c 'F'
| |
- o | 4: 'E'
+ o | 4: 9520eea781bc 'E'
|/
- | o 3: 'D'
+ | o 3: 32af7686d403 'D'
| |
- | o 2: 'C'
+ | o 2: 5fddd98957c8 'C'
| |
- | o 1: 'B'
+ | o 1: 42ccdea3bb16 'B'
|/
- o 0: 'A'
+ o 0: cd010b8cd998 'A'
(actual test)
$ hg rebase --dest 'desc(G)' --rev 'desc(K) + desc(I)'
rebasing 8:e7ec4e813ba6 "I"
rebasing 10:23a4ace37988 "K" (tip)
- saved backup bundle to $TESTTMP/a8/.hg/strip-backup/23a4ace37988-b06984b3-rebase.hg (glob)
+ saved backup bundle to $TESTTMP/a8/.hg/strip-backup/23a4ace37988-b06984b3-rebase.hg
$ hg log --rev 'children(desc(G))'
changeset: 9:adb617877056
parent: 6:eea13746799a
@@ -752,27 +752,27 @@
summary: K
$ hg tglog
- @ 10: 'K'
+ @ 10: 882431a34a0e 'K'
|
- | o 9: 'I'
+ | o 9: adb617877056 'I'
|/
- | o 8: 'J'
+ | o 8: 1301922eeb0c 'J'
| |
- | | o 7: 'H'
+ | | o 7: 02de42196ebe 'H'
| | |
- o---+ 6: 'G'
+ o---+ 6: eea13746799a 'G'
|/ /
- | o 5: 'F'
+ | o 5: 24b6387c8c8c 'F'
| |
- o | 4: 'E'
+ o | 4: 9520eea781bc 'E'
|/
- | o 3: 'D'
+ | o 3: 32af7686d403 'D'
| |
- | o 2: 'C'
+ | o 2: 5fddd98957c8 'C'
| |
- | o 1: 'B'
+ | o 1: 42ccdea3bb16 'B'
|/
- o 0: 'A'
+ o 0: cd010b8cd998 'A'
Test that rebase is not confused by $CWD disappearing during rebase (issue4121)
@@ -803,7 +803,7 @@
current directory was removed (rmcwd !)
(consider changing to repo root: $TESTTMP/cwd-vanish) (rmcwd !)
rebasing 3:a7d6f3a00bf3 "second source with subdir" (tip)
- saved backup bundle to $TESTTMP/cwd-vanish/.hg/strip-backup/779a07b1b7a0-853e0073-rebase.hg (glob)
+ saved backup bundle to $TESTTMP/cwd-vanish/.hg/strip-backup/779a07b1b7a0-853e0073-rebase.hg
Get back to the root of cwd-vanish. Note that even though `cd ..`
works on most systems, it does not work on FreeBSD 10, so we use an
@@ -833,19 +833,19 @@
created new head
$ hg tglog
- @ 6: 'G'
+ @ 6: 124bb27b6f28 'G'
|
- | o 5: 'F'
+ | o 5: 412b391de760 'F'
| |
- | | o 4: 'E'
+ | | o 4: 82ae8dc7a9b7 'E'
| | |
- | o | 3: 'D'
+ | o | 3: ab709c9f7171 'D'
| | |
- | | o 2: 'C'
+ | | o 2: d84f5cfaaf14 'C'
| |/
- | o 1: 'B'
+ | o 1: 76035bbd54bd 'B'
|/
- o 0: 'A'
+ o 0: 216878401574 'A'
$ hg rebase -s 1 -d 6
@@ -854,22 +854,22 @@
rebasing 4:82ae8dc7a9b7 "E"
rebasing 3:ab709c9f7171 "D"
rebasing 5:412b391de760 "F"
- saved backup bundle to $TESTTMP/order/.hg/strip-backup/76035bbd54bd-e341bc99-rebase.hg (glob)
+ saved backup bundle to $TESTTMP/order/.hg/strip-backup/76035bbd54bd-e341bc99-rebase.hg
$ hg tglog
- o 6: 'F'
+ o 6: 31884cfb735e 'F'
|
- o 5: 'D'
+ o 5: 6d89fa5b0909 'D'
|
- | o 4: 'E'
+ | o 4: de64d97c697b 'E'
| |
- | o 3: 'C'
+ | o 3: b18e4d2d0aa1 'C'
|/
- o 2: 'B'
+ o 2: 0983daf9ff6a 'B'
|
- @ 1: 'G'
+ @ 1: 124bb27b6f28 'G'
|
- o 0: 'A'
+ o 0: 216878401574 'A'
Test experimental revset
--- a/tests/test-rebase-templates.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-rebase-templates.t Mon Jan 22 17:53:02 2018 -0500
@@ -42,3 +42,16 @@
$ hg rebase -s 1 -d 5 -q -T "{nodechanges|json}"
{"29becc82797a4bc11ec8880b58eaecd2ab3e7760": ["d9d6773efc831c274eace04bc13e8e6412517139"]} (no-eol)
+
+ $ hg log -G -T "{rev}:{node|short} {desc}"
+ o 6:d9d6773efc83 Added b
+ |
+ @ 5:df21b32134ba Added d
+ |
+ o 4:849767420fd5 Added c
+ |
+ o 0:18d04c59bb5d Added a
+
+
+ $ hg rebase -s 6 -d 4 -q -T "{nodechanges % '{oldnode}:{newnodes % ' {node} '}'}"
+ d9d6773efc831c274eace04bc13e8e6412517139: f48cd65c6dc3d2acb55da54402a5b029546e546f (no-eol)
--- a/tests/test-relink.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-relink.t Mon Jan 22 17:53:02 2018 -0500
@@ -43,7 +43,7 @@
don't sit forever trying to double-lock the source repo
$ hg relink .
- relinking $TESTTMP/repo/.hg/store to $TESTTMP/repo/.hg/store (glob)
+ relinking $TESTTMP/repo/.hg/store to $TESTTMP/repo/.hg/store
there is nothing to relink
--- a/tests/test-remove.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-remove.t Mon Jan 22 17:53:02 2018 -0500
@@ -189,9 +189,9 @@
\r (no-eol) (esc)
0 files updated, 0 files merged, 0 files removed, 0 files unresolved
-21 state clean, options -A
+21 state clean, options -Av
- $ remove -A foo
+ $ remove -Av foo
\r (no-eol) (esc)
deleting [===========================================>] 1/1\r (no-eol) (esc)
\r (no-eol) (esc)
@@ -205,10 +205,10 @@
./foo
0 files updated, 0 files merged, 0 files removed, 0 files unresolved
-22 state modified, options -A
+22 state modified, options -Av
$ echo b >> foo
- $ remove -A foo
+ $ remove -Av foo
\r (no-eol) (esc)
deleting [===========================================>] 1/1\r (no-eol) (esc)
\r (no-eol) (esc)
@@ -322,8 +322,8 @@
\r (no-eol) (esc)
deleting [===========================================>] 2/2\r (no-eol) (esc)
\r (no-eol) (esc)
- removing test/bar (glob)
- removing test/foo (glob)
+ removing test/bar
+ removing test/foo
exit code: 0
R test/bar
R test/foo
@@ -346,8 +346,8 @@
\r (no-eol) (esc)
deleting [===========================================>] 2/2\r (no-eol) (esc)
\r (no-eol) (esc)
- removing test/bar (glob)
- removing test/foo (glob)
+ removing test/bar
+ removing test/foo
exit code: 0
R test/bar
R test/foo
@@ -357,9 +357,32 @@
\r (no-eol) (esc)
2 files updated, 0 files merged, 0 files removed, 0 files unresolved
-dir, options -A
+dir, options -Av
$ rm test/bar
+ $ remove -Av test
+ \r (no-eol) (esc)
+ deleting [===========================================>] 1/1\r (no-eol) (esc)
+ \r (no-eol) (esc)
+ \r (no-eol) (esc)
+ skipping [===========================================>] 1/1\r (no-eol) (esc)
+ \r (no-eol) (esc)
+ \r (no-eol) (esc)
+ deleting [===========================================>] 1/1\r (no-eol) (esc)
+ \r (no-eol) (esc)
+ removing test/bar
+ not removing test/foo: file still exists
+ exit code: 1
+ R test/bar
+ ./foo
+ ./test/foo
+ \r (no-eol) (esc)
+ updating [===========================================>] 1/1\r (no-eol) (esc)
+ \r (no-eol) (esc)
+ 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+
+dir, options -A <dir>
+ $ rm test/bar
$ remove -A test
\r (no-eol) (esc)
deleting [===========================================>] 1/1\r (no-eol) (esc)
@@ -370,8 +393,27 @@
\r (no-eol) (esc)
deleting [===========================================>] 1/1\r (no-eol) (esc)
\r (no-eol) (esc)
- removing test/bar (glob)
- not removing test/foo: file still exists (glob)
+ removing test/bar
+ exit code: 1
+ R test/bar
+ ./foo
+ ./test/foo
+ \r (no-eol) (esc)
+ updating [===========================================>] 1/1\r (no-eol) (esc)
+ \r (no-eol) (esc)
+ 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+
+without any files/dirs, options -A
+ $ rm test/bar
+ $ remove -A
+ \r (no-eol) (esc)
+ skipping [=====================> ] 1/2\r (no-eol) (esc)
+ skipping [===========================================>] 2/2\r (no-eol) (esc)
+ \r (no-eol) (esc)
+ \r (no-eol) (esc)
+ deleting [===========================================>] 1/1\r (no-eol) (esc)
+ \r (no-eol) (esc)
+ removing test/bar
exit code: 1
R test/bar
./foo
@@ -394,8 +436,8 @@
\r (no-eol) (esc)
deleting [===========================================>] 2/2\r (no-eol) (esc)
\r (no-eol) (esc)
- removing test/bar (glob)
- removing test/foo (glob)
+ removing test/bar
+ removing test/foo
exit code: 0
R test/bar
R test/foo
@@ -421,7 +463,7 @@
\r (no-eol) (esc)
deleting [===========================================>] 1/1\r (no-eol) (esc)
\r (no-eol) (esc)
- removing issue1861/b/c/y (glob)
+ removing issue1861/b/c/y
$ hg ci -m remove
$ ls issue1861
x
@@ -455,7 +497,7 @@
\r (no-eol) (esc)
deleting [===========================================>] 1/1\r (no-eol) (esc)
\r (no-eol) (esc)
- removing d1/a (glob)
+ removing d1/a
$ hg rm --after nosuch
nosuch: * (glob)
--- a/tests/test-rename-dir-merge.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-rename-dir-merge.t Mon Jan 22 17:53:02 2018 -0500
@@ -11,8 +11,8 @@
$ hg co -C 0
0 files updated, 0 files merged, 0 files removed, 0 files unresolved
$ hg mv a b
- moving a/a to b/a (glob)
- moving a/b to b/b (glob)
+ moving a/a to b/a
+ moving a/b to b/b
$ hg ci -m "1 mv a/ b/"
$ hg co -C 0
@@ -48,7 +48,7 @@
b/b: remote created -> g
getting b/b
b/c: remote directory rename - move from a/c -> dm
- moving a/c to b/c (glob)
+ moving a/c to b/c
3 files updated, 0 files merged, 2 files removed, 0 files unresolved
(branch merge, don't forget to commit)
@@ -65,7 +65,7 @@
? a/d
$ hg ci -m "3 merge 2+1"
$ hg debugrename b/c
- b/c renamed from a/c:354ae8da6e890359ef49ade27b68bbc361f3ca88 (glob)
+ b/c renamed from a/c:354ae8da6e890359ef49ade27b68bbc361f3ca88
$ hg co -C 1
0 files updated, 0 files merged, 1 files removed, 0 files unresolved
@@ -100,7 +100,7 @@
$ hg ci -m "4 merge 1+2"
created new head
$ hg debugrename b/c
- b/c renamed from a/c:354ae8da6e890359ef49ade27b68bbc361f3ca88 (glob)
+ b/c renamed from a/c:354ae8da6e890359ef49ade27b68bbc361f3ca88
Local directory rename with conflicting file added in remote source directory
and untracked in local target directory.
@@ -134,7 +134,7 @@
merging b/c and a/c to b/c
warning: conflicts while merging b/c! (edit, then use 'hg resolve --mark')
0 files updated, 0 files merged, 0 files removed, 1 files unresolved
- use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon
+ use 'hg resolve' to retry unresolved file merges or 'hg merge --abort' to abandon
[1]
$ hg st -A
M b/c
@@ -164,7 +164,7 @@
merging a/c and b/c to b/c
warning: conflicts while merging b/c! (edit, then use 'hg resolve --mark')
2 files updated, 0 files merged, 2 files removed, 1 files unresolved
- use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon
+ use 'hg resolve' to retry unresolved file merges or 'hg merge --abort' to abandon
[1]
$ hg st -A
M b/a
@@ -191,7 +191,7 @@
$ mkdir a
$ echo foo > a/f
$ hg add a
- adding a/f (glob)
+ adding a/f
$ hg ci -m "a/f == foo"
$ cd ..
@@ -200,7 +200,7 @@
1 files updated, 0 files merged, 0 files removed, 0 files unresolved
$ cd r2
$ hg mv a b
- moving a/f to b/f (glob)
+ moving a/f to b/f
$ echo foo1 > b/f
$ hg ci -m" a -> b, b/f == foo1"
$ cd ..
@@ -209,7 +209,7 @@
$ mkdir a/aa
$ echo bar > a/aa/g
$ hg add a/aa
- adding a/aa/g (glob)
+ adding a/aa/g
$ hg ci -m "a/aa/g"
$ hg pull ../r2
pulling from ../r2
--- a/tests/test-rename-merge2.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-rename-merge2.t Mon Jan 22 17:53:02 2018 -0500
@@ -713,7 +713,7 @@
launching merge tool: * ../merge *$TESTTMP/t/t/rev* * * (glob)
merge tool returned: 0
0 files updated, 2 files merged, 0 files removed, 1 files unresolved
- use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon
+ use 'hg resolve' to retry unresolved file merges or 'hg merge --abort' to abandon
--------------
M a
M b
@@ -758,7 +758,7 @@
launching merge tool: * ../merge *$TESTTMP/t/t/rev* * * (glob)
merge tool returned: 0
0 files updated, 2 files merged, 0 files removed, 1 files unresolved
- use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon
+ use 'hg resolve' to retry unresolved file merges or 'hg merge --abort' to abandon
--------------
M b
C a
--- a/tests/test-rename.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-rename.t Mon Jan 22 17:53:02 2018 -0500
@@ -70,7 +70,7 @@
rename --after a single file to a nonexistent target filename
$ hg rename --after d1/a dummy
- d1/a: not recording move - dummy does not exist (glob)
+ d1/a: not recording move - dummy does not exist
move a single file to an existing directory
@@ -120,10 +120,10 @@
rename directory d1 as d3
$ hg rename d1/ d3
- moving d1/a to d3/a (glob)
- moving d1/b to d3/b (glob)
- moving d1/ba to d3/ba (glob)
- moving d1/d11/a1 to d3/d11/a1 (glob)
+ moving d1/a to d3/a
+ moving d1/b to d3/b
+ moving d1/ba to d3/ba
+ moving d1/d11/a1 to d3/d11/a1
$ hg status -C
A d3/a
d1/a
@@ -145,10 +145,10 @@
$ mv d1 d3
$ hg rename --after d1 d3
- moving d1/a to d3/a (glob)
- moving d1/b to d3/b (glob)
- moving d1/ba to d3/ba (glob)
- moving d1/d11/a1 to d3/d11/a1 (glob)
+ moving d1/a to d3/a
+ moving d1/b to d3/b
+ moving d1/ba to d3/ba
+ moving d1/d11/a1 to d3/d11/a1
$ hg status -C
A d3/a
d1/a
@@ -169,7 +169,7 @@
move a directory using a relative path
$ (cd d2; mkdir d3; hg rename ../d1/d11 d3)
- moving ../d1/d11/a1 to d3/d11/a1 (glob)
+ moving ../d1/d11/a1 to d3/d11/a1
$ hg status -C
A d2/d3/d11/a1
d1/d11/a1
@@ -181,7 +181,7 @@
move --after a directory using a relative path
$ (cd d2; mkdir d3; mv ../d1/d11 d3; hg rename --after ../d1/d11 d3)
- moving ../d1/d11/a1 to d3/d11/a1 (glob)
+ moving ../d1/d11/a1 to d3/d11/a1
$ hg status -C
A d2/d3/d11/a1
d1/d11/a1
@@ -193,7 +193,7 @@
move directory d1/d11 to an existing directory d2 (removes empty d1)
$ hg rename d1/d11/ d2
- moving d1/d11/a1 to d2/d11/a1 (glob)
+ moving d1/d11/a1 to d2/d11/a1
$ hg status -C
A d2/d11/a1
d1/d11/a1
@@ -206,11 +206,11 @@
$ mkdir d3
$ hg rename d1 d2 d3
- moving d1/a to d3/d1/a (glob)
- moving d1/b to d3/d1/b (glob)
- moving d1/ba to d3/d1/ba (glob)
- moving d1/d11/a1 to d3/d1/d11/a1 (glob)
- moving d2/b to d3/d2/b (glob)
+ moving d1/a to d3/d1/a
+ moving d1/b to d3/d1/b
+ moving d1/ba to d3/d1/ba
+ moving d1/d11/a1 to d3/d1/d11/a1
+ moving d2/b to d3/d2/b
$ hg status -C
A d3/d1/a
d1/a
@@ -236,11 +236,11 @@
$ mkdir d3
$ mv d1 d2 d3
$ hg rename --after d1 d2 d3
- moving d1/a to d3/d1/a (glob)
- moving d1/b to d3/d1/b (glob)
- moving d1/ba to d3/d1/ba (glob)
- moving d1/d11/a1 to d3/d1/d11/a1 (glob)
- moving d2/b to d3/d2/b (glob)
+ moving d1/a to d3/d1/a
+ moving d1/b to d3/d1/b
+ moving d1/ba to d3/d1/ba
+ moving d1/d11/a1 to d3/d1/d11/a1
+ moving d2/b to d3/d2/b
$ hg status -C
A d3/d1/a
d1/a
@@ -267,7 +267,7 @@
$ hg rename d1/* d2
d2/b: not overwriting - file already committed
(hg rename --force to replace the file by recording a rename)
- moving d1/d11/a1 to d2/d11/a1 (glob)
+ moving d1/d11/a1 to d2/d11/a1
$ hg status -C
A d2/a
d1/a
@@ -304,14 +304,14 @@
abort: with multiple sources, destination must be an existing directory
[255]
-move every file under d1 to d2/d21 (glob)
+move every file under d1 to d2/d21
$ mkdir d2/d21
$ hg rename 'glob:d1/**' d2/d21
- moving d1/a to d2/d21/a (glob)
- moving d1/b to d2/d21/b (glob)
- moving d1/ba to d2/d21/ba (glob)
- moving d1/d11/a1 to d2/d21/a1 (glob)
+ moving d1/a to d2/d21/a
+ moving d1/b to d2/d21/b
+ moving d1/ba to d2/d21/ba
+ moving d1/d11/a1 to d2/d21/a1
$ hg status -C
A d2/d21/a
d1/a
@@ -329,15 +329,15 @@
4 files updated, 0 files merged, 0 files removed, 0 files unresolved
$ rm -rf d2/d21
-move --after some files under d1 to d2/d21 (glob)
+move --after some files under d1 to d2/d21
$ mkdir d2/d21
$ mv d1/a d1/d11/a1 d2/d21
$ hg rename --after 'glob:d1/**' d2/d21
- moving d1/a to d2/d21/a (glob)
- d1/b: not recording move - d2/d21/b does not exist (glob)
- d1/ba: not recording move - d2/d21/ba does not exist (glob)
- moving d1/d11/a1 to d2/d21/a1 (glob)
+ moving d1/a to d2/d21/a
+ d1/b: not recording move - d2/d21/b does not exist
+ d1/ba: not recording move - d2/d21/ba does not exist
+ moving d1/d11/a1 to d2/d21/a1
$ hg status -C
A d2/d21/a
d1/a
@@ -353,8 +353,8 @@
$ mkdir d2/d21
$ hg rename 're:d1/([^a][^/]*/)*a.*' d2/d21
- moving d1/a to d2/d21/a (glob)
- moving d1/d11/a1 to d2/d21/a1 (glob)
+ moving d1/a to d2/d21/a
+ moving d1/d11/a1 to d2/d21/a1
$ hg status -C
A d2/d21/a
d1/a
@@ -419,7 +419,7 @@
$ mkdir d3
$ hg rename d1/* d2/* d3
- moving d1/d11/a1 to d3/d11/a1 (glob)
+ moving d1/d11/a1 to d3/d11/a1
d3/b: not overwriting - d2/b collides with d1/b
$ hg status -C
A d3/a
@@ -445,7 +445,7 @@
moving a to ../d3/d1/a
moving b to ../d3/d1/b
moving ba to ../d3/d1/ba
- moving d11/a1 to ../d3/d1/d11/a1 (glob)
+ moving d11/a1 to ../d3/d1/d11/a1
$ hg status -C
A d3/d1/a
d1/a
@@ -471,7 +471,7 @@
moving a to ../d3/a
moving b to ../d3/b
moving ba to ../d3/ba
- moving d11/a1 to ../d3/d11/a1 (glob)
+ moving d11/a1 to ../d3/d11/a1
$ hg status -C
A d3/a
d1/a
@@ -492,9 +492,9 @@
move the parent tree with "hg rename .."
$ (cd d1/d11; hg rename .. ../../d3)
- moving ../a to ../../d3/a (glob)
- moving ../b to ../../d3/b (glob)
- moving ../ba to ../../d3/ba (glob)
+ moving ../a to ../../d3/a
+ moving ../b to ../../d3/b
+ moving ../ba to ../../d3/ba
moving a1 to ../../d3/d11/a1
$ hg status -C
A d3/a
@@ -517,9 +517,9 @@
$ hg remove d1/b
$ hg rename d1 d3
- moving d1/a to d3/a (glob)
- moving d1/ba to d3/ba (glob)
- moving d1/d11/a1 to d3/d11/a1 (glob)
+ moving d1/a to d3/a
+ moving d1/ba to d3/ba
+ moving d1/d11/a1 to d3/d11/a1
$ hg status -C
A d3/a
d1/a
@@ -603,7 +603,7 @@
check illegal path components
$ hg rename d1/d11/a1 .hg/foo
- abort: path contains illegal component: .hg/foo (glob)
+ abort: path contains illegal component: .hg/foo
[255]
$ hg status -C
$ hg rename d1/d11/a1 ../foo
@@ -613,7 +613,7 @@
$ mv d1/d11/a1 .hg/foo
$ hg rename --after d1/d11/a1 .hg/foo
- abort: path contains illegal component: .hg/foo (glob)
+ abort: path contains illegal component: .hg/foo
[255]
$ hg status -C
! d1/d11/a1
@@ -622,25 +622,25 @@
$ rm .hg/foo
$ hg rename d1/d11/a1 .hg
- abort: path contains illegal component: .hg/a1 (glob)
+ abort: path contains illegal component: .hg/a1
[255]
$ hg --config extensions.largefiles= rename d1/d11/a1 .hg
The fsmonitor extension is incompatible with the largefiles extension and has been disabled. (fsmonitor !)
- abort: path contains illegal component: .hg/a1 (glob)
+ abort: path contains illegal component: .hg/a1
[255]
$ hg status -C
$ hg rename d1/d11/a1 ..
- abort: ../a1 not under root '$TESTTMP' (glob)
+ abort: ../a1 not under root '$TESTTMP'
[255]
$ hg --config extensions.largefiles= rename d1/d11/a1 ..
The fsmonitor extension is incompatible with the largefiles extension and has been disabled. (fsmonitor !)
- abort: ../a1 not under root '$TESTTMP' (glob)
+ abort: ../a1 not under root '$TESTTMP'
[255]
$ hg status -C
$ mv d1/d11/a1 .hg
$ hg rename --after d1/d11/a1 .hg
- abort: path contains illegal component: .hg/a1 (glob)
+ abort: path contains illegal component: .hg/a1
[255]
$ hg status -C
! d1/d11/a1
@@ -649,7 +649,7 @@
$ rm .hg/a1
$ (cd d1/d11; hg rename ../../d2/b ../../.hg/foo)
- abort: path contains illegal component: .hg/foo (glob)
+ abort: path contains illegal component: .hg/foo
[255]
$ hg status -C
$ (cd d1/d11; hg rename ../../d2/b ../../../foo)
--- a/tests/test-resolve.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-resolve.t Mon Jan 22 17:53:02 2018 -0500
@@ -34,7 +34,7 @@
$ hg up -qC 2
$ hg merge --tool=internal:fail 1
0 files updated, 0 files merged, 0 files removed, 2 files unresolved
- use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon
+ use 'hg resolve' to retry unresolved file merges or 'hg merge --abort' to abandon
[1]
resolve -l should contain unresolved entries
@@ -223,7 +223,7 @@
$ hg up -qC 2
$ hg merge --tool=internal:fail 1
0 files updated, 0 files merged, 0 files removed, 2 files unresolved
- use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon
+ use 'hg resolve' to retry unresolved file merges or 'hg merge --abort' to abandon
[1]
resolve without arguments should suggest --all
@@ -249,7 +249,7 @@
.orig files should exists where specified
$ hg resolve --all --verbose --config 'ui.origbackuppath=.hg/origbackups'
merging file1
- creating directory: $TESTTMP/repo/.hg/origbackups (glob)
+ creating directory: $TESTTMP/repo/.hg/origbackups
merging file2
warning: conflicts while merging file1! (edit, then use 'hg resolve --mark')
warning: conflicts while merging file2! (edit, then use 'hg resolve --mark')
@@ -270,7 +270,7 @@
test .orig behavior with resolve
$ hg resolve -q file1 --tool "sh -c 'f --dump \"$TESTTMP/repo/file1.orig\"'"
- $TESTTMP/repo/file1.orig: (glob)
+ $TESTTMP/repo/file1.orig:
>>>
foo
baz
--- a/tests/test-revert-interactive.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-revert-interactive.t Mon Jan 22 17:53:02 2018 -0500
@@ -52,64 +52,64 @@
> n
> EOF
reverting f
- reverting folder1/g (glob)
- removing folder1/i (glob)
- reverting folder2/h (glob)
+ reverting folder1/g
+ removing folder1/i
+ reverting folder2/h
remove added file folder1/i (Yn)? y
diff --git a/f b/f
2 hunks, 2 lines changed
examine changes to 'f'? [Ynesfdaq?] y
- @@ -1,5 +1,6 @@
- +a
+ @@ -1,6 +1,5 @@
+ -a
1
2
3
4
5
- revert change 1/6 to 'f'? [Ynesfdaq?] y
+ apply change 1/6 to 'f'? [Ynesfdaq?] y
- @@ -1,5 +2,6 @@
+ @@ -2,6 +1,5 @@
1
2
3
4
5
- +b
- revert change 2/6 to 'f'? [Ynesfdaq?] y
+ -b
+ apply change 2/6 to 'f'? [Ynesfdaq?] y
diff --git a/folder1/g b/folder1/g
2 hunks, 2 lines changed
examine changes to 'folder1/g'? [Ynesfdaq?] y
- @@ -1,5 +1,6 @@
- +c
+ @@ -1,6 +1,5 @@
+ -c
1
2
3
4
5
- revert change 3/6 to 'folder1/g'? [Ynesfdaq?] ?
+ apply change 3/6 to 'folder1/g'? [Ynesfdaq?] ?
- y - yes, revert this change
+ y - yes, apply this change
n - no, skip this change
e - edit this change manually
s - skip remaining changes to this file
- f - revert remaining changes to this file
+ f - apply remaining changes to this file
d - done, skip remaining changes and files
- a - revert all changes to all remaining files
- q - quit, reverting no changes
+ a - apply all changes to all remaining files
+ q - quit, applying no changes
? - ? (display help)
- revert change 3/6 to 'folder1/g'? [Ynesfdaq?] y
+ apply change 3/6 to 'folder1/g'? [Ynesfdaq?] y
- @@ -1,5 +2,6 @@
+ @@ -2,6 +1,5 @@
1
2
3
4
5
- +d
- revert change 4/6 to 'folder1/g'? [Ynesfdaq?] n
+ -d
+ apply change 4/6 to 'folder1/g'? [Ynesfdaq?] n
diff --git a/folder2/h b/folder2/h
2 hunks, 2 lines changed
@@ -140,8 +140,8 @@
Test that --interactive lift the need for --all
$ echo q | hg revert -i -r 2
- reverting folder1/g (glob)
- reverting folder2/h (glob)
+ reverting folder1/g
+ reverting folder2/h
diff --git a/folder1/g b/folder1/g
1 hunks, 1 lines changed
examine changes to 'folder1/g'? [Ynesfdaq?] q
@@ -157,12 +157,12 @@
1 hunks, 1 lines changed
examine changes to 'folder1/g'? [Ynesfdaq?] y
- @@ -3,3 +3,4 @@
+ @@ -3,4 +3,3 @@
3
4
5
- +d
- revert this change to 'folder1/g'? [Ynesfdaq?] n
+ -d
+ apply this change to 'folder1/g'? [Ynesfdaq?] n
$ ls folder1/
g
@@ -173,12 +173,12 @@
1 hunks, 1 lines changed
examine changes to 'folder1/g'? [Ynesfdaq?] y
- @@ -3,3 +3,4 @@
+ @@ -3,4 +3,3 @@
3
4
5
- +d
- revert this change to 'folder1/g'? [Ynesfdaq?] y
+ -d
+ apply this change to 'folder1/g'? [Ynesfdaq?] y
$ ls folder1/
g
@@ -198,53 +198,53 @@
> n
> EOF
reverting f
- reverting folder1/g (glob)
- removing folder1/i (glob)
- reverting folder2/h (glob)
+ reverting folder1/g
+ removing folder1/i
+ reverting folder2/h
remove added file folder1/i (Yn)? n
diff --git a/f b/f
2 hunks, 2 lines changed
examine changes to 'f'? [Ynesfdaq?] y
- @@ -1,5 +1,6 @@
- +a
+ @@ -1,6 +1,5 @@
+ -a
1
2
3
4
5
- revert change 1/6 to 'f'? [Ynesfdaq?] y
+ apply change 1/6 to 'f'? [Ynesfdaq?] y
- @@ -1,5 +2,6 @@
+ @@ -2,6 +1,5 @@
1
2
3
4
5
- +b
- revert change 2/6 to 'f'? [Ynesfdaq?] y
+ -b
+ apply change 2/6 to 'f'? [Ynesfdaq?] y
diff --git a/folder1/g b/folder1/g
2 hunks, 2 lines changed
examine changes to 'folder1/g'? [Ynesfdaq?] y
- @@ -1,5 +1,6 @@
- +c
+ @@ -1,6 +1,5 @@
+ -c
1
2
3
4
5
- revert change 3/6 to 'folder1/g'? [Ynesfdaq?] y
+ apply change 3/6 to 'folder1/g'? [Ynesfdaq?] y
- @@ -1,5 +2,6 @@
+ @@ -2,6 +1,5 @@
1
2
3
4
5
- +d
- revert change 4/6 to 'folder1/g'? [Ynesfdaq?] n
+ -d
+ apply change 4/6 to 'folder1/g'? [Ynesfdaq?] n
diff --git a/folder2/h b/folder2/h
2 hunks, 2 lines changed
@@ -368,77 +368,6 @@
$ cat k
42
-Check the experimental config to invert the selection:
- $ cat <<EOF >> $HGRCPATH
- > [experimental]
- > revertalternateinteractivemode=False
- > EOF
-
-
- $ hg up -C .
- 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
- $ printf 'firstline\nc\n1\n2\n3\n 3\n5\nd\nlastline\n' > folder1/g
- $ hg diff --nodates
- diff -r a3d963a027aa folder1/g
- --- a/folder1/g
- +++ b/folder1/g
- @@ -1,7 +1,9 @@
- +firstline
- c
- 1
- 2
- 3
- -4
- + 3
- 5
- d
- +lastline
- $ hg revert -i <<EOF
- > y
- > y
- > y
- > n
- > EOF
- reverting folder1/g (glob)
- diff --git a/folder1/g b/folder1/g
- 3 hunks, 3 lines changed
- examine changes to 'folder1/g'? [Ynesfdaq?] y
-
- @@ -1,4 +1,5 @@
- +firstline
- c
- 1
- 2
- 3
- discard change 1/3 to 'folder1/g'? [Ynesfdaq?] y
-
- @@ -1,7 +2,7 @@
- c
- 1
- 2
- 3
- -4
- + 3
- 5
- d
- discard change 2/3 to 'folder1/g'? [Ynesfdaq?] y
-
- @@ -6,2 +7,3 @@
- 5
- d
- +lastline
- discard change 3/3 to 'folder1/g'? [Ynesfdaq?] n
-
- $ hg diff --nodates
- diff -r a3d963a027aa folder1/g
- --- a/folder1/g
- +++ b/folder1/g
- @@ -5,3 +5,4 @@
- 4
- 5
- d
- +lastline
-
$ hg update -C .
1 files updated, 0 files merged, 0 files removed, 0 files unresolved
$ hg purge
@@ -463,11 +392,6 @@
When a line without EOL is selected during "revert -i" (issue5651)
- $ cat <<EOF >> $HGRCPATH
- > [experimental]
- > %unset revertalternateinteractivemode
- > EOF
-
$ hg init $TESTTMP/revert-i-eol
$ cd $TESTTMP/revert-i-eol
$ echo 0 > a
@@ -487,11 +411,11 @@
1 hunks, 1 lines changed
examine changes to 'a'? [Ynesfdaq?] y
- @@ -1,1 +1,2 @@
+ @@ -1,2 +1,1 @@
0
- +1
+ -1
\ No newline at end of file
- revert this change to 'a'? [Ynesfdaq?] y
+ apply this change to 'a'? [Ynesfdaq?] y
$ cat a
0
--- a/tests/test-revert.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-revert.t Mon Jan 22 17:53:02 2018 -0500
@@ -91,8 +91,8 @@
$ echo z > e
$ hg revert --all -v --config 'ui.origbackuppath=.hg/origbackups'
- creating directory: $TESTTMP/repo/.hg/origbackups (glob)
- saving current version of e as $TESTTMP/repo/.hg/origbackups/e (glob)
+ creating directory: $TESTTMP/repo/.hg/origbackups
+ saving current version of e as $TESTTMP/repo/.hg/origbackups/e
reverting e
$ rm -rf .hg/origbackups
@@ -283,11 +283,11 @@
$ echo foo > newdir/newfile
$ hg add newdir/newfile
$ hg revert b newdir
- reverting b/b (glob)
- forgetting newdir/newfile (glob)
+ reverting b/b
+ forgetting newdir/newfile
$ echo foobar > b/b
$ hg revert .
- reverting b/b (glob)
+ reverting b/b
reverting a rename target should revert the source
@@ -336,8 +336,8 @@
$ hg revert -a --no-backup
reverting ignored
- reverting ignoreddir/file (glob)
- undeleting ignoreddir/removed (glob)
+ reverting ignoreddir/file
+ undeleting ignoreddir/removed
undeleting removed
$ hg st -mardi
--- a/tests/test-revlog-mmapindex.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-revlog-mmapindex.t Mon Jan 22 17:53:02 2018 -0500
@@ -6,12 +6,13 @@
>
> from mercurial import (
> extensions,
+ > pycompat,
> util,
> )
>
> def extsetup(ui):
> def mmapread(orig, fp):
- > ui.write("mmapping %s\n" % fp.name)
+ > ui.write(b"mmapping %s\n" % pycompat.bytestr(fp.name))
> ui.flush()
> return orig(fp)
>
@@ -37,7 +38,7 @@
mmap index which is now more than 4k long
$ hg log -l 5 -T '{rev}\n' --config experimental.mmapindexthreshold=4k
- mmapping $TESTTMP/a/.hg/store/00changelog.i (glob)
+ mmapping $TESTTMP/a/.hg/store/00changelog.i
100
99
98
--- a/tests/test-revlog-raw.py Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-revlog-raw.py Mon Jan 22 17:53:02 2018 -0500
@@ -20,7 +20,7 @@
# The test wants to control whether to use delta explicitly, based on
# "storedeltachains".
-revlog.revlog._isgooddelta = lambda self, d, textlen: self.storedeltachains
+revlog.revlog._isgooddeltainfo = lambda self, d, textlen: self.storedeltachains
def abort(msg):
print('abort: %s' % msg)
--- a/tests/test-revset-outgoing.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-revset-outgoing.t Mon Jan 22 17:53:02 2018 -0500
@@ -38,7 +38,7 @@
$ cat .hg/hgrc
# example repository config (see 'hg help config' for more info)
[paths]
- default = $TESTTMP/a#stable (glob)
+ default = $TESTTMP/a#stable
# path aliases to other clones of this repo in URLs or filesystem paths
# (see 'hg help config.paths' for more info)
@@ -70,7 +70,7 @@
$ hg tout
- comparing with $TESTTMP/a (glob)
+ comparing with $TESTTMP/a
searching for changes
2:1d4099801a4e: '3' stable
@@ -90,7 +90,7 @@
$ cat .hg/hgrc
# example repository config (see 'hg help config' for more info)
[paths]
- default = $TESTTMP/a#stable (glob)
+ default = $TESTTMP/a#stable
# path aliases to other clones of this repo in URLs or filesystem paths
# (see 'hg help config.paths' for more info)
@@ -105,12 +105,11 @@
green = ../a#default
$ hg tout green
- comparing with green
- abort: repository green not found!
+ abort: repository green does not exist!
[255]
$ hg tlog -r 'outgoing("green")'
- abort: repository green not found!
+ abort: repository green does not exist!
[255]
$ cd ..
--- a/tests/test-revset.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-revset.t Mon Jan 22 17:53:02 2018 -0500
@@ -792,7 +792,7 @@
* set:
<baseset []>
-infix/suffix resolution of ^ operator (issue2884):
+infix/suffix resolution of ^ operator (issue2884, issue5764):
x^:y means (x^):y
@@ -818,6 +818,17 @@
1
2
+ $ try '1^..2'
+ (dagrange
+ (parentpost
+ (symbol '1'))
+ (symbol '2'))
+ * set:
+ <baseset+ [0, 1, 2]>
+ 0
+ 1
+ 2
+
$ try '9^:'
(rangepost
(parentpost
@@ -827,6 +838,24 @@
8
9
+ $ try '9^::'
+ (dagrangepost
+ (parentpost
+ (symbol '9')))
+ * set:
+ <generatorsetasc+>
+ 8
+ 9
+
+ $ try '9^..'
+ (dagrangepost
+ (parentpost
+ (symbol '9')))
+ * set:
+ <generatorsetasc+>
+ 8
+ 9
+
x^:y should be resolved before omitting group operators
$ try '1^(:2)'
@@ -944,6 +973,14 @@
hg: parse error: ^ expects a number 0, 1, or 2
[255]
+'::' itself isn't a valid expression
+
+ $ try '::'
+ (dagrangeall
+ None)
+ hg: parse error: can't use '::' in this context
+ [255]
+
ancestor can accept 0 or more arguments
$ log 'ancestor()'
@@ -1484,7 +1521,7 @@
$ hg debugrevspec -s 'last(0::)'
* set:
<baseset slice=0:1
- <generatorset->>
+ <generatorsetasc->>
9
$ hg identify -r '0::' --num
9
--- a/tests/test-revset2.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-revset2.t Mon Jan 22 17:53:02 2018 -0500
@@ -152,7 +152,7 @@
* set:
<addset
<baseset- [1, 3, 5]>,
- <generatorset+>>
+ <generatorsetdesc+>>
5
3
1
@@ -174,7 +174,7 @@
(symbol '5'))))))
* set:
<addset+
- <generatorset+>,
+ <generatorsetdesc+>,
<baseset- [1, 3, 5]>>
0
1
@@ -927,7 +927,7 @@
(symbol 'merge')
None))
* set:
- <generatorset+>
+ <generatorsetasc+>
6
7
@@ -1636,6 +1636,8 @@
> \|/ | # amend: F -> G
> A A Z # amend: A -> Z
> EOS
+ 3 new orphan changesets
+ 3 new content-divergent changesets
$ hg log -r 'successors(Z)' -T '{desc}\n'
Z
--- a/tests/test-run-tests.py Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-run-tests.py Mon Jan 22 17:53:02 2018 -0500
@@ -54,6 +54,8 @@
enable windows matching on any os
>>> _osaltsep = os.altsep
>>> os.altsep = True
+ >>> _osname = os.name
+ >>> os.name = 'nt'
valid match on windows
>>> lm(b'g/a*/d (glob)\n', b'g\\abc/d\n')
@@ -65,10 +67,13 @@
missing glob
>>> lm(b'/g/c/d/fg\n', b'\\g\\c\\d/fg\n')
- 'special: +glob'
+ True
+ >>> lm(b'/g/c/d/fg\n', b'\\g\\c\\d\\fg\r\n')
+ True
restore os.altsep
>>> os.altsep = _osaltsep
+ >>> os.name = _osname
"""
pass
@@ -78,6 +83,8 @@
disable windows matching on any os
>>> _osaltsep = os.altsep
>>> os.altsep = False
+ >>> _osname = os.name
+ >>> os.name = 'nt'
backslash does not match slash
>>> lm(b'h/a* (glob)\n', b'h\\ab\n')
@@ -93,6 +100,7 @@
restore os.altsep
>>> os.altsep = _osaltsep
+ >>> os.name = _osname
"""
pass
--- a/tests/test-run-tests.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-run-tests.t Mon Jan 22 17:53:02 2018 -0500
@@ -32,8 +32,7 @@
#if execbit
$ touch hg
$ run-tests.py --with-hg=./hg
- Usage: run-tests.py [options] [tests]
-
+ usage: run-tests.py [options] [tests]
run-tests.py: error: --with-hg must specify an executable hg script
[2]
$ rm hg
@@ -98,19 +97,22 @@
test churn with globs
$ cat > test-failure.t <<EOF
- > $ echo "bar-baz"; echo "bar-bad"
+ > $ echo "bar-baz"; echo "bar-bad"; echo foo
> bar*bad (glob)
> bar*baz (glob)
+ > | fo (re)
> EOF
$ rt test-failure.t
--- $TESTTMP/test-failure.t
+++ $TESTTMP/test-failure.t.err
- @@ -1,3 +1,3 @@
- $ echo "bar-baz"; echo "bar-bad"
+ @@ -1,4 +1,4 @@
+ $ echo "bar-baz"; echo "bar-bad"; echo foo
+ bar*baz (glob)
bar*bad (glob)
- bar*baz (glob)
+ - | fo (re)
+ + foo
ERROR: test-failure.t output changed
!
@@ -126,11 +128,13 @@
\x1b[38;5;124m--- $TESTTMP/test-failure.t\x1b[39m (esc)
\x1b[38;5;34m+++ $TESTTMP/test-failure.t.err\x1b[39m (esc)
- \x1b[38;5;90;01m@@ -1,3 +1,3 @@\x1b[39;00m (esc)
- $ echo "bar-baz"; echo "bar-bad"
+ \x1b[38;5;90;01m@@ -1,4 +1,4 @@\x1b[39;00m (esc)
+ $ echo "bar-baz"; echo "bar-bad"; echo foo
\x1b[38;5;34m+ bar*baz (glob)\x1b[39m (esc)
bar*bad (glob)
\x1b[38;5;124m- bar*baz (glob)\x1b[39m (esc)
+ \x1b[38;5;124m- | fo (re)\x1b[39m (esc)
+ \x1b[38;5;34m+ foo\x1b[39m (esc)
\x1b[38;5;88mERROR: \x1b[39m\x1b[38;5;9mtest-failure.t\x1b[39m\x1b[38;5;88m output changed\x1b[39m (esc)
!
@@ -145,11 +149,13 @@
--- $TESTTMP/test-failure.t
+++ $TESTTMP/test-failure.t.err
- @@ -1,3 +1,3 @@
- $ echo "bar-baz"; echo "bar-bad"
+ @@ -1,4 +1,4 @@
+ $ echo "bar-baz"; echo "bar-bad"; echo foo
+ bar*baz (glob)
bar*bad (glob)
- bar*baz (glob)
+ - | fo (re)
+ + foo
ERROR: test-failure.t output changed
!
@@ -674,7 +680,7 @@
Interactive with custom view
$ echo 'n' | rt -i --view echo
- $TESTTMP/test-failure.t $TESTTMP/test-failure.t.err (glob)
+ $TESTTMP/test-failure.t $TESTTMP/test-failure.t.err
Accept this change? [n]* (glob)
ERROR: test-failure.t output changed
!.
@@ -686,7 +692,7 @@
View the fix
$ echo 'y' | rt --view echo
- $TESTTMP/test-failure.t $TESTTMP/test-failure.t.err (glob)
+ $TESTTMP/test-failure.t $TESTTMP/test-failure.t.err
ERROR: test-failure.t output changed
!.
@@ -697,12 +703,14 @@
Accept the fix
- $ echo " $ echo 'saved backup bundle to \$TESTTMP/foo.hg'" >> test-failure.t
- $ echo " saved backup bundle to \$TESTTMP/foo.hg" >> test-failure.t
- $ echo " $ echo 'saved backup bundle to \$TESTTMP/foo.hg'" >> test-failure.t
- $ echo " saved backup bundle to \$TESTTMP/foo.hg (glob)" >> test-failure.t
- $ echo " $ echo 'saved backup bundle to \$TESTTMP/foo.hg'" >> test-failure.t
- $ echo " saved backup bundle to \$TESTTMP/*.hg (glob)" >> test-failure.t
+ $ cat >> test-failure.t <<EOF
+ > $ echo 'saved backup bundle to \$TESTTMP/foo.hg'
+ > saved backup bundle to \$TESTTMP/foo.hg
+ > $ echo 'saved backup bundle to \$TESTTMP/foo.hg'
+ > saved backup bundle to $TESTTMP\\foo.hg
+ > $ echo 'saved backup bundle to \$TESTTMP/foo.hg'
+ > saved backup bundle to \$TESTTMP/*.hg (glob)
+ > EOF
$ echo 'y' | rt -i 2>&1
--- $TESTTMP/test-failure.t
@@ -714,15 +722,14 @@
This is a noop statement so that
this test is still more bytes than success.
pad pad pad pad............................................................
- @@ -9,7 +9,7 @@
- pad pad pad pad............................................................
- pad pad pad pad............................................................
+ @@ -11,6 +11,6 @@
+ $ echo 'saved backup bundle to $TESTTMP/foo.hg'
+ saved backup bundle to $TESTTMP/foo.hg
$ echo 'saved backup bundle to $TESTTMP/foo.hg'
- - saved backup bundle to $TESTTMP/foo.hg
- + saved backup bundle to $TESTTMP/foo.hg* (glob)
+ - saved backup bundle to $TESTTMP\foo.hg
+ + saved backup bundle to $TESTTMP/foo.hg
$ echo 'saved backup bundle to $TESTTMP/foo.hg'
- saved backup bundle to $TESTTMP/foo.hg* (glob)
- $ echo 'saved backup bundle to $TESTTMP/foo.hg'
+ saved backup bundle to $TESTTMP/*.hg (glob)
Accept this change? [n] ..
# Ran 2 tests, 0 skipped, 0 failed.
@@ -738,9 +745,9 @@
pad pad pad pad............................................................
pad pad pad pad............................................................
$ echo 'saved backup bundle to $TESTTMP/foo.hg'
- saved backup bundle to $TESTTMP/foo.hg (glob)<
+ saved backup bundle to $TESTTMP/foo.hg
$ echo 'saved backup bundle to $TESTTMP/foo.hg'
- saved backup bundle to $TESTTMP/foo.hg (glob)<
+ saved backup bundle to $TESTTMP/foo.hg
$ echo 'saved backup bundle to $TESTTMP/foo.hg'
saved backup bundle to $TESTTMP/*.hg (glob)<
@@ -853,8 +860,8 @@
test --tmpdir support
$ rt --tmpdir=$TESTTMP/keep test-success.t
- Keeping testtmp dir: $TESTTMP/keep/child1/test-success.t (glob)
- Keeping threadtmp dir: $TESTTMP/keep/child1 (glob)
+ Keeping testtmp dir: $TESTTMP/keep/child1/test-success.t
+ Keeping threadtmp dir: $TESTTMP/keep/child1
.
# Ran 1 tests, 0 skipped, 0 failed.
@@ -1208,7 +1215,12 @@
> #else
> $ test "\$TESTDIR" = "$TESTTMP"/anothertests
> #endif
- > $ test "\$RUNTESTDIR" = "$TESTDIR"
+ > If this prints a path, that means RUNTESTDIR didn't equal
+ > TESTDIR as it should have.
+ > $ test "\$RUNTESTDIR" = "$TESTDIR" || echo "\$RUNTESTDIR"
+ > This should print the start of check-code. If this passes but the
+ > previous check failed, that means we found a copy of check-code at whatever
+ > RUNTESTSDIR ended up containing, even though it doesn't match TESTDIR.
> $ head -n 3 "\$RUNTESTDIR"/../contrib/check-code.py | sed 's@.!.*python@#!USRBINENVPY@'
> #!USRBINENVPY
> #
@@ -1261,6 +1273,58 @@
.
# Ran 1 tests, 0 skipped, 0 failed.
+support for automatically discovering test if arg is a folder
+ $ mkdir tmp && cd tmp
+
+ $ cat > test-uno.t << EOF
+ > $ echo line
+ > line
+ > EOF
+
+ $ cp test-uno.t test-dos.t
+ $ cd ..
+ $ cp -R tmp tmpp
+ $ cp tmp/test-uno.t test-solo.t
+
+ $ rt tmp/ test-solo.t tmpp
+ .....
+ # Ran 5 tests, 0 skipped, 0 failed.
+ $ rm -rf tmp tmpp
+
+support for running run-tests.py from another directory
+ $ mkdir tmp && cd tmp
+
+ $ cat > useful-file.sh << EOF
+ > important command
+ > EOF
+
+ $ cat > test-folder.t << EOF
+ > $ cat \$TESTDIR/useful-file.sh
+ > important command
+ > EOF
+
+ $ cat > test-folder-fail.t << EOF
+ > $ cat \$TESTDIR/useful-file.sh
+ > important commando
+ > EOF
+
+ $ cd ..
+ $ rt tmp/test-*.t
+
+ --- $TESTTMP/anothertests/tmp/test-folder-fail.t
+ +++ $TESTTMP/anothertests/tmp/test-folder-fail.t.err
+ @@ -1,2 +1,2 @@
+ $ cat $TESTDIR/useful-file.sh
+ - important commando
+ + important command
+
+ ERROR: test-folder-fail.t output changed
+ !.
+ Failed test-folder-fail.t: output changed
+ # Ran 2 tests, 0 skipped, 1 failed.
+ python hash seed: * (glob)
+ [1]
+
support for bisecting failed tests automatically
$ hg init bisect
$ cd bisect
@@ -1324,8 +1388,7 @@
[1]
$ rt --bisect-repo=../test-bisect test-bisect-dependent.t
- Usage: run-tests.py [options] [tests]
-
+ usage: run-tests.py [options] [tests]
run-tests.py: error: --bisect-repo cannot be used without --known-good-rev
[2]
@@ -1469,3 +1532,74 @@
# Ran 2 tests, 0 skipped, 1 failed.
python hash seed: * (glob)
[1]
+
+Test TESTCASE variable
+
+ $ cat > test-cases-ab.t <<'EOF'
+ > $ dostuff() {
+ > > echo "In case $TESTCASE"
+ > > }
+ > #testcases A B
+ > #if A
+ > $ dostuff
+ > In case A
+ > #endif
+ > #if B
+ > $ dostuff
+ > In case B
+ > #endif
+ > EOF
+ $ rt test-cases-ab.t
+ ..
+ # Ran 2 tests, 0 skipped, 0 failed.
+
+Test automatic pattern replacement
+
+ $ cat << EOF >> common-pattern.py
+ > substitutions = [
+ > (br'foo-(.*)\\b',
+ > br'\$XXX=\\1\$'),
+ > (br'bar\\n',
+ > br'\$YYY$\\n'),
+ > ]
+ > EOF
+
+ $ cat << EOF >> test-substitution.t
+ > $ echo foo-12
+ > \$XXX=12$
+ > $ echo foo-42
+ > \$XXX=42$
+ > $ echo bar prior
+ > bar prior
+ > $ echo lastbar
+ > last\$YYY$
+ > $ echo foo-bar foo-baz
+ > EOF
+
+ $ rt test-substitution.t
+
+ --- $TESTTMP/anothertests/cases/test-substitution.t
+ +++ $TESTTMP/anothertests/cases/test-substitution.t.err
+ @@ -7,3 +7,4 @@
+ $ echo lastbar
+ last$YYY$
+ $ echo foo-bar foo-baz
+ + $XXX=bar foo-baz$
+
+ ERROR: test-substitution.t output changed
+ !
+ Failed test-substitution.t: output changed
+ # Ran 1 tests, 0 skipped, 1 failed.
+ python hash seed: * (glob)
+ [1]
+
+--extra-config-opt works
+
+ $ cat << EOF >> test-config-opt.t
+ > $ hg init test-config-opt
+ > $ hg -R test-config-opt purge
+ > EOF
+
+ $ rt --extra-config-opt extensions.purge= test-config-opt.t
+ .
+ # Ran 1 tests, 0 skipped, 0 failed.
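The common-pattern.py hunk above shows the new output-substitution hook: a Python file whose `substitutions` list of (pattern, replacement) byte pairs run-tests applies to each line of actual test output before comparing it against the expected lines; this is the mechanism behind placeholders such as $ENOENT$ and $USUAL_COMPRESSIONS$ that appear elsewhere in this changeset. A minimal standalone sketch of that matching behaviour, with the heredoc escaping removed (the helper name apply_substitutions is illustrative, not run-tests internals):

  import re

  # The same pairs the test writes into common-pattern.py above.
  substitutions = [
      (br'foo-(.*)\b', br'$XXX=\1$'),
      (br'bar\n', br'$YYY$\n'),
  ]

  def apply_substitutions(line):
      # Apply every (pattern, replacement) pair, in order, to one line
      # of raw test output.
      for pattern, replacement in substitutions:
          line = re.sub(pattern, replacement, line)
      return line

  assert apply_substitutions(b'foo-12\n') == b'$XXX=12$\n'
  assert apply_substitutions(b'lastbar\n') == b'last$YYY$\n'

This is also why the failing hunk above reports `$XXX=bar foo-baz$` for `echo foo-bar foo-baz`: the greedy `(.*)` swallows everything up to the last word boundary on the line.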
--- a/tests/test-setdiscovery.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-setdiscovery.t Mon Jan 22 17:53:02 2018 -0500
@@ -16,11 +16,17 @@
> echo "% -- a -> b set"
> hg -R a debugdiscovery b --verbose --debug --config progress.debug=true
> echo
+ > echo "% -- a -> b set (tip only)"
+ > hg -R a debugdiscovery b --verbose --debug --config progress.debug=true --rev tip
+ > echo
> echo "% -- b -> a tree"
> hg -R b debugdiscovery a --verbose --old
> echo
> echo "% -- b -> a set"
> hg -R b debugdiscovery a --verbose --debug --config progress.debug=true
+ > echo
+ > echo "% -- b -> a set (tip only)"
+ > hg -R b debugdiscovery a --verbose --debug --config progress.debug=true --rev tip
> cd ..
> }
@@ -48,6 +54,13 @@
common heads: 01241442b3c2 b5714e113bc0
local is subset
+ % -- a -> b set (tip only)
+ comparing with b
+ query 1; heads
+ searching for changes
+ all local heads known remotely
+ common heads: b5714e113bc0
+
% -- b -> a tree
comparing with a
searching for changes
@@ -62,6 +75,14 @@
all remote heads known locally
common heads: 01241442b3c2 b5714e113bc0
remote is subset
+
+ % -- b -> a set (tip only)
+ comparing with a
+ query 1; heads
+ searching for changes
+ all remote heads known locally
+ common heads: 01241442b3c2 b5714e113bc0
+ remote is subset
Many new:
@@ -86,6 +107,16 @@
2 total queries in *.????s (glob)
common heads: bebd167eb94d
+ % -- a -> b set (tip only)
+ comparing with b
+ query 1; heads
+ searching for changes
+ taking quick initial sample
+ searching: 2 queries
+ query 2; still undecided: 31, sample size is: 31
+ 2 total queries in *.????s (glob)
+ common heads: 66f7d451a68b
+
% -- b -> a tree
comparing with a
searching for changes
@@ -101,6 +132,16 @@
query 2; still undecided: 2, sample size is: 2
2 total queries in *.????s (glob)
common heads: bebd167eb94d
+
+ % -- b -> a set (tip only)
+ comparing with a
+ query 1; heads
+ searching for changes
+ taking initial sample
+ searching: 2 queries
+ query 2; still undecided: 2, sample size is: 2
+ 2 total queries in *.????s (glob)
+ common heads: bebd167eb94d
Both sides many new with stub:
@@ -124,6 +165,16 @@
2 total queries in *.????s (glob)
common heads: 2dc09a01254d
+ % -- a -> b set (tip only)
+ comparing with b
+ query 1; heads
+ searching for changes
+ taking quick initial sample
+ searching: 2 queries
+ query 2; still undecided: 31, sample size is: 31
+ 2 total queries in *.????s (glob)
+ common heads: 66f7d451a68b
+
% -- b -> a tree
comparing with a
searching for changes
@@ -139,6 +190,16 @@
query 2; still undecided: 29, sample size is: 29
2 total queries in *.????s (glob)
common heads: 2dc09a01254d
+
+ % -- b -> a set (tip only)
+ comparing with a
+ query 1; heads
+ searching for changes
+ taking initial sample
+ searching: 2 queries
+ query 2; still undecided: 29, sample size is: 29
+ 2 total queries in *.????s (glob)
+ common heads: 2dc09a01254d
Both many new:
@@ -163,6 +224,16 @@
2 total queries in *.????s (glob)
common heads: 66f7d451a68b
+ % -- a -> b set (tip only)
+ comparing with b
+ query 1; heads
+ searching for changes
+ taking quick initial sample
+ searching: 2 queries
+ query 2; still undecided: 31, sample size is: 31
+ 2 total queries in *.????s (glob)
+ common heads: 66f7d451a68b
+
% -- b -> a tree
comparing with a
searching for changes
@@ -178,6 +249,16 @@
query 2; still undecided: 31, sample size is: 31
2 total queries in *.????s (glob)
common heads: 66f7d451a68b
+
+ % -- b -> a set (tip only)
+ comparing with a
+ query 1; heads
+ searching for changes
+ taking quick initial sample
+ searching: 2 queries
+ query 2; still undecided: 31, sample size is: 31
+ 2 total queries in *.????s (glob)
+ common heads: 66f7d451a68b
Both many new skewed:
@@ -202,6 +283,16 @@
2 total queries in *.????s (glob)
common heads: 66f7d451a68b
+ % -- a -> b set (tip only)
+ comparing with b
+ query 1; heads
+ searching for changes
+ taking quick initial sample
+ searching: 2 queries
+ query 2; still undecided: 51, sample size is: 51
+ 2 total queries in *.????s (glob)
+ common heads: 66f7d451a68b
+
% -- b -> a tree
comparing with a
searching for changes
@@ -217,6 +308,16 @@
query 2; still undecided: 31, sample size is: 31
2 total queries in *.????s (glob)
common heads: 66f7d451a68b
+
+ % -- b -> a set (tip only)
+ comparing with a
+ query 1; heads
+ searching for changes
+ taking quick initial sample
+ searching: 2 queries
+ query 2; still undecided: 31, sample size is: 31
+ 2 total queries in *.????s (glob)
+ common heads: 66f7d451a68b
Both many new on top of long history:
@@ -244,6 +345,19 @@
3 total queries in *.????s (glob)
common heads: 7ead0cba2838
+ % -- a -> b set (tip only)
+ comparing with b
+ query 1; heads
+ searching for changes
+ taking quick initial sample
+ searching: 2 queries
+ query 2; still undecided: 1049, sample size is: 11
+ sampling from both directions
+ searching: 3 queries
+ query 3; still undecided: 31, sample size is: 31
+ 3 total queries in *.????s (glob)
+ common heads: 7ead0cba2838
+
% -- b -> a tree
comparing with a
searching for changes
@@ -262,6 +376,19 @@
query 3; still undecided: 15, sample size is: 15
3 total queries in *.????s (glob)
common heads: 7ead0cba2838
+
+ % -- b -> a set (tip only)
+ comparing with a
+ query 1; heads
+ searching for changes
+ taking quick initial sample
+ searching: 2 queries
+ query 2; still undecided: 1029, sample size is: 11
+ sampling from both directions
+ searching: 3 queries
+ query 3; still undecided: 15, sample size is: 15
+ 3 total queries in *.????s (glob)
+ common heads: 7ead0cba2838
One with >200 heads, which used to use up all of the sample:
@@ -327,6 +454,18 @@
query 6; still undecided: \d+, sample size is: \d+ (re)
6 total queries in *.????s (glob)
common heads: 3ee37d65064a
+ $ hg -R a debugdiscovery b --debug --verbose --config progress.debug=true --rev tip
+ comparing with b
+ query 1; heads
+ searching for changes
+ taking quick initial sample
+ searching: 2 queries
+ query 2; still undecided: 303, sample size is: 9
+ sampling from both directions
+ searching: 3 queries
+ query 3; still undecided: 3, sample size is: 3
+ 3 total queries in *.????s (glob)
+ common heads: 3ee37d65064a
Test actual protocol when pulling one new head in addition to common heads
@@ -350,9 +489,9 @@
$ killdaemons.py
$ cut -d' ' -f6- access.log | grep -v cmd=known # cmd=known uses random sampling
"GET /?cmd=capabilities HTTP/1.1" 200 -
- "GET /?cmd=batch HTTP/1.1" 200 - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D513314ca8b3ae4dac8eec56966265b00fcf866db x-hgproto-1:0.1 0.2 comp=*zlib,none,bzip2 (glob)
- "GET /?cmd=getbundle HTTP/1.1" 200 - x-hgarg-1:bundlecaps=HG20%2Cbundle2%3DHG20%250Achangegroup%253D01%252C02%250Adigests%253Dmd5%252Csha1%252Csha512%250Aerror%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250Ahgtagsfnodes%250Alistkeys%250Aphases%253Dheads%250Apushkey%250Aremote-changegroup%253Dhttp%252Chttps&cg=1&common=513314ca8b3ae4dac8eec56966265b00fcf866db&heads=e64a39e7da8b0d54bc63e81169aff001c13b3477 x-hgproto-1:0.1 0.2 comp=*zlib,none,bzip2 (glob)
- "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=phases x-hgproto-1:0.1 0.2 comp=*zlib,none,bzip2 (glob)
+ "GET /?cmd=batch HTTP/1.1" 200 - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D513314ca8b3ae4dac8eec56966265b00fcf866db x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$
+ "GET /?cmd=getbundle HTTP/1.1" 200 - x-hgarg-1:$USUAL_BUNDLE_CAPS$&cg=1&common=513314ca8b3ae4dac8eec56966265b00fcf866db&heads=e64a39e7da8b0d54bc63e81169aff001c13b3477 x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$
+ "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=phases x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$
$ cat errors.log
$ cd ..
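The hunks above extend test-setdiscovery.t with a `--rev tip` variant of every discovery run, so the algorithm is also exercised when only a subset of heads is of interest. A hedged driver for the same invocation outside the test harness (repository paths `a` and `b` are the ones the test builds):

  import subprocess

  # Discovery from repo "a" against repo "b", restricted to the heads
  # reachable from tip, as in the commands added above.
  subprocess.check_call([
      'hg', '-R', 'a', 'debugdiscovery', 'b',
      '--verbose', '--debug',
      '--config', 'progress.debug=true',
      '--rev', 'tip',
  ])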
--- a/tests/test-share.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-share.t Mon Jan 22 17:53:02 2018 -0500
@@ -41,14 +41,14 @@
Some sed versions append a newline, some don't, and some just fail
$ cat .hg/sharedpath; echo
- $TESTTMP/repo1/.hg (glob)
+ $TESTTMP/repo1/.hg
trailing newline on .hg/sharedpath is ok
$ hg tip -q
0:d3873e73d99e
$ echo '' >> .hg/sharedpath
$ cat .hg/sharedpath
- $TESTTMP/repo1/.hg (glob)
+ $TESTTMP/repo1/.hg
$ hg tip -q
0:d3873e73d99e
@@ -278,7 +278,7 @@
bm3 4:62f4ded848e4
* bm4 5:92793bfc8cad
$ hg push -B bm4
- pushing to $TESTTMP/repo3 (glob)
+ pushing to $TESTTMP/repo3
searching for changes
adding changesets
adding manifests
@@ -348,7 +348,7 @@
bm1 3:b87954705719
bm4 5:92793bfc8cad
$ hg --config "extensions.failpullbookmarks=$TESTTMP/failpullbookmarks.py" pull $TESTTMP/repo4
- pulling from $TESTTMP/repo4 (glob)
+ pulling from $TESTTMP/repo4
searching for changes
no changes found
adding remote bookmark bm3
@@ -358,7 +358,7 @@
bm1 3:b87954705719
bm4 5:92793bfc8cad
$ hg pull $TESTTMP/repo4
- pulling from $TESTTMP/repo4 (glob)
+ pulling from $TESTTMP/repo4
searching for changes
no changes found
adding remote bookmark bm3
@@ -396,7 +396,7 @@
$ hg share -U thisdir/orig thisdir/abs
$ hg share -U --relative thisdir/abs thisdir/rel
$ cat thisdir/rel/.hg/sharedpath
- ../../orig/.hg (no-eol) (glob)
+ ../../orig/.hg (no-eol)
$ grep shared thisdir/*/.hg/requires
thisdir/abs/.hg/requires:shared
thisdir/rel/.hg/requires:shared
@@ -406,22 +406,22 @@
$ cd thisdir
$ hg -R rel root
- $TESTTMP/thisdir/rel (glob)
+ $TESTTMP/thisdir/rel
$ cd ..
now test that relative paths really are relative, survive across
renames and changes of PWD
$ hg -R thisdir/abs root
- $TESTTMP/thisdir/abs (glob)
+ $TESTTMP/thisdir/abs
$ hg -R thisdir/rel root
- $TESTTMP/thisdir/rel (glob)
+ $TESTTMP/thisdir/rel
$ mv thisdir thatdir
$ hg -R thatdir/abs root
- abort: .hg/sharedpath points to nonexistent directory $TESTTMP/thisdir/orig/.hg! (glob)
+ abort: .hg/sharedpath points to nonexistent directory $TESTTMP/thisdir/orig/.hg!
[255]
$ hg -R thatdir/rel root
- $TESTTMP/thatdir/rel (glob)
+ $TESTTMP/thatdir/rel
test unshare relshared repo
--- a/tests/test-shelve.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-shelve.t Mon Jan 22 17:53:02 2018 -0500
@@ -158,7 +158,7 @@
$ echo a >> a/a
$ hg mv b b.rename
- moving b/b to b.rename/b (glob)
+ moving b/b to b.rename/b
$ hg cp c c.copy
$ hg status -C
M a/a
@@ -352,7 +352,7 @@
# Unresolved merge conflicts:
#
- # a/a (glob)
+ # a/a
#
# To mark files as resolved: hg resolve --mark FILE
@@ -643,7 +643,7 @@
$ hg rebase -d 1 --config extensions.rebase=
rebasing 2:323bfa07f744 "xyz" (tip)
merging x
- saved backup bundle to $TESTTMP/shelverebase/.hg/strip-backup/323bfa07f744-78114325-rebase.hg (glob)
+ saved backup bundle to $TESTTMP/shelverebase/.hg/strip-backup/323bfa07f744-78114325-rebase.hg
$ hg unshelve
unshelving change 'default'
rebasing shelved changes
@@ -899,7 +899,7 @@
is a no-op), works (issue4398)
$ hg revert -a -r .
- reverting a/a (glob)
+ reverting a/a
$ hg resolve -m a/a
(no more unresolved files)
continue: hg unshelve --continue
@@ -1271,7 +1271,7 @@
$ rm .hg/unshelverebasestate
$ hg unshelve --abort
unshelve of 'default' aborted
- abort: (No such file or directory|The system cannot find the file specified) (re)
+ abort: $ENOENT$
[255]
Can the user leave the current state?
$ hg up -C .
--- a/tests/test-show-work.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-show-work.t Mon Jan 22 17:53:02 2018 -0500
@@ -242,6 +242,7 @@
> EOF
$ hg debugobsolete `hg log -r 'desc("commit 2")' -T "{node}"`
obsoleted 1 changesets
+ 1 new orphan changesets
$ hg show work --color=debug
@ [log.changeset changeset.draft changeset.unstable instability.orphan|32f3] [log.description|commit 3]
x [log.changeset changeset.draft changeset.obsolete|6a75] [log.description|commit 2]
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-single-head.t Mon Jan 22 17:53:02 2018 -0500
@@ -0,0 +1,204 @@
+=====================
+Test workflow options
+=====================
+
+ $ . "$TESTDIR/testlib/obsmarker-common.sh"
+
+Test single head enforcing - Setup
+=============================================
+
+ $ cat << EOF >> $HGRCPATH
+ > [experimental]
+ > evolution = all
+ > EOF
+ $ hg init single-head-server
+ $ cd single-head-server
+ $ cat <<EOF >> .hg/hgrc
+ > [phases]
+ > publish = no
+ > [experimental]
+ > single-head-per-branch = yes
+ > EOF
+ $ mkcommit ROOT
+ $ mkcommit c_dA0
+ $ cd ..
+
+ $ hg clone single-head-server client
+ updating to branch default
+ 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
+
+Test single head enforcing - with branch only
+---------------------------------------------
+
+ $ cd client
+
+continuing the current default branch
+
+ $ mkcommit c_dB0
+ $ hg push
+ pushing to $TESTTMP/single-head-server
+ searching for changes
+ adding changesets
+ adding manifests
+ adding file changes
+ added 1 changesets with 1 changes to 1 files
+
+creating a new branch
+
+ $ hg up 'desc("ROOT")'
+ 0 files updated, 0 files merged, 2 files removed, 0 files unresolved
+ $ hg branch branch_A
+ marked working directory as branch branch_A
+ (branches are permanent and global, did you want a bookmark?)
+ $ mkcommit c_aC0
+ $ hg push --new-branch
+ pushing to $TESTTMP/single-head-server
+ searching for changes
+ adding changesets
+ adding manifests
+ adding file changes
+ added 1 changesets with 1 changes to 1 files (+1 heads)
+
+Create a new head on the default branch
+
+ $ hg up 'desc("c_dA0")'
+ 1 files updated, 0 files merged, 1 files removed, 0 files unresolved
+ $ mkcommit c_dD0
+ created new head
+ $ hg push -f
+ pushing to $TESTTMP/single-head-server
+ searching for changes
+ adding changesets
+ adding manifests
+ adding file changes
+ added 1 changesets with 1 changes to 1 files (+1 heads)
+ transaction abort!
+ rollback completed
+ abort: rejecting multiple heads on branch "default"
+ (2 heads: 286d02a6e2a2 9bf953aa81f6)
+ [255]
+
+remerge them
+
+ $ hg merge
+ 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ (branch merge, don't forget to commit)
+ $ mkcommit c_dE0
+ $ hg push
+ pushing to $TESTTMP/single-head-server
+ searching for changes
+ adding changesets
+ adding manifests
+ adding file changes
+ added 2 changesets with 2 changes to 2 files
+
+Test single head enforcing - after rewrite
+------------------------------------------
+
+ $ mkcommit c_dF0
+ $ hg push
+ pushing to $TESTTMP/single-head-server
+ searching for changes
+ adding changesets
+ adding manifests
+ adding file changes
+ added 1 changesets with 1 changes to 1 files
+ $ hg commit --amend -m c_dF1
+ $ hg push
+ pushing to $TESTTMP/single-head-server
+ searching for changes
+ adding changesets
+ adding manifests
+ adding file changes
+ added 1 changesets with 0 changes to 1 files (+1 heads)
+ 1 new obsolescence markers
+ obsoleted 1 changesets
+
+Check it does not interfere with strip
+------------------------------------
+
+setup
+
+ $ hg branch branch_A --force
+ marked working directory as branch branch_A
+ $ mkcommit c_aG0
+ created new head
+ $ hg update 'desc("c_dF1")'
+ 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
+ $ mkcommit c_dH0
+ $ hg update 'desc("c_aG0")'
+ 1 files updated, 0 files merged, 1 files removed, 0 files unresolved
+ $ hg merge
+ 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ (branch merge, don't forget to commit)
+ $ mkcommit c_aI0
+ $ hg log -G
+ @ changeset: 10:49003e504178
+ |\ branch: branch_A
+ | | tag: tip
+ | | parent: 8:a33fb808fb4b
+ | | parent: 3:840af1c6bc88
+ | | user: test
+ | | date: Thu Jan 01 00:00:00 1970 +0000
+ | | summary: c_aI0
+ | |
+ | | o changeset: 9:fe47ea669cea
+ | | | parent: 7:99a2dc242c5d
+ | | | user: test
+ | | | date: Thu Jan 01 00:00:00 1970 +0000
+ | | | summary: c_dH0
+ | | |
+ | o | changeset: 8:a33fb808fb4b
+ | |/ branch: branch_A
+ | | user: test
+ | | date: Thu Jan 01 00:00:00 1970 +0000
+ | | summary: c_aG0
+ | |
+ | o changeset: 7:99a2dc242c5d
+ | | parent: 5:6ed1df20edb1
+ | | user: test
+ | | date: Thu Jan 01 00:00:00 1970 +0000
+ | | summary: c_dF1
+ | |
+ | o changeset: 5:6ed1df20edb1
+ | |\ parent: 4:9bf953aa81f6
+ | | | parent: 2:286d02a6e2a2
+ | | | user: test
+ | | | date: Thu Jan 01 00:00:00 1970 +0000
+ | | | summary: c_dE0
+ | | |
+ | | o changeset: 4:9bf953aa81f6
+ | | | parent: 1:134bc3852ad2
+ | | | user: test
+ | | | date: Thu Jan 01 00:00:00 1970 +0000
+ | | | summary: c_dD0
+ | | |
+ o | | changeset: 3:840af1c6bc88
+ | | | branch: branch_A
+ | | | parent: 0:ea207398892e
+ | | | user: test
+ | | | date: Thu Jan 01 00:00:00 1970 +0000
+ | | | summary: c_aC0
+ | | |
+ | o | changeset: 2:286d02a6e2a2
+ | |/ user: test
+ | | date: Thu Jan 01 00:00:00 1970 +0000
+ | | summary: c_dB0
+ | |
+ | o changeset: 1:134bc3852ad2
+ |/ user: test
+ | date: Thu Jan 01 00:00:00 1970 +0000
+ | summary: c_dA0
+ |
+ o changeset: 0:ea207398892e
+ user: test
+ date: Thu Jan 01 00:00:00 1970 +0000
+ summary: ROOT
+
+
+actual stripping
+
+ $ hg strip --config extensions.strip= --rev 'desc("c_dH0")'
+ saved backup bundle to $TESTTMP/client/.hg/strip-backup/fe47ea669cea-a41bf5a9-backup.hg
+ warning: ignoring unknown working parent 49003e504178!
+
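test-single-head.t is new: it enables the experimental single-head-per-branch option on the server repository so that a push which would leave two heads on any named branch is rejected, as the aborted `hg push -f` above shows. A minimal sketch of the same server-side setup, assuming a repository at the illustrative path below:

  # Append the option exercised by the test to a server repository's hgrc.
  config = (
      '[experimental]\n'
      'single-head-per-branch = yes\n'
  )

  with open('single-head-server/.hg/hgrc', 'a') as fp:
      fp.write(config)

With this in place the offending transaction is rolled back and the push aborts with 'rejecting multiple heads on branch "default"', listing the two heads involved.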
--- a/tests/test-sparse-profiles.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-sparse-profiles.t Mon Jan 22 17:53:02 2018 -0500
@@ -125,7 +125,7 @@
warning: conflicts while merging backend.sparse! (edit, then use 'hg resolve --mark')
warning: conflicts while merging data.py! (edit, then use 'hg resolve --mark')
0 files updated, 0 files merged, 0 files removed, 2 files unresolved
- use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon
+ use 'hg resolve' to retry unresolved file merges or 'hg merge --abort' to abandon
[1]
$ rm *.orig
--- a/tests/test-sparse.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-sparse.t Mon Jan 22 17:53:02 2018 -0500
@@ -224,7 +224,7 @@
merging hide
warning: conflicts while merging hide! (edit, then use 'hg resolve --mark')
0 files updated, 0 files merged, 0 files removed, 1 files unresolved
- use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon
+ use 'hg resolve' to retry unresolved file merges or 'hg merge --abort' to abandon
[1]
$ hg debugsparse
[exclude]
@@ -257,7 +257,7 @@
4 files changed, 4 insertions(+), 2 deletions(-)
$ hg strip -r . -k
- saved backup bundle to $TESTTMP/myrepo/.hg/strip-backup/39278f7c08a9-ce59e002-backup.hg (glob)
+ saved backup bundle to $TESTTMP/myrepo/.hg/strip-backup/39278f7c08a9-ce59e002-backup.hg
$ hg status
M show
? show2
@@ -267,7 +267,7 @@
$ hg commit -Aqm "add show2"
$ hg rebase -d 1 --config extensions.rebase=
rebasing 2:bdde55290160 "add show2" (tip)
- saved backup bundle to $TESTTMP/myrepo/.hg/strip-backup/bdde55290160-216ed9c6-rebase.hg (glob)
+ saved backup bundle to $TESTTMP/myrepo/.hg/strip-backup/bdde55290160-216ed9c6-rebase.hg
Verify log --sparse only shows commits that affect the sparse checkout
@@ -284,6 +284,27 @@
$ hg status
? dir1/dir2/file
+Mix files and subdirectories, both "glob:" and unprefixed
+
+ $ hg debugsparse --reset
+ $ touch dir1/notshown
+ $ hg commit -A dir1/notshown -m "notshown"
+ $ hg debugsparse --include 'dir1/dir2'
+ $ $PYTHON $TESTDIR/list-tree.py . | egrep -v '\.[\/]\.hg'
+ ./
+ ./dir1/
+ ./dir1/dir2/
+ ./dir1/dir2/file
+ ./hide.orig
+ $ hg debugsparse --delete 'dir1/dir2'
+ $ hg debugsparse --include 'glob:dir1/dir2'
+ $ $PYTHON $TESTDIR/list-tree.py . | egrep -v '\.[\/]\.hg'
+ ./
+ ./dir1/
+ ./dir1/dir2/
+ ./dir1/dir2/file
+ ./hide.orig
+
Test that add -s adds dirs to sparse profile
$ hg debugsparse --reset
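The new hunk above checks that a sparse include written as `glob:dir1/dir2` selects the same files as the unprefixed `dir1/dir2` form. A hedged equivalent outside the test (debugsparse is an experimental command, and the pattern is the test's own):

  import subprocess

  # Reset the sparse config, then include a directory using the
  # explicit glob: prefix; the resulting checkout should match the
  # unprefixed spelling shown in the test.
  subprocess.check_call(['hg', 'debugsparse', '--reset'])
  subprocess.check_call(['hg', 'debugsparse', '--include', 'glob:dir1/dir2'])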
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-split.t Mon Jan 22 17:53:02 2018 -0500
@@ -0,0 +1,530 @@
+#testcases obsstore-on obsstore-off
+
+ $ cat > $TESTTMP/editor.py <<EOF
+ > #!$PYTHON
+ > import os, sys
+ > path = os.path.join(os.environ['TESTTMP'], 'messages')
+ > messages = open(path).read().split('--\n')
+ > prompt = open(sys.argv[1]).read()
+ > sys.stdout.write(''.join('EDITOR: %s' % l for l in prompt.splitlines(True)))
+ > sys.stdout.flush()
+ > with open(sys.argv[1], 'w') as f:
+ > f.write(messages[0])
+ > with open(path, 'w') as f:
+ > f.write('--\n'.join(messages[1:]))
+ > EOF
+
+ $ cat >> $HGRCPATH <<EOF
+ > [extensions]
+ > drawdag=$TESTDIR/drawdag.py
+ > split=
+ > [ui]
+ > interactive=1
+ > color=no
+ > paginate=never
+ > [diff]
+ > git=1
+ > unified=0
+ > [alias]
+ > glog=log -G -T '{rev}:{node|short} {desc} {bookmarks}\n'
+ > EOF
+
+#if obsstore-on
+ $ cat >> $HGRCPATH <<EOF
+ > [experimental]
+ > evolution=all
+ > EOF
+#endif
+
+ $ hg init a
+ $ cd a
+
+Nothing to split
+
+ $ hg split
+ nothing to split
+ [1]
+
+ $ hg commit -m empty --config ui.allowemptycommit=1
+ $ hg split
+ abort: cannot split an empty revision
+ [255]
+
+ $ rm -rf .hg
+ $ hg init
+
+Cannot split working directory
+
+ $ hg split -r 'wdir()'
+ abort: cannot split working directory
+ [255]
+
+Generate some content. The sed filter drops CR on Windows, which is dropped in
+the a > b line.
+
+ $ $TESTDIR/seq.py 1 5 | sed 's/\r$//' >> a
+ $ hg ci -m a1 -A a -q
+ $ hg bookmark -i r1
+ $ sed 's/1/11/;s/3/33/;s/5/55/' a > b
+ $ mv b a
+ $ hg ci -m a2 -q
+ $ hg bookmark -i r2
+
+Cannot split a public changeset
+
+ $ hg phase --public -r 'all()'
+ $ hg split .
+ abort: cannot split public changeset
+ (see 'hg help phases' for details)
+ [255]
+
+ $ hg phase --draft -f -r 'all()'
+
+Cannot split while working directory is dirty
+
+ $ touch dirty
+ $ hg add dirty
+ $ hg split .
+ abort: uncommitted changes
+ [255]
+ $ hg forget dirty
+ $ rm dirty
+
+Split a head
+
+ $ cp -R . ../b
+ $ cp -R . ../c
+
+ $ hg bookmark r3
+
+ $ hg split 'all()'
+ abort: cannot split multiple revisions
+ [255]
+
+ $ runsplit() {
+ > cat > $TESTTMP/messages <<EOF
+ > split 1
+ > --
+ > split 2
+ > --
+ > split 3
+ > EOF
+ > cat <<EOF | hg split "$@"
+ > y
+ > y
+ > y
+ > y
+ > y
+ > y
+ > EOF
+ > }
+
+ $ HGEDITOR=false runsplit
+ diff --git a/a b/a
+ 1 hunks, 1 lines changed
+ examine changes to 'a'? [Ynesfdaq?] y
+
+ @@ -5,1 +5,1 @@ 4
+ -5
+ +55
+ record this change to 'a'? [Ynesfdaq?] y
+
+ transaction abort!
+ rollback completed
+ abort: edit failed: false exited with status 1
+ [255]
+ $ hg status
+
+ $ HGEDITOR="\"$PYTHON\" $TESTTMP/editor.py"
+ $ runsplit
+ diff --git a/a b/a
+ 1 hunks, 1 lines changed
+ examine changes to 'a'? [Ynesfdaq?] y
+
+ @@ -5,1 +5,1 @@ 4
+ -5
+ +55
+ record this change to 'a'? [Ynesfdaq?] y
+
+ EDITOR: HG: Splitting 1df0d5c5a3ab. Write commit message for the first split changeset.
+ EDITOR: a2
+ EDITOR:
+ EDITOR:
+ EDITOR: HG: Enter commit message. Lines beginning with 'HG:' are removed.
+ EDITOR: HG: Leave message empty to abort commit.
+ EDITOR: HG: --
+ EDITOR: HG: user: test
+ EDITOR: HG: branch 'default'
+ EDITOR: HG: changed a
+ created new head
+ diff --git a/a b/a
+ 1 hunks, 1 lines changed
+ examine changes to 'a'? [Ynesfdaq?] y
+
+ @@ -3,1 +3,1 @@ 2
+ -3
+ +33
+ record this change to 'a'? [Ynesfdaq?] y
+
+ EDITOR: HG: Splitting 1df0d5c5a3ab. So far it has been split into:
+ EDITOR: HG: - e704349bd21b: split 1
+ EDITOR: HG: Write commit message for the next split changeset.
+ EDITOR: a2
+ EDITOR:
+ EDITOR:
+ EDITOR: HG: Enter commit message. Lines beginning with 'HG:' are removed.
+ EDITOR: HG: Leave message empty to abort commit.
+ EDITOR: HG: --
+ EDITOR: HG: user: test
+ EDITOR: HG: branch 'default'
+ EDITOR: HG: changed a
+ diff --git a/a b/a
+ 1 hunks, 1 lines changed
+ examine changes to 'a'? [Ynesfdaq?] y
+
+ @@ -1,1 +1,1 @@
+ -1
+ +11
+ record this change to 'a'? [Ynesfdaq?] y
+
+ EDITOR: HG: Splitting 1df0d5c5a3ab. So far it has been split into:
+ EDITOR: HG: - e704349bd21b: split 1
+ EDITOR: HG: - a09ad58faae3: split 2
+ EDITOR: HG: Write commit message for the next split changeset.
+ EDITOR: a2
+ EDITOR:
+ EDITOR:
+ EDITOR: HG: Enter commit message. Lines beginning with 'HG:' are removed.
+ EDITOR: HG: Leave message empty to abort commit.
+ EDITOR: HG: --
+ EDITOR: HG: user: test
+ EDITOR: HG: branch 'default'
+ EDITOR: HG: changed a
+ saved backup bundle to $TESTTMP/a/.hg/strip-backup/1df0d5c5a3ab-8341b760-split.hg (obsstore-off !)
+
+#if obsstore-off
+ $ hg bookmark
+ r1 0:a61bcde8c529
+ r2 3:00eebaf8d2e2
+ * r3 3:00eebaf8d2e2
+ $ hg glog -p
+ @ 3:00eebaf8d2e2 split 3 r2 r3
+ | diff --git a/a b/a
+ | --- a/a
+ | +++ b/a
+ | @@ -1,1 +1,1 @@
+ | -1
+ | +11
+ |
+ o 2:a09ad58faae3 split 2
+ | diff --git a/a b/a
+ | --- a/a
+ | +++ b/a
+ | @@ -3,1 +3,1 @@
+ | -3
+ | +33
+ |
+ o 1:e704349bd21b split 1
+ | diff --git a/a b/a
+ | --- a/a
+ | +++ b/a
+ | @@ -5,1 +5,1 @@
+ | -5
+ | +55
+ |
+ o 0:a61bcde8c529 a1 r1
+ diff --git a/a b/a
+ new file mode 100644
+ --- /dev/null
+ +++ b/a
+ @@ -0,0 +1,5 @@
+ +1
+ +2
+ +3
+ +4
+ +5
+
+#else
+ $ hg bookmark
+ r1 0:a61bcde8c529
+ r2 4:00eebaf8d2e2
+ * r3 4:00eebaf8d2e2
+ $ hg glog
+ @ 4:00eebaf8d2e2 split 3 r2 r3
+ |
+ o 3:a09ad58faae3 split 2
+ |
+ o 2:e704349bd21b split 1
+ |
+ o 0:a61bcde8c529 a1 r1
+
+#endif
+
+Split a head while working parent is not that head
+
+ $ cd $TESTTMP/b
+
+ $ hg up 0 -q
+ $ hg bookmark r3
+
+ $ runsplit tip >/dev/null
+
+#if obsstore-off
+ $ hg bookmark
+ r1 0:a61bcde8c529
+ r2 3:00eebaf8d2e2
+ * r3 0:a61bcde8c529
+ $ hg glog
+ o 3:00eebaf8d2e2 split 3 r2
+ |
+ o 2:a09ad58faae3 split 2
+ |
+ o 1:e704349bd21b split 1
+ |
+ @ 0:a61bcde8c529 a1 r1 r3
+
+#else
+ $ hg bookmark
+ r1 0:a61bcde8c529
+ r2 4:00eebaf8d2e2
+ * r3 0:a61bcde8c529
+ $ hg glog
+ o 4:00eebaf8d2e2 split 3 r2
+ |
+ o 3:a09ad58faae3 split 2
+ |
+ o 2:e704349bd21b split 1
+ |
+ @ 0:a61bcde8c529 a1 r1 r3
+
+#endif
+
+Split a non-head
+
+ $ cd $TESTTMP/c
+ $ echo d > d
+ $ hg ci -m d1 -A d
+ $ hg bookmark -i d1
+ $ echo 2 >> d
+ $ hg ci -m d2
+ $ echo 3 >> d
+ $ hg ci -m d3
+ $ hg bookmark -i d3
+ $ hg up '.^' -q
+ $ hg bookmark d2
+ $ cp -R . ../d
+
+ $ runsplit -r 1 | grep rebasing
+ rebasing 2:b5c5ea414030 "d1" (d1)
+ rebasing 3:f4a0a8d004cc "d2" (d2)
+ rebasing 4:777940761eba "d3" (d3)
+#if obsstore-off
+ $ hg bookmark
+ d1 4:c4b449ef030e
+ * d2 5:c9dd00ab36a3
+ d3 6:19f476bc865c
+ r1 0:a61bcde8c529
+ r2 3:00eebaf8d2e2
+ $ hg glog -p
+ o 6:19f476bc865c d3 d3
+ | diff --git a/d b/d
+ | --- a/d
+ | +++ b/d
+ | @@ -2,0 +3,1 @@
+ | +3
+ |
+ @ 5:c9dd00ab36a3 d2 d2
+ | diff --git a/d b/d
+ | --- a/d
+ | +++ b/d
+ | @@ -1,0 +2,1 @@
+ | +2
+ |
+ o 4:c4b449ef030e d1 d1
+ | diff --git a/d b/d
+ | new file mode 100644
+ | --- /dev/null
+ | +++ b/d
+ | @@ -0,0 +1,1 @@
+ | +d
+ |
+ o 3:00eebaf8d2e2 split 3 r2
+ | diff --git a/a b/a
+ | --- a/a
+ | +++ b/a
+ | @@ -1,1 +1,1 @@
+ | -1
+ | +11
+ |
+ o 2:a09ad58faae3 split 2
+ | diff --git a/a b/a
+ | --- a/a
+ | +++ b/a
+ | @@ -3,1 +3,1 @@
+ | -3
+ | +33
+ |
+ o 1:e704349bd21b split 1
+ | diff --git a/a b/a
+ | --- a/a
+ | +++ b/a
+ | @@ -5,1 +5,1 @@
+ | -5
+ | +55
+ |
+ o 0:a61bcde8c529 a1 r1
+ diff --git a/a b/a
+ new file mode 100644
+ --- /dev/null
+ +++ b/a
+ @@ -0,0 +1,5 @@
+ +1
+ +2
+ +3
+ +4
+ +5
+
+#else
+ $ hg bookmark
+ d1 8:c4b449ef030e
+ * d2 9:c9dd00ab36a3
+ d3 10:19f476bc865c
+ r1 0:a61bcde8c529
+ r2 7:00eebaf8d2e2
+ $ hg glog
+ o 10:19f476bc865c d3 d3
+ |
+ @ 9:c9dd00ab36a3 d2 d2
+ |
+ o 8:c4b449ef030e d1 d1
+ |
+ o 7:00eebaf8d2e2 split 3 r2
+ |
+ o 6:a09ad58faae3 split 2
+ |
+ o 5:e704349bd21b split 1
+ |
+ o 0:a61bcde8c529 a1 r1
+
+#endif
+
+Split a non-head without rebase
+
+ $ cd $TESTTMP/d
+#if obsstore-off
+ $ runsplit -r 1 --no-rebase
+ abort: cannot split changeset with children without rebase
+ [255]
+#else
+ $ runsplit -r 1 --no-rebase >/dev/null
+ 3 new orphan changesets
+ $ hg bookmark
+ d1 2:b5c5ea414030
+ * d2 3:f4a0a8d004cc
+ d3 4:777940761eba
+ r1 0:a61bcde8c529
+ r2 7:00eebaf8d2e2
+
+ $ hg glog
+ o 7:00eebaf8d2e2 split 3 r2
+ |
+ o 6:a09ad58faae3 split 2
+ |
+ o 5:e704349bd21b split 1
+ |
+ | * 4:777940761eba d3 d3
+ | |
+ | @ 3:f4a0a8d004cc d2 d2
+ | |
+ | * 2:b5c5ea414030 d1 d1
+ | |
+ | x 1:1df0d5c5a3ab a2
+ |/
+ o 0:a61bcde8c529 a1 r1
+
+#endif
+
+Split a non-head with obsoleted descendants
+
+#if obsstore-on
+ $ hg init $TESTTMP/e
+ $ cd $TESTTMP/e
+ $ hg debugdrawdag <<'EOS'
+ > H I J
+ > | | |
+ > F G1 G2 # amend: G1 -> G2
+ > | | / # prune: F
+ > C D E
+ > \|/
+ > B
+ > |
+ > A
+ > EOS
+ 2 new orphan changesets
+ $ eval `hg tags -T '{tag}={node}\n'`
+ $ rm .hg/localtags
+ $ hg split $B --config experimental.evolution=createmarkers
+ abort: split would leave orphaned changesets behind
+ [255]
+ $ cat > $TESTTMP/messages <<EOF
+ > Split B
+ > EOF
+ $ cat <<EOF | hg split $B
+ > y
+ > y
+ > EOF
+ diff --git a/B b/B
+ new file mode 100644
+ examine changes to 'B'? [Ynesfdaq?] y
+
+ @@ -0,0 +1,1 @@
+ +B
+ \ No newline at end of file
+ record this change to 'B'? [Ynesfdaq?] y
+
+ EDITOR: HG: Splitting 112478962961. Write commit message for the first split changeset.
+ EDITOR: B
+ EDITOR:
+ EDITOR:
+ EDITOR: HG: Enter commit message. Lines beginning with 'HG:' are removed.
+ EDITOR: HG: Leave message empty to abort commit.
+ EDITOR: HG: --
+ EDITOR: HG: user: test
+ EDITOR: HG: branch 'default'
+ EDITOR: HG: added B
+ created new head
+ rebasing 2:26805aba1e60 "C"
+ rebasing 3:be0ef73c17ad "D"
+ rebasing 4:49cb92066bfd "E"
+ rebasing 7:97a6268cc7ef "G2"
+ rebasing 10:e2f1e425c0db "J"
+ $ hg glog -r 'sort(all(), topo)'
+ o 16:556c085f8b52 J
+ |
+ o 15:8761f6c9123f G2
+ |
+ o 14:a7aeffe59b65 E
+ |
+ | o 13:e1e914ede9ab D
+ |/
+ | o 12:01947e9b98aa C
+ |/
+ o 11:0947baa74d47 Split B
+ |
+ | * 9:88ede1d5ee13 I
+ | |
+ | x 6:af8cbf225b7b G1
+ | |
+ | x 3:be0ef73c17ad D
+ | |
+ | | * 8:74863e5b5074 H
+ | | |
+ | | x 5:ee481a2a1e69 F
+ | | |
+ | | x 2:26805aba1e60 C
+ | |/
+ | x 1:112478962961 B
+ |/
+ o 0:426bada5c675 A
+
+#endif
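test-split.t is a new test for the split extension, covering heads, non-heads (with and without automatic rebase of descendants), bookmarks, and both obsstore-on and obsstore-off configurations. For orientation, a hedged sketch of driving `hg split` non-interactively the way the test's runsplit helper does (the target revision, the number of piped answers, and the reliance on HGEDITOR to supply commit messages are all illustrative):

  import subprocess

  # Split the working directory parent, answering "y" to every
  # interactive hunk prompt; the editor configured via HGEDITOR must
  # provide one commit message per split changeset, as editor.py does
  # in the test above.
  proc = subprocess.Popen(
      ['hg', '--config', 'extensions.split=', 'split', '-r', '.'],
      stdin=subprocess.PIPE,
  )
  proc.communicate(b'y\n' * 6)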
--- a/tests/test-ssh-bundle1.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-ssh-bundle1.t Mon Jan 22 17:53:02 2018 -0500
@@ -372,7 +372,7 @@
73649e48688a
$ hg id --ssh "sh ssh.sh" "ssh://user@dummy/a'repo"
- remote: Illegal repository "$TESTTMP/a'repo" (glob)
+ remote: Illegal repository "$TESTTMP/a'repo"
abort: no suitable response from remote hg!
[255]
@@ -467,8 +467,8 @@
running .* ".*/dummyssh" ['"]user@dummy['"] ('|")hg -R remote serve --stdio('|") (re)
sending hello command
sending between command
- remote: 372
- remote: capabilities: lookup changegroupsubset branchmap pushkey known getbundle unbundlehash batch streamreqs=generaldelta,revlogv1 bundle2=HG20%0Achangegroup%3D01%2C02%0Adigests%3Dmd5%2Csha1%2Csha512%0Aerror%3Dabort%2Cunsupportedcontent%2Cpushraced%2Cpushkey%0Ahgtagsfnodes%0Alistkeys%0Aphases%3Dheads%0Apushkey%0Aremote-changegroup%3Dhttp%2Chttps unbundle=HG10GZ,HG10BZ,HG10UN
+ remote: 384
+ remote: capabilities: lookup changegroupsubset branchmap pushkey known getbundle unbundlehash batch streamreqs=generaldelta,revlogv1 $USUAL_BUNDLE2_CAPS_SERVER$ unbundle=HG10GZ,HG10BZ,HG10UN
remote: 1
preparing listkeys for "bookmarks"
sending listkeys command
--- a/tests/test-ssh.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-ssh.t Mon Jan 22 17:53:02 2018 -0500
@@ -389,7 +389,7 @@
73649e48688a
$ hg id --ssh "sh ssh.sh" "ssh://user@dummy/a'repo"
- remote: Illegal repository "$TESTTMP/a'repo" (glob)
+ remote: Illegal repository "$TESTTMP/a'repo"
abort: no suitable response from remote hg!
[255]
@@ -478,26 +478,41 @@
debug output
- $ hg pull --debug ssh://user@dummy/remote
+ $ hg pull --debug ssh://user@dummy/remote --config devel.debug.peer-request=yes
pulling from ssh://user@dummy/remote
running .* ".*/dummyssh" ['"]user@dummy['"] ('|")hg -R remote serve --stdio('|") (re)
+ devel-peer-request: hello
sending hello command
+ devel-peer-request: between
+ devel-peer-request: pairs: 81 bytes
sending between command
- remote: 372
- remote: capabilities: lookup changegroupsubset branchmap pushkey known getbundle unbundlehash batch streamreqs=generaldelta,revlogv1 bundle2=HG20%0Achangegroup%3D01%2C02%0Adigests%3Dmd5%2Csha1%2Csha512%0Aerror%3Dabort%2Cunsupportedcontent%2Cpushraced%2Cpushkey%0Ahgtagsfnodes%0Alistkeys%0Aphases%3Dheads%0Apushkey%0Aremote-changegroup%3Dhttp%2Chttps unbundle=HG10GZ,HG10BZ,HG10UN
+ remote: 384
+ remote: capabilities: lookup changegroupsubset branchmap pushkey known getbundle unbundlehash batch streamreqs=generaldelta,revlogv1 $USUAL_BUNDLE2_CAPS_SERVER$ unbundle=HG10GZ,HG10BZ,HG10UN
remote: 1
query 1; heads
+ devel-peer-request: batch
+ devel-peer-request: cmds: 141 bytes
sending batch command
searching for changes
all remote heads known locally
no changes found
+ devel-peer-request: getbundle
+ devel-peer-request: bookmarks: 1 bytes
+ devel-peer-request: bundlecaps: 247 bytes
+ devel-peer-request: cg: 1 bytes
+ devel-peer-request: common: 122 bytes
+ devel-peer-request: heads: 122 bytes
+ devel-peer-request: listkeys: 9 bytes
+ devel-peer-request: phases: 1 bytes
sending getbundle command
bundle2-input-bundle: with-transaction
+ bundle2-input-part: "bookmarks" supported
+ bundle2-input-part: total payload size 26
bundle2-input-part: "listkeys" (params: 1 mandatory) supported
bundle2-input-part: total payload size 45
bundle2-input-part: "phase-heads" supported
bundle2-input-part: total payload size 72
- bundle2-input-bundle: 1 parts total
+ bundle2-input-bundle: 2 parts total
checking for updated bookmarks
$ cd ..
@@ -578,3 +593,37 @@
remote: abort: this is an exercise
abort: pull failed on remote
[255]
+
+abort with no error hint when there is an ssh problem while pulling
+
+ $ hg pull ssh://brokenrepository -e "\"$PYTHON\" \"$TESTDIR/dummyssh\""
+ pulling from ssh://brokenrepository/
+ abort: no suitable response from remote hg!
+ [255]
+
+abort with a configured error hint when there is an ssh problem while pulling
+
+ $ hg pull ssh://brokenrepository -e "\"$PYTHON\" \"$TESTDIR/dummyssh\"" \
+ > --config ui.ssherrorhint="Please see http://company/internalwiki/ssh.html"
+ pulling from ssh://brokenrepository/
+ abort: no suitable response from remote hg!
+ (Please see http://company/internalwiki/ssh.html)
+ [255]
+
+test that a custom environment is passed down to the ssh executable
+ $ cat >>dumpenv <<EOF
+ > #! /bin/sh
+ > echo \$VAR >&2
+ > EOF
+ $ chmod +x dumpenv
+ $ hg pull ssh://something --config ui.ssh="sh dumpenv"
+ pulling from ssh://something/
+ remote:
+ abort: no suitable response from remote hg!
+ [255]
+ $ hg pull ssh://something --config ui.ssh="sh dumpenv" --config sshenv.VAR=17
+ pulling from ssh://something/
+ remote: 17
+ abort: no suitable response from remote hg!
+ [255]
+
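Two additions to test-ssh.t above introduce configuration knobs worth noting: ui.ssherrorhint, which appends a site-specific hint to the 'no suitable response from remote hg!' abort, and the [sshenv] section, whose keys are exported into the environment of the ssh executable. A hedged sketch reusing the test's own values (the broken URL, the dumpenv script, and VAR=17 come straight from the hunks above):

  import subprocess

  # A configured hint is shown under the ssh abort message.
  subprocess.call([
      'hg', 'pull', 'ssh://brokenrepository',
      '--config',
      'ui.ssherrorhint=Please see http://company/internalwiki/ssh.html',
  ])

  # [sshenv] entries reach the ssh executable; the test's dumpenv
  # script echoes $VAR back, which shows up as "remote: 17".
  subprocess.call([
      'hg', 'pull', 'ssh://something',
      '--config', 'ui.ssh=sh dumpenv',
      '--config', 'sshenv.VAR=17',
  ])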
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-sshserver.py Mon Jan 22 17:53:02 2018 -0500
@@ -0,0 +1,47 @@
+from __future__ import absolute_import, print_function
+
+import io
+import unittest
+
+import silenttestrunner
+
+from mercurial import (
+ sshserver,
+ util,
+ wireproto,
+)
+
+class SSHServerGetArgsTests(unittest.TestCase):
+ def testparseknown(self):
+ tests = [
+ ('* 0\nnodes 0\n', ['', {}]),
+ ('* 0\nnodes 40\n1111111111111111111111111111111111111111\n',
+ ['1111111111111111111111111111111111111111', {}]),
+ ]
+ for input, expected in tests:
+ self.assertparse('known', input, expected)
+
+ def assertparse(self, cmd, input, expected):
+ server = mockserver(input)
+ _func, spec = wireproto.commands[cmd]
+ self.assertEqual(server.getargs(spec), expected)
+
+def mockserver(inbytes):
+ ui = mockui(inbytes)
+ repo = mockrepo(ui)
+ return sshserver.sshserver(ui, repo)
+
+class mockrepo(object):
+ def __init__(self, ui):
+ self.ui = ui
+
+class mockui(object):
+ def __init__(self, inbytes):
+ self.fin = io.BytesIO(inbytes)
+ self.fout = io.BytesIO()
+ self.ferr = io.BytesIO()
+
+if __name__ == '__main__':
+ # Don't call into msvcrt to set BytesIO to binary mode
+ util.setbinary = lambda fp: True
+ silenttestrunner.main(__name__)
--- a/tests/test-status-color.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-status-color.t Mon Jan 22 17:53:02 2018 -0500
@@ -29,6 +29,22 @@
[status.unknown|? ][status.unknown|b/2/in_b_2]
[status.unknown|? ][status.unknown|b/in_b]
[status.unknown|? ][status.unknown|in_root]
+HGPLAIN disables color
+ $ HGPLAIN=1 hg status --color=debug
+ ? a/1/in_a_1 (glob)
+ ? a/in_a (glob)
+ ? b/1/in_b_1 (glob)
+ ? b/2/in_b_2 (glob)
+ ? b/in_b (glob)
+ ? in_root
+HGPLAINEXCEPT=color does not disable color
+ $ HGPLAINEXCEPT=color hg status --color=debug
+ [status.unknown|? ][status.unknown|a/1/in_a_1] (glob)
+ [status.unknown|? ][status.unknown|a/in_a] (glob)
+ [status.unknown|? ][status.unknown|b/1/in_b_1] (glob)
+ [status.unknown|? ][status.unknown|b/2/in_b_2] (glob)
+ [status.unknown|? ][status.unknown|b/in_b] (glob)
+ [status.unknown|? ][status.unknown|in_root]
hg status with template
$ hg status -T "{label('red', path)}\n" --color=debug
@@ -363,7 +379,7 @@
warning: conflicts while merging a! (edit, then use 'hg resolve --mark')
warning: conflicts while merging b! (edit, then use 'hg resolve --mark')
0 files updated, 0 files merged, 0 files removed, 2 files unresolved
- use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon
+ use 'hg resolve' to retry unresolved file merges or 'hg merge --abort' to abandon
[1]
$ hg resolve -m b
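The new hunk at the top of test-status-color.t pins down the interaction between plain mode and color: HGPLAIN disables the debug color labels, while HGPLAINEXCEPT=color preserves them. A small sketch of the same two invocations (output inspection omitted):

  import os
  import subprocess

  # With HGPLAIN set, --color=debug produces unlabelled output.
  subprocess.call(['hg', 'status', '--color=debug'],
                  env=dict(os.environ, HGPLAIN='1'))

  # Listing "color" in HGPLAINEXCEPT keeps the [status.unknown|...]
  # labels asserted in the test above.
  subprocess.call(['hg', 'status', '--color=debug'],
                  env=dict(os.environ, HGPLAINEXCEPT='color'))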
--- a/tests/test-status-rev.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-status-rev.t Mon Jan 22 17:53:02 2018 -0500
@@ -83,8 +83,7 @@
! missing_missing_missing-tracked
$ hg status -A --rev 1 'glob:missing_missing_missing-untracked'
- missing_missing_missing-untracked: The system cannot find the file specified (windows !)
- missing_missing_missing-untracked: No such file or directory (no-windows !)
+ missing_missing_missing-untracked: $ENOENT$
Status between first and second commit. Should ignore dirstate status.
--- a/tests/test-status.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-status.t Mon Jan 22 17:53:02 2018 -0500
@@ -572,6 +572,10 @@
$ hg st --config ui.statuscopies=false
M a
R b
+ $ hg st --config ui.tweakdefaults=yes
+ M a
+ b
+ R b
using log status template (issue5155)
$ hg log -Tstatus -r 'wdir()' -C
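The small test-status.t addition above confirms that ui.tweakdefaults also turns on copy reporting in `hg status`, matching the explicit ui.statuscopies run just before it. A one-line equivalent, assuming it is run inside the test's repository:

  import subprocess

  # With tweakdefaults enabled, the copy source "b" is listed under
  # the modified file "a", as asserted above.
  subprocess.call(['hg', 'st', '--config', 'ui.tweakdefaults=yes'])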
--- a/tests/test-strip.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-strip.t Mon Jan 22 17:53:02 2018 -0500
@@ -203,7 +203,7 @@
$ hg --traceback strip 4
1 files updated, 0 files merged, 0 files removed, 0 files unresolved
- saved backup bundle to $TESTTMP/test/.hg/strip-backup/264128213d29-0b39d6bf-backup.hg (glob)
+ saved backup bundle to $TESTTMP/test/.hg/strip-backup/264128213d29-0b39d6bf-backup.hg
$ hg parents
changeset: 1:ef3a871183d7
user: test
@@ -659,7 +659,7 @@
singlenode1 13:43227190fef8
singlenode2 13:43227190fef8
$ hg strip -B multipledelete1 -B multipledelete2
- saved backup bundle to $TESTTMP/bookmarks/.hg/strip-backup/e46a4836065c-89ec65c2-backup.hg (glob)
+ saved backup bundle to $TESTTMP/bookmarks/.hg/strip-backup/e46a4836065c-89ec65c2-backup.hg
bookmark 'multipledelete1' deleted
bookmark 'multipledelete2' deleted
$ hg id -ir e46a4836065c
@@ -669,7 +669,7 @@
abort: unknown revision 'b4594d867745'!
[255]
$ hg strip -B singlenode1 -B singlenode2
- saved backup bundle to $TESTTMP/bookmarks/.hg/strip-backup/43227190fef8-8da858f2-backup.hg (glob)
+ saved backup bundle to $TESTTMP/bookmarks/.hg/strip-backup/43227190fef8-8da858f2-backup.hg
bookmark 'singlenode1' deleted
bookmark 'singlenode2' deleted
$ hg id -ir 43227190fef8
@@ -737,12 +737,12 @@
$ hg commit -Aqm b
$ hg strip -r 0
0 files updated, 0 files merged, 2 files removed, 0 files unresolved
- saved backup bundle to $TESTTMP/doublebundle/.hg/strip-backup/3903775176ed-e68910bd-backup.hg (glob)
+ saved backup bundle to $TESTTMP/doublebundle/.hg/strip-backup/3903775176ed-e68910bd-backup.hg
$ ls .hg/strip-backup
3903775176ed-e68910bd-backup.hg
$ hg pull -q -r 3903775176ed .hg/strip-backup/3903775176ed-e68910bd-backup.hg
$ hg strip -r 0
- saved backup bundle to $TESTTMP/doublebundle/.hg/strip-backup/3903775176ed-54390173-backup.hg (glob)
+ saved backup bundle to $TESTTMP/doublebundle/.hg/strip-backup/3903775176ed-54390173-backup.hg
$ ls .hg/strip-backup
3903775176ed-54390173-backup.hg
3903775176ed-e68910bd-backup.hg
@@ -846,7 +846,7 @@
bundle2-output-bundle: "HG20", (1 params) 2 parts total
bundle2-output-part: "changegroup" (params: 1 mandatory 1 advisory) streamed payload
bundle2-output-part: "phase-heads" 24 bytes payload
- saved backup bundle to $TESTTMP/issue4736/.hg/strip-backup/6625a5168474-345bb43d-backup.hg (glob)
+ saved backup bundle to $TESTTMP/issue4736/.hg/strip-backup/6625a5168474-345bb43d-backup.hg
updating the branch cache
invalid branchheads cache (served): tip differs
truncating cache/rbc-revs-v1 to 24
@@ -917,7 +917,7 @@
$ hg book -r tip blah
$ hg strip ".^" --config extensions.crash=$TESTTMP/stripstalephasecache.py
0 files updated, 0 files merged, 2 files removed, 0 files unresolved
- saved backup bundle to $TESTTMP/issue4736/.hg/strip-backup/8f0b4384875c-4fa10deb-backup.hg (glob)
+ saved backup bundle to $TESTTMP/issue4736/.hg/strip-backup/8f0b4384875c-4fa10deb-backup.hg
$ hg up -C 1
0 files updated, 0 files merged, 1 files removed, 0 files unresolved
@@ -937,8 +937,8 @@
> repo.__class__ = crashstriprepo
> EOF
$ hg strip tip --config extensions.crash=$TESTTMP/crashstrip.py
- saved backup bundle to $TESTTMP/issue4736/.hg/strip-backup/5c51d8d6557d-70daef06-backup.hg (glob)
- strip failed, backup bundle stored in '$TESTTMP/issue4736/.hg/strip-backup/5c51d8d6557d-70daef06-backup.hg' (glob)
+ saved backup bundle to $TESTTMP/issue4736/.hg/strip-backup/5c51d8d6557d-70daef06-backup.hg
+ strip failed, backup bundle stored in '$TESTTMP/issue4736/.hg/strip-backup/5c51d8d6557d-70daef06-backup.hg'
abort: boom
[255]
@@ -1005,7 +1005,7 @@
$ hg strip --force -r 35358f982181
0 files updated, 0 files merged, 1 files removed, 0 files unresolved
- saved backup bundle to $TESTTMP/issue4736/.hg/strip-backup/35358f982181-50d992d4-backup.hg (glob)
+ saved backup bundle to $TESTTMP/issue4736/.hg/strip-backup/35358f982181-50d992d4-backup.hg
$ hg log -G
@ changeset: 3:f62c6c09b707
| branch: new-branch
@@ -1082,7 +1082,7 @@
$ hg strip -r 35358f982181
0 files updated, 0 files merged, 1 files removed, 0 files unresolved
- saved backup bundle to $TESTTMP/issue4736/.hg/strip-backup/35358f982181-a6f020aa-backup.hg (glob)
+ saved backup bundle to $TESTTMP/issue4736/.hg/strip-backup/35358f982181-a6f020aa-backup.hg
$ hg log -G
@ changeset: 3:f62c6c09b707
| branch: new-branch
@@ -1109,7 +1109,7 @@
$ hg pull -u $TESTTMP/issue4736/.hg/strip-backup/35358f982181-a6f020aa-backup.hg
- pulling from $TESTTMP/issue4736/.hg/strip-backup/35358f982181-a6f020aa-backup.hg (glob)
+ pulling from $TESTTMP/issue4736/.hg/strip-backup/35358f982181-a6f020aa-backup.hg
searching for changes
adding changesets
adding manifests
@@ -1119,7 +1119,7 @@
1 files updated, 0 files merged, 0 files removed, 0 files unresolved
$ hg strip -k -r 35358f982181
- saved backup bundle to $TESTTMP/issue4736/.hg/strip-backup/35358f982181-a6f020aa-backup.hg (glob)
+ saved backup bundle to $TESTTMP/issue4736/.hg/strip-backup/35358f982181-a6f020aa-backup.hg
$ hg log -G
@ changeset: 3:f62c6c09b707
| branch: new-branch
@@ -1188,7 +1188,7 @@
> EOF
$ hg testdelayedstrip --config extensions.t=$TESTTMP/delayedstrip.py
warning: orphaned descendants detected, not stripping 08ebfeb61bac, 112478962961, 7fb047a69f22
- saved backup bundle to $TESTTMP/delayedstrip/.hg/strip-backup/f585351a92f8-17475721-I.hg (glob)
+ saved backup bundle to $TESTTMP/delayedstrip/.hg/strip-backup/f585351a92f8-17475721-I.hg
$ hg log -G -T '{rev}:{node|short} {desc}' -r 'sort(all(), topo)'
@ 6:2f2d51af6205 J
@@ -1242,7 +1242,7 @@
> EOF
$ hg testnodescleanup --config extensions.t=$TESTTMP/scmutilcleanup.py
warning: orphaned descendants detected, not stripping 112478962961, 1fc8102cda62, 26805aba1e60
- saved backup bundle to $TESTTMP/scmutilcleanup/.hg/strip-backup/f585351a92f8-73fb7c03-replace.hg (glob)
+ saved backup bundle to $TESTTMP/scmutilcleanup/.hg/strip-backup/f585351a92f8-73fb7c03-replace.hg
$ hg log -G -T '{rev}:{node|short} {desc} {bookmarks}' -r 'sort(all(), topo)'
o 8:1473d4b996d1 G2 b-F@divergent3 b-G
@@ -1285,18 +1285,19 @@
> EOF
$ hg testnodescleanup --config extensions.t=$TESTTMP/scmutilcleanup.py
+ 4 new orphan changesets
$ rm .hg/localtags
$ hg log -G -T '{rev}:{node|short} {desc} {bookmarks}' -r 'sort(all(), topo)'
- o 12:1473d4b996d1 G2 b-F@divergent3 b-G
+ * 12:1473d4b996d1 G2 b-F@divergent3 b-G
|
- | o 11:d11b3456a873 F2 b-F
+ | * 11:d11b3456a873 F2 b-F
| |
- | o 8:5cb05ba470a7 H
+ | * 8:5cb05ba470a7 H
|/|
| o 4:7fb047a69f22 E b-F@divergent1
| |
- | | o 10:7c78f703e465 D2 b-D
+ | | * 10:7c78f703e465 D2 b-D
| | |
| | x 6:26805aba1e60 C
| | |
@@ -1307,13 +1308,13 @@
o 0:426bada5c675 A b-B b-C b-I
$ hg debugobsolete
- 1fc8102cda6204549f031015641606ccf5513ec3 1473d4b996d1d1b121de6b39fab6a04fbf9d873e 0 (Thu Jan 01 00:00:00 1970 +0000) {'operation': 'replace', 'user': 'test'}
- 64a8289d249234b9886244d379f15e6b650b28e3 d11b3456a873daec7c7bc53e5622e8df6d741bd2 0 (Thu Jan 01 00:00:00 1970 +0000) {'operation': 'replace', 'user': 'test'}
- f585351a92f85104bff7c284233c338b10eb1df7 7c78f703e465d73102cc8780667ce269c5208a40 0 (Thu Jan 01 00:00:00 1970 +0000) {'operation': 'replace', 'user': 'test'}
- 48b9aae0607f43ff110d84e6883c151942add5ab 0 {0000000000000000000000000000000000000000} (Thu Jan 01 00:00:00 1970 +0000) {'operation': 'replace', 'user': 'test'}
- 112478962961147124edd43549aedd1a335e44bf 0 {426bada5c67598ca65036d57d9e4b64b0c1ce7a0} (Thu Jan 01 00:00:00 1970 +0000) {'operation': 'replace', 'user': 'test'}
- 08ebfeb61bac6e3f12079de774d285a0d6689eba 0 {426bada5c67598ca65036d57d9e4b64b0c1ce7a0} (Thu Jan 01 00:00:00 1970 +0000) {'operation': 'replace', 'user': 'test'}
- 26805aba1e600a82e93661149f2313866a221a7b 0 {112478962961147124edd43549aedd1a335e44bf} (Thu Jan 01 00:00:00 1970 +0000) {'operation': 'replace', 'user': 'test'}
+ 1fc8102cda6204549f031015641606ccf5513ec3 1473d4b996d1d1b121de6b39fab6a04fbf9d873e 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '13', 'operation': 'replace', 'user': 'test'}
+ 64a8289d249234b9886244d379f15e6b650b28e3 d11b3456a873daec7c7bc53e5622e8df6d741bd2 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '13', 'operation': 'replace', 'user': 'test'}
+ f585351a92f85104bff7c284233c338b10eb1df7 7c78f703e465d73102cc8780667ce269c5208a40 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '9', 'operation': 'replace', 'user': 'test'}
+ 48b9aae0607f43ff110d84e6883c151942add5ab 0 {0000000000000000000000000000000000000000} (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '0', 'operation': 'replace', 'user': 'test'}
+ 112478962961147124edd43549aedd1a335e44bf 0 {426bada5c67598ca65036d57d9e4b64b0c1ce7a0} (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '0', 'operation': 'replace', 'user': 'test'}
+ 08ebfeb61bac6e3f12079de774d285a0d6689eba 0 {426bada5c67598ca65036d57d9e4b64b0c1ce7a0} (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '0', 'operation': 'replace', 'user': 'test'}
+ 26805aba1e600a82e93661149f2313866a221a7b 0 {112478962961147124edd43549aedd1a335e44bf} (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '0', 'operation': 'replace', 'user': 'test'}
$ cd ..
Test that obsmarkers are restored even when not using generaldelta
@@ -1328,11 +1329,11 @@
$ hg ci -Aqm a
$ hg ci --amend -m a2
$ hg debugobsolete
- cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b 489bac576828490c0bb8d45eac9e5e172e4ec0a8 0 (Thu Jan 01 00:00:00 1970 +0000) {'operation': 'amend', 'user': 'test'}
+ cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b 489bac576828490c0bb8d45eac9e5e172e4ec0a8 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '1', 'operation': 'amend', 'user': 'test'}
$ hg strip .
0 files updated, 0 files merged, 1 files removed, 0 files unresolved
- saved backup bundle to $TESTTMP/issue5678/.hg/strip-backup/489bac576828-bef27e14-backup.hg (glob)
+ saved backup bundle to $TESTTMP/issue5678/.hg/strip-backup/489bac576828-bef27e14-backup.hg
$ hg unbundle -q .hg/strip-backup/*
$ hg debugobsolete
- cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b 489bac576828490c0bb8d45eac9e5e172e4ec0a8 0 (Thu Jan 01 00:00:00 1970 +0000) {'operation': 'amend', 'user': 'test'}
+ cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b 489bac576828490c0bb8d45eac9e5e172e4ec0a8 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '1', 'operation': 'amend', 'user': 'test'}
$ cd ..
--- a/tests/test-subrepo-deep-nested-change.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-subrepo-deep-nested-change.t Mon Jan 22 17:53:02 2018 -0500
@@ -18,7 +18,7 @@
$ hg init sub2
$ echo sub2 > sub2/sub2
$ hg add -R sub2
- adding sub2/sub2 (glob)
+ adding sub2/sub2
$ hg commit -R sub2 -m "sub2 import"
Preparing the 'sub1' repo which depends on the subrepo 'sub2'
@@ -41,8 +41,8 @@
updating to branch default
1 files updated, 0 files merged, 0 files removed, 0 files unresolved
$ hg add -R sub1
- adding sub1/.hgsub (glob)
- adding sub1/sub1 (glob)
+ adding sub1/.hgsub
+ adding sub1/sub1
$ hg commit -R sub1 -m "sub1 import"
Preparing the 'main' repo which depends on the subrepo 'sub1'
@@ -77,8 +77,8 @@
cloning subrepo sub2 from $TESTTMP/sub2
3 files updated, 0 files merged, 0 files removed, 0 files unresolved
$ hg add -R main
- adding main/.hgsub (glob)
- adding main/main (glob)
+ adding main/.hgsub
+ adding main/main
$ hg commit -R main -m "main import"
#if serve
@@ -89,13 +89,13 @@
are also available as siblings of 'main'.
$ hg serve -R main --debug -S -p $HGPORT -d --pid-file=hg1.pid -E error.log -A access.log
- adding = $TESTTMP/main (glob)
- adding sub1 = $TESTTMP/main/sub1 (glob)
- adding sub1/sub2 = $TESTTMP/main/sub1/sub2 (glob)
+ adding = $TESTTMP/main
+ adding sub1 = $TESTTMP/main/sub1
+ adding sub1/sub2 = $TESTTMP/main/sub1/sub2
listening at http://*:$HGPORT/ (bound to *:$HGPORT) (glob) (?)
- adding = $TESTTMP/main (glob) (?)
- adding sub1 = $TESTTMP/main/sub1 (glob) (?)
- adding sub1/sub2 = $TESTTMP/main/sub1/sub2 (glob) (?)
+ adding = $TESTTMP/main (?)
+ adding sub1 = $TESTTMP/main/sub1 (?)
+ adding sub1/sub2 = $TESTTMP/main/sub1/sub2 (?)
$ cat hg1.pid >> $DAEMON_PIDS
$ hg clone http://localhost:$HGPORT httpclone --config progress.disable=True
@@ -186,14 +186,14 @@
\r (no-eol) (esc)
updating to branch default
cloning subrepo sub1 from $TESTTMP/sub1
- cloning subrepo sub1/sub2 from $TESTTMP/sub2 (glob)
+ cloning subrepo sub1/sub2 from $TESTTMP/sub2
3 files updated, 0 files merged, 0 files removed, 0 files unresolved
Largefiles is NOT enabled in the clone if the source repo doesn't require it
$ cat cloned/.hg/hgrc
# example repository config (see 'hg help config' for more info)
[paths]
- default = $TESTTMP/main (glob)
+ default = $TESTTMP/main
# path aliases to other clones of this repo in URLs or filesystem paths
# (see 'hg help config.paths' for more info)
@@ -231,7 +231,7 @@
$ echo modified > cloned/sub1/sub2/sub2
$ hg commit --subrepos -m "deep nested modif should trigger a commit" -R cloned
committing subrepository sub1
- committing subrepository sub1/sub2 (glob)
+ committing subrepository sub1/sub2
Checking modified node ids
@@ -263,7 +263,7 @@
$ hg ci -ASm "add test.txt"
adding sub1/sub2/folder/test.txt
committing subrepository sub1
- committing subrepository sub1/sub2 (glob)
+ committing subrepository sub1/sub2
$ rm -r main
$ hg archive -S -qr 'wdir()' ../wdir
@@ -309,8 +309,8 @@
\r (no-eol) (esc)
deleting [===========================================>] 2/2\r (no-eol) (esc)
\r (no-eol) (esc)
- removing sub1/sub2/folder/test.txt (glob)
- removing sub1/sub2/test.txt (glob)
+ removing sub1/sub2/folder/test.txt
+ removing sub1/sub2/test.txt
$ hg status -S
R sub1/sub2/folder/test.txt
R sub1/sub2/test.txt
@@ -368,10 +368,10 @@
\r (no-eol) (esc)
searching for exact renames [ ] 0/1\r (no-eol) (esc)
\r (no-eol) (esc)
- adding ../sub1/sub2/folder/test.txt (glob)
- removing ../sub1/sub2/test.txt (glob)
- adding ../sub1/foo (glob)
- adding bar/abc (glob)
+ adding ../sub1/sub2/folder/test.txt
+ removing ../sub1/sub2/test.txt
+ adding ../sub1/foo
+ adding bar/abc
$ cd ..
$ hg status -S
A foo/bar/abc
@@ -398,9 +398,9 @@
archiving (sub1) [===================================>] 4/4\r (no-eol) (esc)
\r (no-eol) (esc)
\r (no-eol) (esc)
- archiving (sub1/sub2) [ ] 0/2\r (no-eol) (glob) (esc)
- archiving (sub1/sub2) [==============> ] 1/2\r (no-eol) (glob) (esc)
- archiving (sub1/sub2) [==============================>] 2/2\r (no-eol) (glob) (esc)
+ archiving (sub1/sub2) [ ] 0/2\r (no-eol) (esc)
+ archiving (sub1/sub2) [==============> ] 1/2\r (no-eol) (esc)
+ archiving (sub1/sub2) [==============================>] 2/2\r (no-eol) (esc)
\r (no-eol) (esc)
$ diff -r . ../wdir | egrep -v '\.hg$|^Common subdirectories:'
Only in ../wdir: .hg_archival.txt
@@ -442,9 +442,9 @@
archiving (sub1) [===================================>] 3/3\r (no-eol) (esc)
\r (no-eol) (esc)
\r (no-eol) (esc)
- archiving (sub1/sub2) [ ] 0/2\r (no-eol) (glob) (esc)
- archiving (sub1/sub2) [==============> ] 1/2\r (no-eol) (glob) (esc)
- archiving (sub1/sub2) [==============================>] 2/2\r (no-eol) (glob) (esc)
+ archiving (sub1/sub2) [ ] 0/2\r (no-eol) (esc)
+ archiving (sub1/sub2) [==============> ] 1/2\r (no-eol) (esc)
+ archiving (sub1/sub2) [==============================>] 2/2\r (no-eol) (esc)
\r (no-eol) (esc)
$ find ../wdir -type f | sort
../wdir/.hg_archival.txt
@@ -474,10 +474,10 @@
archiving (sub1) [===================================>] 3/3\r (no-eol) (esc)
\r (no-eol) (esc)
\r (no-eol) (esc)
- archiving (sub1/sub2) [ ] 0/3\r (no-eol) (glob) (esc)
- archiving (sub1/sub2) [=========> ] 1/3\r (no-eol) (glob) (esc)
- archiving (sub1/sub2) [===================> ] 2/3\r (no-eol) (glob) (esc)
- archiving (sub1/sub2) [==============================>] 3/3\r (no-eol) (glob) (esc)
+ archiving (sub1/sub2) [ ] 0/3\r (no-eol) (esc)
+ archiving (sub1/sub2) [=========> ] 1/3\r (no-eol) (esc)
+ archiving (sub1/sub2) [===================> ] 2/3\r (no-eol) (esc)
+ archiving (sub1/sub2) [==============================>] 3/3\r (no-eol) (esc)
\r (no-eol) (esc)
$ cat ../wdir/.hg_archival.txt
repo: 7f491f53a367861f47ee64a80eb997d1f341b77a
@@ -489,15 +489,15 @@
$ touch sub1/sub2/folder/bar
$ hg addremove sub1/sub2
- adding sub1/sub2/folder/bar (glob)
+ adding sub1/sub2/folder/bar
$ hg status -S
A sub1/sub2/folder/bar
? foo/bar/abc
? sub1/foo
$ hg update -Cq
$ hg addremove sub1
- adding sub1/sub2/folder/bar (glob)
- adding sub1/foo (glob)
+ adding sub1/sub2/folder/bar
+ adding sub1/foo
$ hg update -Cq
$ rm sub1/sub2/folder/test.txt
$ rm sub1/sub2/test.txt
@@ -508,7 +508,7 @@
adding sub1/foo
adding foo/bar/abc
committing subrepository sub1
- committing subrepository sub1/sub2 (glob)
+ committing subrepository sub1/sub2
$ hg forget sub1/sub2/sub2
$ echo x > sub1/sub2/x.txt
@@ -518,75 +518,75 @@
$ hg files -S
.hgsub
.hgsubstate
- foo/bar/abc (glob)
+ foo/bar/abc
main
- sub1/.hgsub (glob)
- sub1/.hgsubstate (glob)
- sub1/foo (glob)
- sub1/sub1 (glob)
- sub1/sub2/folder/bar (glob)
- sub1/sub2/x.txt (glob)
+ sub1/.hgsub
+ sub1/.hgsubstate
+ sub1/foo
+ sub1/sub1
+ sub1/sub2/folder/bar
+ sub1/sub2/x.txt
$ hg files -S "set:eol('dos') or eol('unix') or size('<= 0')"
.hgsub
.hgsubstate
- foo/bar/abc (glob)
+ foo/bar/abc
main
- sub1/.hgsub (glob)
- sub1/.hgsubstate (glob)
- sub1/foo (glob)
- sub1/sub1 (glob)
- sub1/sub2/folder/bar (glob)
- sub1/sub2/x.txt (glob)
+ sub1/.hgsub
+ sub1/.hgsubstate
+ sub1/foo
+ sub1/sub1
+ sub1/sub2/folder/bar
+ sub1/sub2/x.txt
$ hg files -r '.^' -S "set:eol('dos') or eol('unix')"
.hgsub
.hgsubstate
main
- sub1/.hgsub (glob)
- sub1/.hgsubstate (glob)
- sub1/sub1 (glob)
- sub1/sub2/folder/test.txt (glob)
- sub1/sub2/sub2 (glob)
- sub1/sub2/test.txt (glob)
+ sub1/.hgsub
+ sub1/.hgsubstate
+ sub1/sub1
+ sub1/sub2/folder/test.txt
+ sub1/sub2/sub2
+ sub1/sub2/test.txt
$ hg files sub1
- sub1/.hgsub (glob)
- sub1/.hgsubstate (glob)
- sub1/foo (glob)
- sub1/sub1 (glob)
- sub1/sub2/folder/bar (glob)
- sub1/sub2/x.txt (glob)
+ sub1/.hgsub
+ sub1/.hgsubstate
+ sub1/foo
+ sub1/sub1
+ sub1/sub2/folder/bar
+ sub1/sub2/x.txt
$ hg files sub1/sub2
- sub1/sub2/folder/bar (glob)
- sub1/sub2/x.txt (glob)
+ sub1/sub2/folder/bar
+ sub1/sub2/x.txt
$ hg files
.hgsub
.hgsubstate
- foo/bar/abc (glob)
+ foo/bar/abc
main
$ hg files -S -r '.^' sub1/sub2/folder
- sub1/sub2/folder/test.txt (glob)
+ sub1/sub2/folder/test.txt
$ hg files -S -r '.^' sub1/sub2/missing
- sub1/sub2/missing: no such file in rev 78026e779ea6 (glob)
+ sub1/sub2/missing: no such file in rev 78026e779ea6
[1]
$ hg files -r '.^' sub1/
- sub1/.hgsub (glob)
- sub1/.hgsubstate (glob)
- sub1/sub1 (glob)
- sub1/sub2/folder/test.txt (glob)
- sub1/sub2/sub2 (glob)
- sub1/sub2/test.txt (glob)
+ sub1/.hgsub
+ sub1/.hgsubstate
+ sub1/sub1
+ sub1/sub2/folder/test.txt
+ sub1/sub2/sub2
+ sub1/sub2/test.txt
$ hg files -r '.^' sub1/sub2
- sub1/sub2/folder/test.txt (glob)
- sub1/sub2/sub2 (glob)
- sub1/sub2/test.txt (glob)
+ sub1/sub2/folder/test.txt
+ sub1/sub2/sub2
+ sub1/sub2/test.txt
$ hg rollback -q
$ hg up -Cq
@@ -605,10 +605,10 @@
archiving (sub1) [===================================>] 3/3\r (no-eol) (esc)
\r (no-eol) (esc)
\r (no-eol) (esc)
- archiving (sub1/sub2) [ ] 0/3\r (no-eol) (glob) (esc)
- archiving (sub1/sub2) [=========> ] 1/3\r (no-eol) (glob) (esc)
- archiving (sub1/sub2) [===================> ] 2/3\r (no-eol) (glob) (esc)
- archiving (sub1/sub2) [==============================>] 3/3\r (no-eol) (glob) (esc)
+ archiving (sub1/sub2) [ ] 0/3\r (no-eol) (esc)
+ archiving (sub1/sub2) [=========> ] 1/3\r (no-eol) (esc)
+ archiving (sub1/sub2) [===================> ] 2/3\r (no-eol) (esc)
+ archiving (sub1/sub2) [==============================>] 3/3\r (no-eol) (esc)
\r (no-eol) (esc)
$ find ../archive_all | sort
../archive_all
@@ -642,8 +642,8 @@
archiving (sub1) [===================================>] 3/3\r (no-eol) (esc)
\r (no-eol) (esc)
\r (no-eol) (esc)
- archiving (sub1/sub2) [ ] 0/1\r (no-eol) (glob) (esc)
- archiving (sub1/sub2) [==============================>] 1/1\r (no-eol) (glob) (esc)
+ archiving (sub1/sub2) [ ] 0/1\r (no-eol) (esc)
+ archiving (sub1/sub2) [==============================>] 1/1\r (no-eol) (esc)
\r (no-eol) (esc)
$ find ../archive_exclude | sort
../archive_exclude
@@ -663,9 +663,9 @@
archiving (sub1) [ <=> ] 0\r (no-eol) (esc)
\r (no-eol) (esc)
\r (no-eol) (esc)
- archiving (sub1/sub2) [ ] 0/2\r (no-eol) (glob) (esc)
- archiving (sub1/sub2) [==============> ] 1/2\r (no-eol) (glob) (esc)
- archiving (sub1/sub2) [==============================>] 2/2\r (no-eol) (glob) (esc)
+ archiving (sub1/sub2) [ ] 0/2\r (no-eol) (esc)
+ archiving (sub1/sub2) [==============> ] 1/2\r (no-eol) (esc)
+ archiving (sub1/sub2) [==============================>] 2/2\r (no-eol) (esc)
\r (no-eol) (esc)
$ find ../archive_include | sort
../archive_include
@@ -686,7 +686,7 @@
$ hg --config extensions.largefiles= add --large large.bin
$ hg --config extensions.largefiles= ci -S -m "add large files"
committing subrepository sub1
- committing subrepository sub1/sub2 (glob)
+ committing subrepository sub1/sub2
$ hg --config extensions.largefiles= archive -S ../archive_lf
$ find ../archive_lf | sort
@@ -785,7 +785,7 @@
$ cat ../lfclone/.hg/hgrc
# example repository config (see 'hg help config' for more info)
[paths]
- default = $TESTTMP/cloned (glob)
+ default = $TESTTMP/cloned
# path aliases to other clones of this repo in URLs or filesystem paths
# (see 'hg help config.paths' for more info)
@@ -817,16 +817,16 @@
$ touch sub1/sub2/untracked.txt
$ touch sub1/sub2/large.dat
$ hg forget sub1/sub2/large.bin sub1/sub2/test.txt sub1/sub2/untracked.txt
- not removing sub1/sub2/untracked.txt: file is already untracked (glob)
+ not removing sub1/sub2/untracked.txt: file is already untracked
[1]
$ hg add --large --dry-run -v sub1/sub2/untracked.txt
- adding sub1/sub2/untracked.txt as a largefile (glob)
+ adding sub1/sub2/untracked.txt as a largefile
$ hg add --large -v sub1/sub2/untracked.txt
- adding sub1/sub2/untracked.txt as a largefile (glob)
+ adding sub1/sub2/untracked.txt as a largefile
$ hg add --normal -v sub1/sub2/large.dat
- adding sub1/sub2/large.dat (glob)
+ adding sub1/sub2/large.dat
$ hg forget -v sub1/sub2/untracked.txt
- removing sub1/sub2/untracked.txt (glob)
+ removing sub1/sub2/untracked.txt
$ hg status -S
A sub1/sub2/large.dat
R sub1/sub2/large.bin
@@ -907,7 +907,7 @@
$ hg add -v foo/bar/abc a.txt a.dat
adding a.dat as a largefile
adding a.txt
- adding foo/bar/abc (glob)
+ adding foo/bar/abc
$ hg ci -m 'dir commit with only normal file deltas' foo/bar
$ hg status
A a.dat
@@ -1040,7 +1040,7 @@
archiving (sub1) [ <=> ] 0\r (no-eol) (esc)
\r (no-eol) (esc)
\r (no-eol) (esc)
- archiving (sub1/sub2) [ <=> ] 0\r (no-eol) (glob) (esc)
+ archiving (sub1/sub2) [ <=> ] 0\r (no-eol) (esc)
\r (no-eol) (esc)
\r (no-eol) (esc)
archiving (sub3) [ <=> ] 0\r (no-eol) (esc)
@@ -1054,7 +1054,7 @@
archiving (sub1) [ <=> ] 0\r (no-eol) (esc)
\r (no-eol) (esc)
\r (no-eol) (esc)
- archiving (sub1/sub2) [ <=> ] 0\r (no-eol) (glob) (esc)
+ archiving (sub1/sub2) [ <=> ] 0\r (no-eol) (esc)
\r (no-eol) (esc)
diff -Nru cloned.*/.hgsub cloned/.hgsub (glob)
--- cloned.*/.hgsub * (glob)
@@ -1082,8 +1082,8 @@
archiving (sub1) [===================================>] 1/1\r (no-eol) (esc)
\r (no-eol) (esc)
\r (no-eol) (esc)
- archiving (sub1/sub2) [ ] 0/1\r (no-eol) (glob) (esc)
- archiving (sub1/sub2) [==============================>] 1/1\r (no-eol) (glob) (esc)
+ archiving (sub1/sub2) [ ] 0/1\r (no-eol) (esc)
+ archiving (sub1/sub2) [==============================>] 1/1\r (no-eol) (esc)
\r (no-eol) (esc)
\r (no-eol) (esc)
archiving [ ] 0/8\r (no-eol) (esc)
@@ -1101,10 +1101,10 @@
archiving (sub1) [===================================>] 1/1\r (no-eol) (esc)
\r (no-eol) (esc)
\r (no-eol) (esc)
- archiving (sub1/sub2) [ ] 0/3\r (no-eol) (glob) (esc)
- archiving (sub1/sub2) [=========> ] 1/3\r (no-eol) (glob) (esc)
- archiving (sub1/sub2) [===================> ] 2/3\r (no-eol) (glob) (esc)
- archiving (sub1/sub2) [==============================>] 3/3\r (no-eol) (glob) (esc)
+ archiving (sub1/sub2) [ ] 0/3\r (no-eol) (esc)
+ archiving (sub1/sub2) [=========> ] 1/3\r (no-eol) (esc)
+ archiving (sub1/sub2) [===================> ] 2/3\r (no-eol) (esc)
+ archiving (sub1/sub2) [==============================>] 3/3\r (no-eol) (esc)
\r (no-eol) (esc)
\r (no-eol) (esc)
archiving (sub3) [ ] 0/1\r (no-eol) (esc)
@@ -1179,8 +1179,8 @@
archiving (sub1) [ <=> ] 0\r (no-eol) (esc)
\r (no-eol) (esc)
\r (no-eol) (esc)
- archiving (sub1/sub2) [ ] 0/1\r (no-eol) (glob) (esc)
- archiving (sub1/sub2) [==============================>] 1/1\r (no-eol) (glob) (esc)
+ archiving (sub1/sub2) [ ] 0/1\r (no-eol) (esc)
+ archiving (sub1/sub2) [==============================>] 1/1\r (no-eol) (esc)
\r (no-eol) (esc)
--- */cloned.*/sub1/sub2/sub2 * (glob)
+++ */cloned/sub1/sub2/sub2 * (glob)
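
Most of the hunks above do one mechanical thing: they drop '(glob)' annotations from expected lines whose only platform-dependent part is a forward-slash path under $TESTTMP, the runner now being trusted to accept backslash separators on such lines without an explicit annotation. As a rough sketch of what that annotation bought on Windows (illustrative only, not the real run-tests.py matcher; the helper name and the exact wildcard semantics here are assumptions):

    import re

    def globline(expected, actual):
        # In this sketch '*' matches any run of characters, '?' matches one
        # character, and a literal '/' is allowed to match a Windows '\'.
        pat = []
        for ch in expected:
            if ch == '*':
                pat.append('.*')
            elif ch == '?':
                pat.append('.')
            elif ch == '/':
                pat.append(r'[/\\]')
            else:
                pat.append(re.escape(ch))
        return re.match(''.join(pat) + r'\Z', actual) is not None

    # A line that used to need '(glob)' purely because of the separator:
    assert globline('adding sub1/sub2/folder/bar',
                    'adding sub1\\sub2\\folder\\bar')
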
--- a/tests/test-subrepo-git.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-subrepo-git.t Mon Jan 22 17:53:02 2018 -0500
@@ -191,7 +191,7 @@
user b push changes
$ hg push 2>/dev/null
- pushing to $TESTTMP/t (glob)
+ pushing to $TESTTMP/t
pushing branch testing of subrepository "s"
searching for changes
adding changesets
@@ -203,7 +203,7 @@
$ cd ../ta
$ hg pull
- pulling from $TESTTMP/t (glob)
+ pulling from $TESTTMP/t
searching for changes
adding changesets
adding manifests
@@ -236,7 +236,7 @@
source ../gitroot
revision f47b465e1bce645dbf37232a00574aa1546ca8d3
$ hg push 2>/dev/null
- pushing to $TESTTMP/t (glob)
+ pushing to $TESTTMP/t
pushing branch testing of subrepository "s"
searching for changes
adding changesets
@@ -268,7 +268,7 @@
$ echo aa >> a
$ hg commit -m aa
$ hg push
- pushing to $TESTTMP/t (glob)
+ pushing to $TESTTMP/t
searching for changes
adding changesets
adding manifests
@@ -320,7 +320,7 @@
$ hg up
From $TESTTMP/tb/s
* [new branch] b2 -> origin/b2
- Previous HEAD position was f47b465... merge
+ Previous HEAD position was f47b465* merge (glob)
Switched to a new branch 'b2'
pulling subrepo s from $TESTTMP/tb/s
1 files updated, 0 files merged, 0 files removed, 0 files unresolved
@@ -399,7 +399,7 @@
M inner/s/f
$ hg commit --subrepos -m nested
committing subrepository inner
- committing subrepository inner/s (glob)
+ committing subrepository inner/s
nested archive
@@ -572,8 +572,8 @@
$ cd ..
$ cd s
$ git checkout aa84837ccfbdfedcdcdeeedc309d73e6eb069edc
- Previous HEAD position was 32a3438... fff
- HEAD is now at aa84837... f
+ Previous HEAD position was 32a3438* fff (glob)
+ HEAD is now at aa84837* f (glob)
$ cd ..
$ hg update 1
subrepository s diverged (local revision: 32a3438, remote revision: da5f5b1)
@@ -623,11 +623,11 @@
Test subrepo already at intended revision:
$ cd s
$ git checkout 32a343883b74769118bb1d3b4b1fbf9156f4dddc
- HEAD is now at 32a3438... fff
+ HEAD is now at 32a3438* fff (glob)
$ cd ..
$ hg update 1
- Previous HEAD position was 32a3438... fff
- HEAD is now at da5f5b1... g
+ Previous HEAD position was 32a3438* fff (glob)
+ HEAD is now at da5f5b1* g (glob)
2 files updated, 0 files merged, 0 files removed, 0 files unresolved
$ hg id -n
1
@@ -640,9 +640,10 @@
traceback
#if no-windows
$ hg forget 'notafile*'
- notafile*: No such file or directory
+ notafile*: $ENOENT$
[1]
#else
+error: The filename, directory name, or volume label syntax is incorrect
$ hg forget 'notafile'
notafile: * (glob)
[1]
@@ -677,8 +678,8 @@
$ hg -R tc pull -q
$ hg -R tc update -q -C 3473d20bddcf 2>&1 | sort
- warning: removing potentially hostile 'hgrc' in '$TESTTMP/tc/s/.hg' (glob)
- warning: removing potentially hostile 'hgrc' in '$TESTTMP/tc/s/sub/.hg' (glob)
+ warning: removing potentially hostile 'hgrc' in '$TESTTMP/tc/s/.hg'
+ warning: removing potentially hostile 'hgrc' in '$TESTTMP/tc/s/sub/.hg'
$ cd tc
$ hg parents -q
8:3473d20bddcf
@@ -724,8 +725,8 @@
$ cd ..
$ hg -R tc pull -q
$ hg -R tc update -q -C ed23f7fe024e 2>&1 | sort
- warning: removing potentially hostile 'hgrc' in '$TESTTMP/tc/s/.hg' (glob)
- warning: removing potentially hostile 'hgrc' in '$TESTTMP/tc/s/sub/.hg' (glob)
+ warning: removing potentially hostile 'hgrc' in '$TESTTMP/tc/s/.hg'
+ warning: removing potentially hostile 'hgrc' in '$TESTTMP/tc/s/sub/.hg'
$ cd tc
$ hg parents -q
9:ed23f7fe024e
@@ -847,8 +848,8 @@
the output contains a regex, because git 1.7.10 and 1.7.11
change the amount of whitespace
$ hg diff --subrepos --stat
- \s*barfoo |\s*1 + (re)
- \s*foobar |\s*2 +- (re)
+ \s*barfoo \|\s+1 \+ (re)
+ \s*foobar \|\s+2 \+- (re)
2 files changed, 2 insertions\(\+\), 1 deletions?\(-\) (re)
adding an include should ignore the other elements
@@ -923,8 +924,8 @@
$ echo 'bloop' > s/foobar
$ hg revert --all --verbose --config 'ui.origbackuppath=.hg/origbackups'
reverting subrepo ../gitroot
- creating directory: $TESTTMP/tc/.hg/origbackups (glob)
- saving current version of foobar as $TESTTMP/tc/.hg/origbackups/foobar (glob)
+ creating directory: $TESTTMP/tc/.hg/origbackups
+ saving current version of foobar as $TESTTMP/tc/.hg/origbackups/foobar
$ ls .hg/origbackups
foobar
$ rm -rf .hg/origbackups
@@ -997,7 +998,7 @@
reverting subrepo ../gitroot
$ hg add --subrepos "glob:**.python"
- adding s/snake.python (glob)
+ adding s/snake.python
$ hg st --subrepos s
A s/snake.python
? s/barfoo
@@ -1008,11 +1009,11 @@
reverting subrepo ../gitroot
$ hg add --subrepos s
- adding s/barfoo (glob)
- adding s/c.c (glob)
- adding s/cpp.cpp (glob)
- adding s/foobar.orig (glob)
- adding s/snake.python (glob)
+ adding s/barfoo
+ adding s/c.c
+ adding s/cpp.cpp
+ adding s/foobar.orig
+ adding s/snake.python
$ hg st --subrepos s
A s/barfoo
A s/c.c
@@ -1030,10 +1031,10 @@
? s/snake.python
$ hg add --subrepos --exclude "path:s/c.c"
- adding s/barfoo (glob)
- adding s/cpp.cpp (glob)
- adding s/foobar.orig (glob)
- adding s/snake.python (glob)
+ adding s/barfoo
+ adding s/cpp.cpp
+ adding s/foobar.orig
+ adding s/snake.python
$ hg st --subrepos s
A s/barfoo
A s/cpp.cpp
@@ -1049,7 +1050,7 @@
> EOF
$ hg add .hgignore
$ hg add --subrepos "glob:**.python" s/barfoo
- adding s/snake.python (glob)
+ adding s/snake.python
$ hg st --subrepos s
A s/barfoo
A s/snake.python
@@ -1098,10 +1099,10 @@
correctly do a dry run
$ hg add --subrepos s --dry-run
- adding s/barfoo (glob)
- adding s/c.c (glob)
- adding s/cpp.cpp (glob)
- adding s/foobar.orig (glob)
+ adding s/barfoo
+ adding s/c.c
+ adding s/cpp.cpp
+ adding s/foobar.orig
$ hg st --subrepos s
A s/.gitignore
A s/snake.python
@@ -1195,7 +1196,7 @@
$ unset GIT_ALLOW_PROTOCOL
$ PWNED_MSG="your git is too old or mercurial has regressed" hg clone \
> malicious-subrepository malicious-subrepository-protected
- Cloning into '$TESTTMP/tc/malicious-subrepository-protected/s'... (glob)
+ Cloning into '$TESTTMP/tc/malicious-subrepository-protected/s'...
fatal: transport 'ext' not allowed
updating to branch default
cloning subrepo s from ext::sh -c echo% pwned:% $PWNED_MSG% >pwned.txt
@@ -1208,7 +1209,7 @@
$ rm -f pwned.txt
$ env GIT_ALLOW_PROTOCOL=ext PWNED_MSG="you asked for it" hg clone \
> malicious-subrepository malicious-subrepository-clone-allowed
- Cloning into '$TESTTMP/tc/malicious-subrepository-clone-allowed/s'... (glob)
+ Cloning into '$TESTTMP/tc/malicious-subrepository-clone-allowed/s'...
fatal: Could not read from remote repository.
Please make sure you have the correct access rights
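
One substitution in the hunks above deserves a note: in the `hg forget 'notafile*'` block the literal 'No such file or directory' becomes the `$ENOENT$` token, so a single expected line can match whatever wording the platform's C library uses for errno ENOENT. A minimal sketch of what that token conceptually stands for (the helper below is illustrative, not part of the test runner):

    import errno
    import os

    def enoent_message():
        # The platform-specific strerror() text for ENOENT, e.g.
        # 'No such file or directory' on Linux.
        return os.strerror(errno.ENOENT)

    print('notafile*: %s' % enoent_message())
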
--- a/tests/test-subrepo-missing.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-subrepo-missing.t Mon Jan 22 17:53:02 2018 -0500
@@ -130,10 +130,10 @@
checking files
2 files, 5 changesets, 5 total revisions
checking subrepo links
- 0: repository $TESTTMP/repo/subrepo not found (glob)
- 1: repository $TESTTMP/repo/subrepo not found (glob)
- 3: repository $TESTTMP/repo/subrepo not found (glob)
- 4: repository $TESTTMP/repo/subrepo not found (glob)
+ 0: repository $TESTTMP/repo/subrepo not found
+ 1: repository $TESTTMP/repo/subrepo not found
+ 3: repository $TESTTMP/repo/subrepo not found
+ 4: repository $TESTTMP/repo/subrepo not found
$ ls
b
$ mv b subrepo
--- a/tests/test-subrepo-paths.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-subrepo-paths.t Mon Jan 22 17:53:02 2018 -0500
@@ -55,7 +55,7 @@
> .* = \1
> EOF
$ hg debugsub
- abort: bad subrepository pattern in $TESTTMP/outer/.hg/hgrc:2: invalid group reference (glob)
+ abort: bad subrepository pattern in $TESTTMP/outer/.hg/hgrc:2: invalid group reference
[255]
$ cd ..
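
The abort checked just above comes from a [subpaths] rule whose replacement, `\1`, names a capture group that the pattern `.*` never defines; Python's re module rejects that when the substitution runs, and hg reports it as a bad subrepository pattern. A quick, hedged reproduction of the underlying error (the exact message wording varies a little between Python versions):

    import re

    try:
        # '.*' captures nothing, so the '\1' in the replacement is invalid.
        re.sub(r'.*', r'\1', 'sub/path')
    except re.error as inst:
        print('bad subrepository pattern: %s' % inst)
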
--- a/tests/test-subrepo-recursion.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-subrepo-recursion.t Mon Jan 22 17:53:02 2018 -0500
@@ -23,10 +23,10 @@
$ hg add -S .hgsub
$ hg add -S foo/.hgsub
$ hg add -S foo/bar
- adding foo/bar/z.txt (glob)
+ adding foo/bar/z.txt
$ hg add -S
adding x.txt
- adding foo/y.txt (glob)
+ adding foo/y.txt
Test recursive status without committing anything:
@@ -67,7 +67,7 @@
$ hg commit -m 0-0-0 --config ui.commitsubrepos=No --subrepos
committing subrepository foo
- committing subrepository foo/bar (glob)
+ committing subrepository foo/bar
$ cd foo
$ echo y2 >> y.txt
@@ -192,7 +192,7 @@
$ rm -r dir
$ hg commit --subrepos -m 2-3-2
committing subrepository foo
- committing subrepository foo/bar (glob)
+ committing subrepository foo/bar
Test explicit path commands within subrepos: add/forget
$ echo z1 > foo/bar/z2.txt
@@ -205,7 +205,7 @@
$ hg status -S
? foo/bar/z2.txt
$ hg forget foo/bar/z2.txt
- not removing foo/bar/z2.txt: file is already untracked (glob)
+ not removing foo/bar/z2.txt: file is already untracked
[1]
$ hg status -S
? foo/bar/z2.txt
@@ -254,13 +254,13 @@
#if serve
$ cd ..
$ hg serve -R repo --debug -S -p $HGPORT -d --pid-file=hg1.pid -E error.log -A access.log
- adding = $TESTTMP/repo (glob)
- adding foo = $TESTTMP/repo/foo (glob)
- adding foo/bar = $TESTTMP/repo/foo/bar (glob)
+ adding = $TESTTMP/repo
+ adding foo = $TESTTMP/repo/foo
+ adding foo/bar = $TESTTMP/repo/foo/bar
listening at http://*:$HGPORT/ (bound to *:$HGPORT) (glob) (?)
- adding = $TESTTMP/repo (glob) (?)
- adding foo = $TESTTMP/repo/foo (glob) (?)
- adding foo/bar = $TESTTMP/repo/foo/bar (glob) (?)
+ adding = $TESTTMP/repo (?)
+ adding foo = $TESTTMP/repo/foo (?)
+ adding foo/bar = $TESTTMP/repo/foo/bar (?)
$ cat hg1.pid >> $DAEMON_PIDS
$ hg clone http://localhost:$HGPORT clone --config progress.disable=True
@@ -278,7 +278,7 @@
adding file changes
added 4 changesets with 7 changes to 3 files
new changesets af048e97ade2:65903cebad86
- cloning subrepo foo/bar from http://localhost:$HGPORT/foo/bar (glob)
+ cloning subrepo foo/bar from http://localhost:$HGPORT/foo/bar
requesting all changes
adding changesets
adding manifests
@@ -340,8 +340,8 @@
archiving (foo) [====================================>] 3/3\r (no-eol) (esc)
\r (no-eol) (esc)
\r (no-eol) (esc)
- archiving (foo/bar) [ ] 0/1\r (no-eol) (glob) (esc)
- archiving (foo/bar) [================================>] 1/1\r (no-eol) (glob) (esc)
+ archiving (foo/bar) [ ] 0/1\r (no-eol) (esc)
+ archiving (foo/bar) [================================>] 1/1\r (no-eol) (esc)
\r (no-eol) (esc)
$ find ../archive | sort
../archive
@@ -372,8 +372,8 @@
archiving (foo) [====================================>] 3/3\r (no-eol) (esc)
\r (no-eol) (esc)
\r (no-eol) (esc)
- archiving (foo/bar) [ ] 0/1\r (no-eol) (glob) (esc)
- archiving (foo/bar) [================================>] 1/1\r (no-eol) (glob) (esc)
+ archiving (foo/bar) [ ] 0/1\r (no-eol) (esc)
+ archiving (foo/bar) [================================>] 1/1\r (no-eol) (esc)
\r (no-eol) (esc)
(unzip date formatting is unstable, we do not care about it and glob it out)

@@ -445,11 +445,11 @@
linking [ <=> ] 6\r (no-eol) (esc)
\r (no-eol) (esc)
\r (no-eol) (esc)
- archiving (foo/bar) [ ] 0/1\r (no-eol) (glob) (esc)
- archiving (foo/bar) [================================>] 1/1\r (no-eol) (glob) (esc)
+ archiving (foo/bar) [ ] 0/1\r (no-eol) (esc)
+ archiving (foo/bar) [================================>] 1/1\r (no-eol) (esc)
\r (no-eol) (esc)
cloning subrepo foo from $TESTTMP/repo/foo
- cloning subrepo foo/bar from $TESTTMP/repo/foo/bar (glob)
+ cloning subrepo foo/bar from $TESTTMP/repo/foo/bar
#else
Note there's a slight output glitch on non-hardlink systems: the last
"linking" progress topic never gets closed, leading to slight output corruption on that platform.
@@ -462,7 +462,7 @@
\r (no-eol) (esc)
\r (no-eol) (esc)
linking [ <=> ] 1\r (no-eol) (esc)
- cloning subrepo foo/bar from $TESTTMP/repo/foo/bar (glob)
+ cloning subrepo foo/bar from $TESTTMP/repo/foo/bar
#endif
Archive + subrepos uses '/' for all component separators
@@ -498,7 +498,7 @@
$ echo f > foo/f
$ hg archive --subrepos -r tip archive
cloning subrepo foo from $TESTTMP/empty/foo
- abort: destination '$TESTTMP/almost-empty/foo' is not empty (in subrepository "foo") (glob)
+ abort: destination '$TESTTMP/almost-empty/foo' is not empty (in subrepository "foo")
[255]
Clone and test outgoing:
@@ -507,11 +507,11 @@
$ hg clone repo repo2
updating to branch default
cloning subrepo foo from $TESTTMP/repo/foo
- cloning subrepo foo/bar from $TESTTMP/repo/foo/bar (glob)
+ cloning subrepo foo/bar from $TESTTMP/repo/foo/bar
3 files updated, 0 files merged, 0 files removed, 0 files unresolved
$ cd repo2
$ hg outgoing -S
- comparing with $TESTTMP/repo (glob)
+ comparing with $TESTTMP/repo
searching for changes
no changes found
comparing with $TESTTMP/repo/foo
@@ -537,7 +537,7 @@
$ hg commit --subrepos -m 3-4-2
committing subrepository foo
$ hg outgoing -S
- comparing with $TESTTMP/repo (glob)
+ comparing with $TESTTMP/repo
searching for changes
changeset: 3:2655b8ecc4ee
tag: tip
@@ -567,7 +567,7 @@
Test incoming:
$ hg incoming -S
- comparing with $TESTTMP/repo2 (glob)
+ comparing with $TESTTMP/repo2
searching for changes
changeset: 3:2655b8ecc4ee
tag: tip
--- a/tests/test-subrepo-relative-path.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-subrepo-relative-path.t Mon Jan 22 17:53:02 2018 -0500
@@ -5,7 +5,7 @@
$ hg init sub
$ echo sub > sub/sub
$ hg add -R sub
- adding sub/sub (glob)
+ adding sub/sub
$ hg commit -R sub -m "sub import"
Preparing the 'main' repo which depends on the subrepo 'sub'
@@ -17,8 +17,8 @@
updating to branch default
1 files updated, 0 files merged, 0 files removed, 0 files unresolved
$ hg add -R main
- adding main/.hgsub (glob)
- adding main/main (glob)
+ adding main/.hgsub
+ adding main/main
$ hg commit -R main -m "main import"
Cleaning both repositories, just as a clone -U
--- a/tests/test-subrepo-svn.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-subrepo-svn.t Mon Jan 22 17:53:02 2018 -0500
@@ -22,12 +22,12 @@
$ echo alpha > src/alpha
$ svn add src
A src
- A src/alpha (glob)
+ A src/alpha
$ mkdir externals
$ echo other > externals/other
$ svn add externals
A externals
- A externals/other (glob)
+ A externals/other
$ svn ci -qm 'Add alpha'
$ svn up -q
$ echo "externals -r1 $SVNREPOURL/externals" > extdef
@@ -91,10 +91,10 @@
$ hg debugsub
path s
- source file://*/svn-repo/src (glob)
+ source file:/*/$TESTTMP/svn-repo/src (glob)
revision 2
path subdir/s
- source file://*/svn-repo/src (glob)
+ source file:/*/$TESTTMP/svn-repo/src (glob)
revision 2
change file in svn and hg, commit
@@ -117,10 +117,10 @@
At revision 3.
$ hg debugsub
path s
- source file://*/svn-repo/src (glob)
+ source file:/*/$TESTTMP/svn-repo/src (glob)
revision 3
path subdir/s
- source file://*/svn-repo/src (glob)
+ source file:/*/$TESTTMP/svn-repo/src (glob)
revision 2
missing svn file, commit should fail
@@ -173,7 +173,7 @@
this commit fails because of meta changes
$ svn propset svn:mime-type 'text/html' s/alpha
- property 'svn:mime-type' set on 's/alpha' (glob)
+ property 'svn:mime-type' set on 's/alpha'
$ (hg ci --subrepos -m 'amend alpha from hg' 2>&1; echo "[$?]") | grep -vi 'out of date'
committing subrepository s
abort: svn:*Commit failed (details follow): (glob)
@@ -204,7 +204,7 @@
this commit fails because of externals meta changes
$ svn propset svn:mime-type 'text/html' s/externals/other
- property 'svn:mime-type' set on 's/externals/other' (glob)
+ property 'svn:mime-type' set on 's/externals/other'
$ hg ci --subrepos -m 'amend externals from hg'
committing subrepository s
abort: cannot commit svn externals (in subrepository "s")
@@ -216,19 +216,19 @@
$ cd ..
$ hg clone t tc
updating to branch default
- A tc/s/alpha (glob)
- U tc/s (glob)
+ A tc/s/alpha
+ U tc/s
Fetching external item into 'tc/s/externals'* (glob)
- A tc/s/externals/other (glob)
+ A tc/s/externals/other
Checked out external at revision 1.
Checked out revision 3.
- A tc/subdir/s/alpha (glob)
- U tc/subdir/s (glob)
+ A tc/subdir/s/alpha
+ U tc/subdir/s
Fetching external item into 'tc/subdir/s/externals'* (glob)
- A tc/subdir/s/externals/other (glob)
+ A tc/subdir/s/externals/other
Checked out external at revision 1.
Checked out revision 2.
@@ -239,10 +239,10 @@
$ hg debugsub
path s
- source file://*/svn-repo/src (glob)
+ source file:/*/$TESTTMP/svn-repo/src (glob)
revision 3
path subdir/s
- source file://*/svn-repo/src (glob)
+ source file:/*/$TESTTMP/svn-repo/src (glob)
revision 2
verify subrepo is contained within the repo directory
@@ -430,7 +430,7 @@
$ echo epsilon.py > dir/epsilon.py
$ svn add dir
A dir
- A dir/epsilon.py (glob)
+ A dir/epsilon.py
$ svn ci -qm 'Add dir/epsilon.py'
$ cd ../..
$ hg init rebaserepo
@@ -495,7 +495,7 @@
$ hg ci --subrepos -m cleanup | filter_svn_output
committing subrepository obstruct
- Sending obstruct/other (glob)
+ Sending obstruct/other
Committed revision 7.
At revision 7.
$ svn mkdir -qm "baseline" $SVNREPOURL/trunk
@@ -516,7 +516,7 @@
$ cd ..
$ rm -rf tempwc
$ svn co "$SVNREPOURL/branch"@10 recreated
- A recreated/somethingold (glob)
+ A recreated/somethingold
Checked out revision 10.
$ echo "recreated = [svn] $SVNREPOURL/branch" >> .hgsub
$ hg ci -m addsub
@@ -571,15 +571,9 @@
Test forgetting files, not implemented in svn subrepo, used to
traceback
-#if no-windows
$ hg forget 'notafile*'
- notafile*: No such file or directory
+ notafile*: $ENOENT$
[1]
-#else
- $ hg forget 'notafile'
- notafile: * (glob)
- [1]
-#endif
Test a subrepo referencing a just moved svn path. Last commit rev will
be different from the revision, and the path will be different as
@@ -590,8 +584,8 @@
$ mkdir trunk/subdir branches
$ echo a > trunk/subdir/a
$ svn add trunk/subdir branches
- A trunk/subdir (glob)
- A trunk/subdir/a (glob)
+ A trunk/subdir
+ A trunk/subdir/a
A branches
$ svn ci -qm addsubdir
$ svn cp -qm branchtrunk $SVNREPOURL/trunk $SVNREPOURL/branches/somebranch
@@ -600,7 +594,7 @@
$ hg init repo2
$ cd repo2
$ svn co $SVNREPOURL/branches/somebranch/subdir
- A subdir/a (glob)
+ A subdir/a
Checked out revision 15.
$ echo "subdir = [svn] $SVNREPOURL/branches/somebranch/subdir" > .hgsub
$ hg add .hgsub
@@ -624,10 +618,10 @@
$ echo 'sub/.hg/hgrc in svn repo' > sub/.hg/hgrc
$ svn add .hg sub
A .hg
- A .hg/hgrc (glob)
+ A .hg/hgrc
A sub
- A sub/.hg (glob)
- A sub/.hg/hgrc (glob)
+ A sub/.hg
+ A sub/.hg/hgrc
$ svn ci -qm 'add .hg/hgrc to be sanitized at hg update'
$ svn up -q
$ cd ..
@@ -637,8 +631,8 @@
$ cd ..
$ hg -R tc pull -u -q 2>&1 | sort
- warning: removing potentially hostile 'hgrc' in '$TESTTMP/sub/tc/s/.hg' (glob)
- warning: removing potentially hostile 'hgrc' in '$TESTTMP/sub/tc/s/sub/.hg' (glob)
+ warning: removing potentially hostile 'hgrc' in '$TESTTMP/sub/tc/s/.hg'
+ warning: removing potentially hostile 'hgrc' in '$TESTTMP/sub/tc/s/sub/.hg'
$ cd tc
$ grep ' s$' .hgsubstate
16 s
--- a/tests/test-subrepo.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-subrepo.t Mon Jan 22 17:53:02 2018 -0500
@@ -29,7 +29,7 @@
$ hg files -S
.hgsub
a
- s/a (glob)
+ s/a
$ hg -R s ci -Ams0
$ hg sum
@@ -58,10 +58,10 @@
$ mkdir snot
$ touch snot/file
$ hg remove -S snot/file
- not removing snot/file: file is untracked (glob)
+ not removing snot/file: file is untracked
[1]
$ hg cat snot/filenot
- snot/filenot: no such file in rev 7cf8cfea66e4 (glob)
+ snot/filenot: no such file in rev 7cf8cfea66e4
[1]
$ rm -r snot
@@ -70,12 +70,12 @@
$ echo b > s/a
$ hg revert --dry-run "set:subrepo('glob:s*')"
reverting subrepo s
- reverting s/a (glob)
+ reverting s/a
$ cat s/a
b
$ hg revert "set:subrepo('glob:s*')"
reverting subrepo s
- reverting s/a (glob)
+ reverting s/a
$ cat s/a
a
$ rm s/a.orig
@@ -131,7 +131,7 @@
phases: 2 draft
$ hg ci -m2
committing subrepository s
- committing subrepository s/ss (glob)
+ committing subrepository s/ss
$ hg sum
parent: 2:df30734270ae tip
2
@@ -205,7 +205,7 @@
$ hg init t
$ echo t > t/t
$ hg -R t add t
- adding t/t (glob)
+ adding t/t
5
@@ -321,7 +321,7 @@
my t@20a0db6fbf6c+ other t@7af322bc1198 ancestor t@6747d179aa9a
warning: conflicts while merging t! (edit, then use 'hg resolve --mark')
0 files updated, 0 files merged, 0 files removed, 1 files unresolved
- use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon
+ use 'hg resolve' to retry unresolved file merges or 'hg merge --abort' to abandon
subrepo t: merge with t:7af322bc1198a32402fe903e0b7ebcfc5c9bf8f4:hg
0 files updated, 0 files merged, 0 files removed, 0 files unresolved
(branch merge, don't forget to commit)
@@ -473,7 +473,7 @@
$ hg clone t tc
updating to branch default
cloning subrepo s from $TESTTMP/t/s
- cloning subrepo s/ss from $TESTTMP/t/s/ss (glob)
+ cloning subrepo s/ss from $TESTTMP/t/s/ss
cloning subrepo t from $TESTTMP/t/t
3 files updated, 0 files merged, 0 files removed, 0 files unresolved
$ cd tc
@@ -529,8 +529,8 @@
$ hg ci -m11
committing subrepository t
$ hg push
- pushing to $TESTTMP/t (glob)
- no changes made to subrepo s/ss since last push to $TESTTMP/t/s/ss (glob)
+ pushing to $TESTTMP/t
+ no changes made to subrepo s/ss since last push to $TESTTMP/t/s/ss
no changes made to subrepo s since last push to $TESTTMP/t/s
pushing subrepo t to $TESTTMP/t/t
searching for changes
@@ -550,16 +550,16 @@
$ hg ci -m12
committing subrepository s
$ hg push
- pushing to $TESTTMP/t (glob)
- no changes made to subrepo s/ss since last push to $TESTTMP/t/s/ss (glob)
+ pushing to $TESTTMP/t
+ no changes made to subrepo s/ss since last push to $TESTTMP/t/s/ss
pushing subrepo s to $TESTTMP/t/s
searching for changes
abort: push creates new remote head 12a213df6fa9! (in subrepository "s")
(merge or see 'hg help push' for details about pushing new heads)
[255]
$ hg push -f
- pushing to $TESTTMP/t (glob)
- pushing subrepo s/ss to $TESTTMP/t/s/ss (glob)
+ pushing to $TESTTMP/t
+ pushing subrepo s/ss to $TESTTMP/t/s/ss
searching for changes
no changes found
pushing subrepo s to $TESTTMP/t/s
@@ -582,7 +582,7 @@
$ hg clone . ../tcc
updating to branch default
cloning subrepo s from $TESTTMP/tc/s
- cloning subrepo s/ss from $TESTTMP/tc/s/ss (glob)
+ cloning subrepo s/ss from $TESTTMP/tc/s/ss
cloning subrepo t from $TESTTMP/tc/t
3 files updated, 0 files merged, 0 files removed, 0 files unresolved
@@ -590,7 +590,7 @@
$ hg push -R ../tcc .
pushing to .
- no changes made to subrepo s/ss since last push to s/ss (glob)
+ no changes made to subrepo s/ss since last push to s/ss
no changes made to subrepo s since last push to s
no changes made to subrepo t since last push to t
searching for changes
@@ -602,7 +602,7 @@
$ hg push ../tcc
pushing to ../tcc
- pushing subrepo s/ss to ../tcc/s/ss (glob)
+ pushing subrepo s/ss to ../tcc/s/ss
searching for changes
no changes found
pushing subrepo s to ../tcc/s
@@ -619,7 +619,7 @@
$ hg push ../tcc
pushing to ../tcc
- no changes made to subrepo s/ss since last push to ../tcc/s/ss (glob)
+ no changes made to subrepo s/ss since last push to ../tcc/s/ss
no changes made to subrepo s since last push to ../tcc/s
no changes made to subrepo t since last push to ../tcc/t
searching for changes
@@ -632,8 +632,8 @@
$ hg -R s update '.^'
1 files updated, 0 files merged, 0 files removed, 0 files unresolved
$ hg push
- pushing to $TESTTMP/t (glob)
- no changes made to subrepo s/ss since last push to $TESTTMP/t/s/ss (glob)
+ pushing to $TESTTMP/t
+ no changes made to subrepo s/ss since last push to $TESTTMP/t/s/ss
no changes made to subrepo s since last push to $TESTTMP/t/s
no changes made to subrepo t since last push to $TESTTMP/t/t
searching for changes
@@ -641,8 +641,8 @@
[1]
$ echo foo >> s/a
$ hg push
- pushing to $TESTTMP/t (glob)
- no changes made to subrepo s/ss since last push to $TESTTMP/t/s/ss (glob)
+ pushing to $TESTTMP/t
+ no changes made to subrepo s/ss since last push to $TESTTMP/t/s/ss
no changes made to subrepo s since last push to $TESTTMP/t/s
no changes made to subrepo t since last push to $TESTTMP/t/t
searching for changes
@@ -657,7 +657,7 @@
$ hg -R s/ss commit -m 'test dirty store detection'
$ hg out -S -r `hg log -r tip -T "{node|short}"`
- comparing with $TESTTMP/t (glob)
+ comparing with $TESTTMP/t
searching for changes
no changes found
comparing with $TESTTMP/t/s
@@ -676,8 +676,8 @@
no changes found
$ hg push
- pushing to $TESTTMP/t (glob)
- pushing subrepo s/ss to $TESTTMP/t/s/ss (glob)
+ pushing to $TESTTMP/t
+ pushing subrepo s/ss to $TESTTMP/t/s/ss
searching for changes
adding changesets
adding manifests
@@ -692,8 +692,8 @@
a subrepo store may be clean versus one repo but not versus another
$ hg push
- pushing to $TESTTMP/t (glob)
- no changes made to subrepo s/ss since last push to $TESTTMP/t/s/ss (glob)
+ pushing to $TESTTMP/t
+ no changes made to subrepo s/ss since last push to $TESTTMP/t/s/ss
no changes made to subrepo s since last push to $TESTTMP/t/s
no changes made to subrepo t since last push to $TESTTMP/t/t
searching for changes
@@ -701,7 +701,7 @@
[1]
$ hg push ../tcc
pushing to ../tcc
- pushing subrepo s/ss to ../tcc/s/ss (glob)
+ pushing subrepo s/ss to ../tcc/s/ss
searching for changes
adding changesets
adding manifests
@@ -740,7 +740,7 @@
$ cd ../tc
$ hg pull
- pulling from $TESTTMP/t (glob)
+ pulling from $TESTTMP/t
searching for changes
adding changesets
adding manifests
@@ -752,7 +752,7 @@
should pull t
$ hg incoming -S -r `hg log -r tip -T "{node|short}"`
- comparing with $TESTTMP/t (glob)
+ comparing with $TESTTMP/t
no changes found
comparing with $TESTTMP/t/s
searching for changes
@@ -918,15 +918,15 @@
$ echo test > testdelete/nested/foo
$ echo test > testdelete/nested2/foo
$ hg -R testdelete/nested add
- adding testdelete/nested/foo (glob)
+ adding testdelete/nested/foo
$ hg -R testdelete/nested2 add
- adding testdelete/nested2/foo (glob)
+ adding testdelete/nested2/foo
$ hg -R testdelete/nested ci -m test
$ hg -R testdelete/nested2 ci -m test
$ echo nested = nested > testdelete/.hgsub
$ echo nested2 = nested2 >> testdelete/.hgsub
$ hg -R testdelete add
- adding testdelete/.hgsub (glob)
+ adding testdelete/.hgsub
$ hg -R testdelete ci -m "nested 1 & 2 added"
$ echo nested = nested > testdelete/.hgsub
$ hg -R testdelete ci -m "nested 2 deleted"
@@ -943,19 +943,19 @@
$ hg init nested_absolute
$ echo test > nested_absolute/foo
$ hg -R nested_absolute add
- adding nested_absolute/foo (glob)
+ adding nested_absolute/foo
$ hg -R nested_absolute ci -mtest
$ cd mercurial
$ hg init nested_relative
$ echo test2 > nested_relative/foo2
$ hg -R nested_relative add
- adding nested_relative/foo2 (glob)
+ adding nested_relative/foo2
$ hg -R nested_relative ci -mtest2
$ hg init main
$ echo "nested_relative = ../nested_relative" > main/.hgsub
$ echo "nested_absolute = `pwd`/nested_absolute" >> main/.hgsub
$ hg -R main add
- adding main/.hgsub (glob)
+ adding main/.hgsub
$ hg -R main ci -m "add subrepos"
$ cd ..
$ hg clone mercurial/main mercurial2/main
@@ -1035,7 +1035,7 @@
$ rm repo/s/b
$ touch -t 200001010000 repo/.hgsubstate
$ hg -R repo revert --all
- reverting repo/.hgsubstate (glob)
+ reverting repo/.hgsubstate
reverting subrepo s
$ hg -R repo update
1 files updated, 0 files merged, 0 files removed, 0 files unresolved
@@ -1056,10 +1056,10 @@
$ echo sub/repo = sub/repo > .hgsub
$ hg add .hgsub
$ hg ci -mtest
- committing subrepository sub/repo (glob)
+ committing subrepository sub/repo
$ echo test >> sub/repo/foo
$ hg ci -mtest
- committing subrepository sub/repo (glob)
+ committing subrepository sub/repo
$ hg cat sub/repo/foo
test
test
@@ -1077,9 +1077,9 @@
$ hg cat -T '{path}\n' 'glob:**'
.hgsub
.hgsubstate
- sub/repo/foo (glob)
+ sub/repo/foo
$ hg cat -T '{path}\n' 're:^sub'
- sub/repo/foo (glob)
+ sub/repo/foo
missing subrepos in working directory:
@@ -1108,7 +1108,7 @@
new changesets 19487b456929:be5eb94e7215
(run 'hg update' to get a working copy)
$ hg -R issue1852b update
- abort: default path for subrepository not found (in subrepository "sub/repo") (glob)
+ abort: default path for subrepository not found (in subrepository "sub/repo")
[255]
Ensure a full traceback, not just the SubrepoAbort part
@@ -1133,14 +1133,14 @@
adding file changes
added 1 changesets with 2 changes to 2 files
new changesets 19487b456929
- cloning subrepo sub/repo from issue1852a/sub/repo (glob)
+ cloning subrepo sub/repo from issue1852a/sub/repo
2 files updated, 0 files merged, 0 files removed, 0 files unresolved
Try to push from the other side
$ hg -R issue1852a push `pwd`/issue1852c
- pushing to $TESTTMP/issue1852c (glob)
- pushing subrepo sub/repo to $TESTTMP/issue1852c/sub/repo (glob)
+ pushing to $TESTTMP/issue1852c
+ pushing subrepo sub/repo to $TESTTMP/issue1852c/sub/repo
searching for changes
no changes found
searching for changes
@@ -1183,12 +1183,12 @@
$ echo def > issue1852a/sub/repo/foo
$ hg -R issue1852a ci -SAm 'tweaked subrepo'
adding tmp/sub/repo/foo_p
- committing subrepository sub/repo (glob)
+ committing subrepository sub/repo
$ echo 'addedsub = addedsub' >> issue1852d/.hgsub
$ echo xyz > issue1852d/sub/repo/foo
$ hg -R issue1852d pull -u
- pulling from $TESTTMP/issue1852a (glob)
+ pulling from $TESTTMP/issue1852a
searching for changes
adding changesets
adding manifests
@@ -1197,14 +1197,14 @@
new changesets c82b79fdcc5b
subrepository sub/repo diverged (local revision: f42d5c7504a8, remote revision: 46cd4aac504c)
(M)erge, keep (l)ocal [working copy] or keep (r)emote [destination]? m
- pulling subrepo sub/repo from $TESTTMP/issue1852a/sub/repo (glob)
+ pulling subrepo sub/repo from $TESTTMP/issue1852a/sub/repo
searching for changes
adding changesets
adding manifests
adding file changes
added 1 changesets with 1 changes to 1 files
new changesets 46cd4aac504c
- subrepository sources for sub/repo differ (glob)
+ subrepository sources for sub/repo differ
use (l)ocal source (f42d5c7504a8) or (r)emote source (46cd4aac504c)? l
1 files updated, 0 files merged, 0 files removed, 0 files unresolved
$ cat issue1852d/.hgsubstate
@@ -1541,8 +1541,8 @@
? s/f9
$ hg add -S
adding f8
- adding s/f10 (glob)
- adding s/f9 (glob)
+ adding s/f10
+ adding s/f9
$ hg st -S
A f8
A s/f10
@@ -1583,7 +1583,7 @@
? s/fn18
$ hg add -S 'glob:**fm*'
adding fm15
- adding s/fm17 (glob)
+ adding s/fm17
$ hg st -S
A fm15
A s/fm17
@@ -1621,7 +1621,7 @@
> default=../issue3781-dest/
> EOF
$ hg push --config devel.legacy.exchange=bundle1
- pushing to $TESTTMP/issue3781-dest (glob)
+ pushing to $TESTTMP/issue3781-dest
pushing subrepo s to $TESTTMP/issue3781-dest/s
searching for changes
no changes found
@@ -1631,7 +1631,7 @@
# clean the push cache
$ rm s/.hg/cache/storehash/*
$ hg push # bundle2+
- pushing to $TESTTMP/issue3781-dest (glob)
+ pushing to $TESTTMP/issue3781-dest
pushing subrepo s to $TESTTMP/issue3781-dest/s
searching for changes
no changes found
@@ -1689,7 +1689,7 @@
$ echo phasecheck4 >> t/t
$ hg commit -S -m phasecheck4
committing subrepository s
- committing subrepository s/ss (glob)
+ committing subrepository s/ss
warning: changes are committed in secret phase from subrepository ss
committing subrepository t
warning: changes are committed in secret phase from subrepository s
--- a/tests/test-tag.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-tag.t Mon Jan 22 17:53:02 2018 -0500
@@ -515,7 +515,7 @@
$ hg -R repo-tag --config hooks.commit="sh ../issue3344.sh" tag tag
hook: tag changes detected
hook: +A be090ea6625635128e90f7d89df8beeb2bcc1653 tag
- pushing to $TESTTMP/repo-tag-target (glob)
+ pushing to $TESTTMP/repo-tag-target
searching for changes
adding changesets
adding manifests
@@ -619,7 +619,7 @@
$ cd ../repo-automatic-tag-merge-clone
$ hg pull
- pulling from $TESTTMP/repo-automatic-tag-merge (glob)
+ pulling from $TESTTMP/repo-automatic-tag-merge
searching for changes
adding changesets
adding manifests
@@ -708,7 +708,7 @@
the following 1 tags are in conflict: t7
automatic tag merging of .hgtags failed! (use 'hg resolve --tool :merge' or another merge tool of your choice)
2 files updated, 0 files merged, 0 files removed, 1 files unresolved
- use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon
+ use 'hg resolve' to retry unresolved file merges or 'hg merge --abort' to abandon
[1]
$ hg resolve -l
U .hgtags
--- a/tests/test-template-engine.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-template-engine.t Mon Jan 22 17:53:02 2018 -0500
@@ -4,16 +4,22 @@
> from mercurial import templater
>
> class mytemplater(object):
- > def __init__(self, loader, filters, defaults, aliases):
+ > def __init__(self, loader, filters, defaults, resources, aliases):
> self.loader = loader
+ > self._defaults = defaults
+ > self._resources = resources
>
> def process(self, t, map):
> tmpl = self.loader(t)
- > for k, v in map.iteritems():
+ > props = self._defaults.copy()
+ > props.update(map)
+ > for k, v in props.iteritems():
> if k in ('templ', 'ctx', 'repo', 'revcache', 'cache', 'troubles'):
> continue
> if hasattr(v, '__call__'):
- > v = v(**map)
+ > props = self._resources.copy()
+ > props.update(map)
+ > v = v(**props)
> v = templater.stringify(v)
> tmpl = tmpl.replace('{{%s}}' % k, v)
> yield tmpl
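
The point of the update above is the template engine constructor: the test engine now accepts a fifth positional argument, resources, alongside defaults, and callable keywords are invoked with those resources merged into the mapping rather than with the bare mapping. A stripped-down engine honouring the same five-argument shape is sketched below; everything beyond the parameter list (names, behaviour of process) is illustrative, not Mercurial API:

    class passthroughengine(object):
        """Sketch of a custom engine tracking the new constructor shape."""

        def __init__(self, loader, filters, defaults, resources, aliases):
            self._loader = loader        # callable returning raw template text
            self._defaults = defaults    # static template keywords
            self._resources = resources  # per-mapping objects (repo, ctx, ...)

        def process(self, t, mapping):
            # Emit the template unchanged; a real engine would expand keywords
            # from defaults, resources and the mapping, as mytemplater does.
            yield self._loader(t)
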
--- a/tests/test-tools.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-tools.t Mon Jan 22 17:53:02 2018 -0500
@@ -13,6 +13,7 @@
check if file is newer (or same)
-r, --recurse recurse into directories
-S, --sha1 show sha1 hash of the content
+ --sha256 show sha256 hash of the content
-M, --md5 show md5 hash of the content
-D, --dump dump file content
-H, --hexdump hexdump file content
@@ -38,6 +39,12 @@
$ f foo
foo:
+ $ f --sha1 foo
+ foo: sha1=f1d2d2f924e986ac86fdf7b36c94bcdf32beec15
+
+ $ f --sha256 foo
+ foo: sha256=b5bb9d8014a0f9b1d61e21e796d78dccdf1352f23cd32812f4850b878ae4944c
+
#if symlink
$ f foo --mode
foo: mode=644
@@ -91,10 +98,10 @@
$ f -qr dir -HB 17
dir: directory with 3 files (symlink !)
dir: directory with 2 files (no-symlink !)
- dir/bar: (glob)
+ dir/bar:
0000: 31 0a 32 0a 33 0a 34 0a 35 0a 36 0a 37 0a 38 0a |1.2.3.4.5.6.7.8.|
0010: 39 |9|
- dir/foo: (glob)
+ dir/foo:
0000: 66 6f 6f 0a |foo.|
dir/l: (symlink !)
0000: 79 61 64 64 61 |yadda| (symlink !)
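
The digests expected for the new `--sha256` flag of the `f` helper are easy to sanity-check. Assuming the file `foo` holds the literal text 'foo' plus a trailing newline (the assumption under which the printed values are reproducible), both hashes come straight from hashlib:

    import hashlib

    content = b'foo\n'   # assumed file body, consistent with the digests above
    print('foo: sha1=%s' % hashlib.sha1(content).hexdigest())
    print('foo: sha256=%s' % hashlib.sha256(content).hexdigest())
    # foo: sha1=f1d2d2f924e986ac86fdf7b36c94bcdf32beec15
    # foo: sha256=b5bb9d8014a0f9b1d61e21e796d78dccdf1352f23cd32812f4850b878ae4944c
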
--- a/tests/test-transplant.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-transplant.t Mon Jan 22 17:53:02 2018 -0500
@@ -354,7 +354,6 @@
added 1 changesets with 1 changes to 1 files
applying a53251cdf717
a53251cdf717 transplanted to 8d9279348abb
- new changesets 37a1297eb21b:8d9279348abb
$ hg log --template '{rev} {parents} {desc}\n'
2 b3
1 b1
@@ -560,7 +559,6 @@
added 2 changesets with 2 changes to 2 files
applying a53251cdf717
4:a53251cdf717 merged at 4831f4dc831a
- new changesets 722f4667af76:4831f4dc831a
test interactive transplant
--- a/tests/test-treediscovery.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-treediscovery.t Mon Jan 22 17:53:02 2018 -0500
@@ -516,65 +516,65 @@
#if zstd
$ tstop show
"GET /?cmd=capabilities HTTP/1.1" 200 -
- "GET /?cmd=heads HTTP/1.1" 200 - x-hgproto-1:0.1 0.2 comp=zstd,zlib,none,bzip2
- "GET /?cmd=branches HTTP/1.1" 200 - x-hgarg-1:nodes=d8f638ac69e9ae8dea4f09f11d696546a912d961 x-hgproto-1:0.1 0.2 comp=zstd,zlib,none,bzip2
- "GET /?cmd=between HTTP/1.1" 200 - x-hgarg-1:pairs=d8f638ac69e9ae8dea4f09f11d696546a912d961-d57206cc072a18317c1e381fb60aa31bd3401785 x-hgproto-1:0.1 0.2 comp=zstd,zlib,none,bzip2
- "GET /?cmd=changegroupsubset HTTP/1.1" 200 - x-hgarg-1:bases=d8f638ac69e9ae8dea4f09f11d696546a912d961&heads=d8f638ac69e9ae8dea4f09f11d696546a912d961 x-hgproto-1:0.1 0.2 comp=zstd,zlib,none,bzip2
- "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=phases x-hgproto-1:0.1 0.2 comp=zstd,zlib,none,bzip2
+ "GET /?cmd=heads HTTP/1.1" 200 - x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$
+ "GET /?cmd=branches HTTP/1.1" 200 - x-hgarg-1:nodes=d8f638ac69e9ae8dea4f09f11d696546a912d961 x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$
+ "GET /?cmd=between HTTP/1.1" 200 - x-hgarg-1:pairs=d8f638ac69e9ae8dea4f09f11d696546a912d961-d57206cc072a18317c1e381fb60aa31bd3401785 x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$
+ "GET /?cmd=changegroupsubset HTTP/1.1" 200 - x-hgarg-1:bases=d8f638ac69e9ae8dea4f09f11d696546a912d961&heads=d8f638ac69e9ae8dea4f09f11d696546a912d961 x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$
+ "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=phases x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$
"GET /?cmd=capabilities HTTP/1.1" 200 -
- "GET /?cmd=heads HTTP/1.1" 200 - x-hgproto-1:0.1 0.2 comp=zstd,zlib,none,bzip2
- "GET /?cmd=branches HTTP/1.1" 200 - x-hgarg-1:nodes=d8f638ac69e9ae8dea4f09f11d696546a912d961 x-hgproto-1:0.1 0.2 comp=zstd,zlib,none,bzip2
- "GET /?cmd=between HTTP/1.1" 200 - x-hgarg-1:pairs=d8f638ac69e9ae8dea4f09f11d696546a912d961-d57206cc072a18317c1e381fb60aa31bd3401785 x-hgproto-1:0.1 0.2 comp=zstd,zlib,none,bzip2
+ "GET /?cmd=heads HTTP/1.1" 200 - x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$
+ "GET /?cmd=branches HTTP/1.1" 200 - x-hgarg-1:nodes=d8f638ac69e9ae8dea4f09f11d696546a912d961 x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$
+ "GET /?cmd=between HTTP/1.1" 200 - x-hgarg-1:pairs=d8f638ac69e9ae8dea4f09f11d696546a912d961-d57206cc072a18317c1e381fb60aa31bd3401785 x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$
"GET /?cmd=capabilities HTTP/1.1" 200 -
- "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=bookmarks x-hgproto-1:0.1 0.2 comp=zstd,zlib,none,bzip2
- "GET /?cmd=heads HTTP/1.1" 200 - x-hgproto-1:0.1 0.2 comp=zstd,zlib,none,bzip2
- "GET /?cmd=branches HTTP/1.1" 200 - x-hgarg-1:nodes=d8f638ac69e9ae8dea4f09f11d696546a912d961 x-hgproto-1:0.1 0.2 comp=zstd,zlib,none,bzip2
- "GET /?cmd=between HTTP/1.1" 200 - x-hgarg-1:pairs=d8f638ac69e9ae8dea4f09f11d696546a912d961-d57206cc072a18317c1e381fb60aa31bd3401785 x-hgproto-1:0.1 0.2 comp=zstd,zlib,none,bzip2
- "GET /?cmd=changegroupsubset HTTP/1.1" 200 - x-hgarg-1:bases=d8f638ac69e9ae8dea4f09f11d696546a912d961&heads=d8f638ac69e9ae8dea4f09f11d696546a912d961+2c8d5d5ec612be65cdfdeac78b7662ab1696324a x-hgproto-1:0.1 0.2 comp=zstd,zlib,none,bzip2
- "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=phases x-hgproto-1:0.1 0.2 comp=zstd,zlib,none,bzip2
+ "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=bookmarks x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$
+ "GET /?cmd=heads HTTP/1.1" 200 - x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$
+ "GET /?cmd=branches HTTP/1.1" 200 - x-hgarg-1:nodes=d8f638ac69e9ae8dea4f09f11d696546a912d961 x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$
+ "GET /?cmd=between HTTP/1.1" 200 - x-hgarg-1:pairs=d8f638ac69e9ae8dea4f09f11d696546a912d961-d57206cc072a18317c1e381fb60aa31bd3401785 x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$
+ "GET /?cmd=changegroupsubset HTTP/1.1" 200 - x-hgarg-1:bases=d8f638ac69e9ae8dea4f09f11d696546a912d961&heads=d8f638ac69e9ae8dea4f09f11d696546a912d961+2c8d5d5ec612be65cdfdeac78b7662ab1696324a x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$
+ "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=phases x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$
"GET /?cmd=capabilities HTTP/1.1" 200 -
- "GET /?cmd=heads HTTP/1.1" 200 - x-hgproto-1:0.1 0.2 comp=zstd,zlib,none,bzip2
- "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=phases x-hgproto-1:0.1 0.2 comp=zstd,zlib,none,bzip2
- "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=bookmarks x-hgproto-1:0.1 0.2 comp=zstd,zlib,none,bzip2
- "GET /?cmd=branchmap HTTP/1.1" 200 - x-hgproto-1:0.1 0.2 comp=zstd,zlib,none,bzip2
- "GET /?cmd=branchmap HTTP/1.1" 200 - x-hgproto-1:0.1 0.2 comp=zstd,zlib,none,bzip2
- "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=bookmarks x-hgproto-1:0.1 0.2 comp=zstd,zlib,none,bzip2
+ "GET /?cmd=heads HTTP/1.1" 200 - x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$
+ "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=phases x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$
+ "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=bookmarks x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$
+ "GET /?cmd=branchmap HTTP/1.1" 200 - x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$
+ "GET /?cmd=branchmap HTTP/1.1" 200 - x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$
+ "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=bookmarks x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$
"POST /?cmd=unbundle HTTP/1.1" 200 - x-hgarg-1:heads=686173686564+1827a5bb63e602382eb89dd58f2ac9f3b007ad91* (glob)
- "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=phases x-hgproto-1:0.1 0.2 comp=zstd,zlib,none,bzip2
+ "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=phases x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$
"GET /?cmd=capabilities HTTP/1.1" 200 -
- "GET /?cmd=heads HTTP/1.1" 200 - x-hgproto-1:0.1 0.2 comp=zstd,zlib,none,bzip2
+ "GET /?cmd=heads HTTP/1.1" 200 - x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$
"GET /?cmd=capabilities HTTP/1.1" 200 -
- "GET /?cmd=heads HTTP/1.1" 200 - x-hgproto-1:0.1 0.2 comp=zstd,zlib,none,bzip2
+ "GET /?cmd=heads HTTP/1.1" 200 - x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$
#else
$ tstop show
"GET /?cmd=capabilities HTTP/1.1" 200 -
- "GET /?cmd=heads HTTP/1.1" 200 - x-hgproto-1:0.1 0.2 comp=zlib,none,bzip2
- "GET /?cmd=branches HTTP/1.1" 200 - x-hgarg-1:nodes=d8f638ac69e9ae8dea4f09f11d696546a912d961 x-hgproto-1:0.1 0.2 comp=zlib,none,bzip2
- "GET /?cmd=between HTTP/1.1" 200 - x-hgarg-1:pairs=d8f638ac69e9ae8dea4f09f11d696546a912d961-d57206cc072a18317c1e381fb60aa31bd3401785 x-hgproto-1:0.1 0.2 comp=zlib,none,bzip2
- "GET /?cmd=changegroupsubset HTTP/1.1" 200 - x-hgarg-1:bases=d8f638ac69e9ae8dea4f09f11d696546a912d961&heads=d8f638ac69e9ae8dea4f09f11d696546a912d961 x-hgproto-1:0.1 0.2 comp=zlib,none,bzip2
- "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=phases x-hgproto-1:0.1 0.2 comp=zlib,none,bzip2
+ "GET /?cmd=heads HTTP/1.1" 200 - x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$
+ "GET /?cmd=branches HTTP/1.1" 200 - x-hgarg-1:nodes=d8f638ac69e9ae8dea4f09f11d696546a912d961 x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$
+ "GET /?cmd=between HTTP/1.1" 200 - x-hgarg-1:pairs=d8f638ac69e9ae8dea4f09f11d696546a912d961-d57206cc072a18317c1e381fb60aa31bd3401785 x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$
+ "GET /?cmd=changegroupsubset HTTP/1.1" 200 - x-hgarg-1:bases=d8f638ac69e9ae8dea4f09f11d696546a912d961&heads=d8f638ac69e9ae8dea4f09f11d696546a912d961 x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$
+ "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=phases x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$
"GET /?cmd=capabilities HTTP/1.1" 200 -
- "GET /?cmd=heads HTTP/1.1" 200 - x-hgproto-1:0.1 0.2 comp=zlib,none,bzip2
- "GET /?cmd=branches HTTP/1.1" 200 - x-hgarg-1:nodes=d8f638ac69e9ae8dea4f09f11d696546a912d961 x-hgproto-1:0.1 0.2 comp=zlib,none,bzip2
- "GET /?cmd=between HTTP/1.1" 200 - x-hgarg-1:pairs=d8f638ac69e9ae8dea4f09f11d696546a912d961-d57206cc072a18317c1e381fb60aa31bd3401785 x-hgproto-1:0.1 0.2 comp=zlib,none,bzip2
+ "GET /?cmd=heads HTTP/1.1" 200 - x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$
+ "GET /?cmd=branches HTTP/1.1" 200 - x-hgarg-1:nodes=d8f638ac69e9ae8dea4f09f11d696546a912d961 x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$
+ "GET /?cmd=between HTTP/1.1" 200 - x-hgarg-1:pairs=d8f638ac69e9ae8dea4f09f11d696546a912d961-d57206cc072a18317c1e381fb60aa31bd3401785 x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$
"GET /?cmd=capabilities HTTP/1.1" 200 -
- "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=bookmarks x-hgproto-1:0.1 0.2 comp=zlib,none,bzip2
- "GET /?cmd=heads HTTP/1.1" 200 - x-hgproto-1:0.1 0.2 comp=zlib,none,bzip2
- "GET /?cmd=branches HTTP/1.1" 200 - x-hgarg-1:nodes=d8f638ac69e9ae8dea4f09f11d696546a912d961 x-hgproto-1:0.1 0.2 comp=zlib,none,bzip2
- "GET /?cmd=between HTTP/1.1" 200 - x-hgarg-1:pairs=d8f638ac69e9ae8dea4f09f11d696546a912d961-d57206cc072a18317c1e381fb60aa31bd3401785 x-hgproto-1:0.1 0.2 comp=zlib,none,bzip2
- "GET /?cmd=changegroupsubset HTTP/1.1" 200 - x-hgarg-1:bases=d8f638ac69e9ae8dea4f09f11d696546a912d961&heads=d8f638ac69e9ae8dea4f09f11d696546a912d961+2c8d5d5ec612be65cdfdeac78b7662ab1696324a x-hgproto-1:0.1 0.2 comp=zlib,none,bzip2
- "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=phases x-hgproto-1:0.1 0.2 comp=zlib,none,bzip2
+ "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=bookmarks x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$
+ "GET /?cmd=heads HTTP/1.1" 200 - x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$
+ "GET /?cmd=branches HTTP/1.1" 200 - x-hgarg-1:nodes=d8f638ac69e9ae8dea4f09f11d696546a912d961 x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$
+ "GET /?cmd=between HTTP/1.1" 200 - x-hgarg-1:pairs=d8f638ac69e9ae8dea4f09f11d696546a912d961-d57206cc072a18317c1e381fb60aa31bd3401785 x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$
+ "GET /?cmd=changegroupsubset HTTP/1.1" 200 - x-hgarg-1:bases=d8f638ac69e9ae8dea4f09f11d696546a912d961&heads=d8f638ac69e9ae8dea4f09f11d696546a912d961+2c8d5d5ec612be65cdfdeac78b7662ab1696324a x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$
+ "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=phases x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$
"GET /?cmd=capabilities HTTP/1.1" 200 -
- "GET /?cmd=heads HTTP/1.1" 200 - x-hgproto-1:0.1 0.2 comp=zlib,none,bzip2
- "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=phases x-hgproto-1:0.1 0.2 comp=zlib,none,bzip2
- "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=bookmarks x-hgproto-1:0.1 0.2 comp=zlib,none,bzip2
- "GET /?cmd=branchmap HTTP/1.1" 200 - x-hgproto-1:0.1 0.2 comp=zlib,none,bzip2
- "GET /?cmd=branchmap HTTP/1.1" 200 - x-hgproto-1:0.1 0.2 comp=zlib,none,bzip2
- "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=bookmarks x-hgproto-1:0.1 0.2 comp=zlib,none,bzip2
+ "GET /?cmd=heads HTTP/1.1" 200 - x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$
+ "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=phases x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$
+ "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=bookmarks x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$
+ "GET /?cmd=branchmap HTTP/1.1" 200 - x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$
+ "GET /?cmd=branchmap HTTP/1.1" 200 - x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$
+ "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=bookmarks x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$
"POST /?cmd=unbundle HTTP/1.1" 200 - x-hgarg-1:heads=686173686564+1827a5bb63e602382eb89dd58f2ac9f3b007ad91* (glob)
- "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=phases x-hgproto-1:0.1 0.2 comp=zlib,none,bzip2
+ "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=phases x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$
"GET /?cmd=capabilities HTTP/1.1" 200 -
- "GET /?cmd=heads HTTP/1.1" 200 - x-hgproto-1:0.1 0.2 comp=zlib,none,bzip2
+ "GET /?cmd=heads HTTP/1.1" 200 - x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$
"GET /?cmd=capabilities HTTP/1.1" 200 -
- "GET /?cmd=heads HTTP/1.1" 200 - x-hgproto-1:0.1 0.2 comp=zlib,none,bzip2
+ "GET /?cmd=heads HTTP/1.1" 200 - x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$
#endif
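
The treediscovery expectations replace the hard-coded compression lists with a `$USUAL_COMPRESSIONS$` placeholder, so the same expected text works for both the zstd and the zstd-less build. Conceptually the actual output is normalized to the placeholder before comparison; the table below is only an illustration of that idea, with the alternation copied from the two literal lists above rather than from the test suite's real pattern:

    import re

    SUBSTITUTIONS = [
        (re.compile(r'\b(zstd,zlib,none,bzip2|zlib,none,bzip2)\b'),
         '$USUAL_COMPRESSIONS$'),
    ]

    def normalize(actual_line):
        # Rewrite the platform-dependent part of the actual output so it can
        # be compared verbatim against the placeholder in the expected line.
        for pattern, placeholder in SUBSTITUTIONS:
            actual_line = pattern.sub(placeholder, actual_line)
        return actual_line

    assert normalize('comp=zlib,none,bzip2') == 'comp=$USUAL_COMPRESSIONS$'
    assert normalize('comp=zstd,zlib,none,bzip2') == 'comp=$USUAL_COMPRESSIONS$'
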
--- a/tests/test-treemanifest.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-treemanifest.t Mon Jan 22 17:53:02 2018 -0500
@@ -54,12 +54,12 @@
$ hg files -r .
a
b
- dir1/a (glob)
- dir1/b (glob)
- dir1/dir1/a (glob)
- dir1/dir1/b (glob)
- dir1/dir2/a (glob)
- dir1/dir2/b (glob)
+ dir1/a
+ dir1/b
+ dir1/dir1/a
+ dir1/dir1/b
+ dir1/dir2/a
+ dir1/dir2/b
e
The manifest command works
@@ -80,7 +80,7 @@
$ mkdir dir2
$ echo 3 > dir2/a
$ hg add dir2
- adding dir2/a (glob)
+ adding dir2/a
$ hg debugindex --dir dir1 > before
$ hg ci -qm 'add dir2'
$ hg debugindex --dir dir1 > after
@@ -90,8 +90,8 @@
Removing a directory does not create a revlog entry
$ hg rm dir1/dir1
- removing dir1/dir1/a (glob)
- removing dir1/dir1/b (glob)
+ removing dir1/dir1/a
+ removing dir1/dir1/b
$ hg debugindex --dir dir1/dir1 > before
$ hg ci -qm 'remove dir1/dir1'
$ hg debugindex --dir dir1/dir1 > after
@@ -105,12 +105,12 @@
2 files updated, 0 files merged, 0 files removed, 0 files unresolved
$ mv .hg/store/meta/dir2 .hg/store/meta/dir2-backup
$ hg files -r . dir1
- dir1/a (glob)
- dir1/b (glob)
- dir1/dir1/a (glob)
- dir1/dir1/b (glob)
- dir1/dir2/a (glob)
- dir1/dir2/b (glob)
+ dir1/a
+ dir1/b
+ dir1/dir1/a
+ dir1/dir1/b
+ dir1/dir2/a
+ dir1/dir2/b
Check that status between revisions works (calls treemanifest.matches())
without loading all directory revlogs
@@ -161,7 +161,7 @@
1 files updated, 0 files merged, 0 files removed, 0 files unresolved
(branch merge, don't forget to commit)
$ hg revert -r 'desc("modify dir2/a")' .
- reverting dir1/a (glob)
+ reverting dir1/a
$ hg ci -m 'merge, keeping parent 1'
$ hg debugindex --dir dir2 > after
$ diff before after
@@ -177,7 +177,7 @@
1 files updated, 0 files merged, 0 files removed, 0 files unresolved
(branch merge, don't forget to commit)
$ hg revert -r 'desc("modify dir1/a")' .
- reverting dir2/a (glob)
+ reverting dir2/a
$ hg ci -m 'merge, keeping parent 2'
created new head
$ hg debugindex --dir dir1 > after
@@ -322,7 +322,7 @@
M dir1/a
$ hg --config extensions.strip= strip tip
1 files updated, 0 files merged, 0 files removed, 0 files unresolved
- saved backup bundle to $TESTTMP/repo-mixed/.hg/strip-backup/51cfd7b1e13b-78a2f3ed-backup.hg (glob)
+ saved backup bundle to $TESTTMP/repo-mixed/.hg/strip-backup/51cfd7b1e13b-78a2f3ed-backup.hg
$ hg debugindex --dir dir1
rev offset length delta linkrev nodeid p1 p2
0 0 127 -1 4 064927a0648a 000000000000 000000000000
@@ -455,31 +455,31 @@
Test files from the root.
$ hg files -r .
- .A/one.txt (glob)
- .A/two.txt (glob)
- b/bar/fruits.txt (glob)
- b/bar/orange/fly/gnat.py (glob)
- b/bar/orange/fly/housefly.txt (glob)
- b/foo/apple/bees/flower.py (glob)
+ .A/one.txt
+ .A/two.txt
+ b/bar/fruits.txt
+ b/bar/orange/fly/gnat.py
+ b/bar/orange/fly/housefly.txt
+ b/foo/apple/bees/flower.py
c.txt
d.py
Excludes with a glob should not exclude everything from the glob's root
$ hg files -r . -X 'b/fo?' b
- b/bar/fruits.txt (glob)
- b/bar/orange/fly/gnat.py (glob)
- b/bar/orange/fly/housefly.txt (glob)
+ b/bar/fruits.txt
+ b/bar/orange/fly/gnat.py
+ b/bar/orange/fly/housefly.txt
$ cp -R .hg/store .hg/store-copy
Test files for a subdirectory.
$ rm -r .hg/store/meta/~2e_a
$ hg files -r . b
- b/bar/fruits.txt (glob)
- b/bar/orange/fly/gnat.py (glob)
- b/bar/orange/fly/housefly.txt (glob)
- b/foo/apple/bees/flower.py (glob)
+ b/bar/fruits.txt
+ b/bar/orange/fly/gnat.py
+ b/bar/orange/fly/housefly.txt
+ b/foo/apple/bees/flower.py
$ hg diff -r '.^' -r . --stat b
b/bar/fruits.txt | 1 +
b/bar/orange/fly/gnat.py | 1 +
@@ -494,7 +494,7 @@
$ rm -r .hg/store/meta/b/bar/orange/fly
$ rm -r .hg/store/meta/b/foo/apple/bees
$ hg files -r . -I path:b/bar -X path:b/bar/orange/fly -I path:b/foo -X path:b/foo/apple/bees
- b/bar/fruits.txt (glob)
+ b/bar/fruits.txt
$ hg diff -r '.^' -r . --stat -I path:b/bar -X path:b/bar/orange/fly -I path:b/foo -X path:b/foo/apple/bees
b/bar/fruits.txt | 1 +
1 files changed, 1 insertions(+), 0 deletions(-)
@@ -505,9 +505,9 @@
$ rm -r .hg/store/meta/~2e_a
$ rm -r .hg/store/meta/b/foo
$ hg files -r . -X path:b/foo b
- b/bar/fruits.txt (glob)
- b/bar/orange/fly/gnat.py (glob)
- b/bar/orange/fly/housefly.txt (glob)
+ b/bar/fruits.txt
+ b/bar/orange/fly/gnat.py
+ b/bar/orange/fly/housefly.txt
$ hg diff -r '.^' -r . --stat -X path:b/foo b
b/bar/fruits.txt | 1 +
b/bar/orange/fly/gnat.py | 1 +
@@ -521,8 +521,8 @@
$ rm -r .hg/store/meta/~2e_a
$ rm -r .hg/store/meta/b/foo
$ hg files -r . -I path:b/bar/orange -I path:a b
- b/bar/orange/fly/gnat.py (glob)
- b/bar/orange/fly/housefly.txt (glob)
+ b/bar/orange/fly/gnat.py
+ b/bar/orange/fly/housefly.txt
$ hg diff -r '.^' -r . --stat -I path:b/bar/orange -I path:a b
b/bar/orange/fly/gnat.py | 1 +
b/bar/orange/fly/housefly.txt | 1 +
@@ -536,7 +536,7 @@
$ rm -r .hg/store/meta/b/foo
$ rm -r .hg/store/meta/b/bar/orange
$ hg files -r . glob:**.txt -I path:b/bar -X path:b/bar/orange
- b/bar/fruits.txt (glob)
+ b/bar/fruits.txt
$ hg diff -r '.^' -r . --stat glob:**.txt -I path:b/bar -X path:b/bar/orange
b/bar/fruits.txt | 1 +
1 files changed, 1 insertions(+), 0 deletions(-)
@@ -838,7 +838,7 @@
$ hg co '.^'
1 files updated, 0 files merged, 0 files removed, 0 files unresolved
$ hg revert -r tip dir/
- reverting dir/file (glob)
+ reverting dir/file
$ echo b > file # to make sure root manifest is sent
$ hg ci -m grafted
created new head
@@ -855,7 +855,7 @@
2 files updated, 0 files merged, 0 files removed, 0 files unresolved
$ cd grafted-dir-repo-clone
$ hg pull -r 2
- pulling from $TESTTMP/grafted-dir-repo (glob)
+ pulling from $TESTTMP/grafted-dir-repo
searching for changes
adding changesets
adding manifests
@@ -869,7 +869,7 @@
$ hg commit -Aqm 'pre-empty commit'
$ hg rm z
$ hg commit --amend -m 'empty commit'
- saved backup bundle to $TESTTMP/grafted-dir-repo-clone/.hg/strip-backup/cb99d5717cea-9e3b6b02-amend.hg (glob)
+ saved backup bundle to $TESTTMP/grafted-dir-repo-clone/.hg/strip-backup/cb99d5717cea-9e3b6b02-amend.hg
$ hg log -r 'tip + tip^' -T '{manifest}\n'
1:678d3574b88c
1:678d3574b88c
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-unamend.t Mon Jan 22 17:53:02 2018 -0500
@@ -0,0 +1,372 @@
+Test for command `hg unamend` which lives in the uncommit extension
+===================================================================
+
+ $ cat >> $HGRCPATH << EOF
+ > [alias]
+ > glog = log -G -T '{rev}:{node|short} {desc}'
+ > [experimental]
+ > evolution = createmarkers, allowunstable
+ > [extensions]
+ > rebase =
+ > amend =
+ > uncommit =
+ > EOF
+
+Repo Setup
+
+ $ hg init repo
+ $ cd repo
+ $ for ch in a b c d e f g h; do touch $ch; echo "foo" >> $ch; hg ci -Aqm "Added "$ch; done
+
+ $ hg glog
+ @ 7:ec2426147f0e Added h
+ |
+ o 6:87d6d6676308 Added g
+ |
+ o 5:825660c69f0c Added f
+ |
+ o 4:aa98ab95a928 Added e
+ |
+ o 3:62615734edd5 Added d
+ |
+ o 2:28ad74487de9 Added c
+ |
+ o 1:29becc82797a Added b
+ |
+ o 0:18d04c59bb5d Added a
+
+Trying to unamend when there was no amend done
+
+ $ hg unamend
+ abort: changeset must have one predecessor, found 0 predecessors
+ [255]
+
+Unamend on clean wdir and tip
+
+ $ echo "bar" >> h
+ $ hg amend
+
+ $ hg exp
+ # HG changeset patch
+ # User test
+ # Date 0 0
+ # Thu Jan 01 00:00:00 1970 +0000
+ # Node ID c9fa1a715c1b7661c0fafb362a9f30bd75878d7d
+ # Parent 87d6d66763085b629e6d7ed56778c79827273022
+ Added h
+
+ diff -r 87d6d6676308 -r c9fa1a715c1b h
+ --- /dev/null Thu Jan 01 00:00:00 1970 +0000
+ +++ b/h Thu Jan 01 00:00:00 1970 +0000
+ @@ -0,0 +1,2 @@
+ +foo
+ +bar
+
+ $ hg glog --hidden
+ @ 8:c9fa1a715c1b Added h
+ |
+ | x 7:ec2426147f0e Added h
+ |/
+ o 6:87d6d6676308 Added g
+ |
+ o 5:825660c69f0c Added f
+ |
+ o 4:aa98ab95a928 Added e
+ |
+ o 3:62615734edd5 Added d
+ |
+ o 2:28ad74487de9 Added c
+ |
+ o 1:29becc82797a Added b
+ |
+ o 0:18d04c59bb5d Added a
+
+ $ hg unamend
+ $ hg glog --hidden
+ @ 9:46d02d47eec6 Added h
+ |
+ | x 8:c9fa1a715c1b Added h
+ |/
+ | x 7:ec2426147f0e Added h
+ |/
+ o 6:87d6d6676308 Added g
+ |
+ o 5:825660c69f0c Added f
+ |
+ o 4:aa98ab95a928 Added e
+ |
+ o 3:62615734edd5 Added d
+ |
+ o 2:28ad74487de9 Added c
+ |
+ o 1:29becc82797a Added b
+ |
+ o 0:18d04c59bb5d Added a
+
+ $ hg diff
+ diff -r 46d02d47eec6 h
+ --- a/h Thu Jan 01 00:00:00 1970 +0000
+ +++ b/h Thu Jan 01 00:00:00 1970 +0000
+ @@ -1,1 +1,2 @@
+ foo
+ +bar
+
+ $ hg exp
+ # HG changeset patch
+ # User test
+ # Date 0 0
+ # Thu Jan 01 00:00:00 1970 +0000
+ # Node ID 46d02d47eec6ca096b8dcab3f8f5579c40c3dd9a
+ # Parent 87d6d66763085b629e6d7ed56778c79827273022
+ Added h
+
+ diff -r 87d6d6676308 -r 46d02d47eec6 h
+ --- /dev/null Thu Jan 01 00:00:00 1970 +0000
+ +++ b/h Thu Jan 01 00:00:00 1970 +0000
+ @@ -0,0 +1,1 @@
+ +foo
+
+ $ hg status
+ M h
+
+ $ hg log -r . -T '{extras % "{extra}\n"}' --config alias.log=log
+ branch=default
+ unamend_source=c9fa1a715c1b7661c0fafb362a9f30bd75878d7d
+
+Using unamend to undo an unamend (intentional)
+
+ $ hg unamend
+ $ hg exp
+ # HG changeset patch
+ # User test
+ # Date 0 0
+ # Thu Jan 01 00:00:00 1970 +0000
+ # Node ID 850ddfc1bc662997ec6094ada958f01f0cc8070a
+ # Parent 87d6d66763085b629e6d7ed56778c79827273022
+ Added h
+
+ diff -r 87d6d6676308 -r 850ddfc1bc66 h
+ --- /dev/null Thu Jan 01 00:00:00 1970 +0000
+ +++ b/h Thu Jan 01 00:00:00 1970 +0000
+ @@ -0,0 +1,2 @@
+ +foo
+ +bar
+ $ hg diff
+
+Unamend on a dirty working directory
+
+ $ echo "bar" >> a
+ $ hg amend
+ $ echo "foobar" >> a
+ $ echo "bar" >> b
+ $ hg status
+ M a
+ M b
+
+ $ hg unamend
+
+ $ hg status
+ M a
+ M b
+
+ $ hg diff
+ diff -r ec338db45d51 a
+ --- a/a Thu Jan 01 00:00:00 1970 +0000
+ +++ b/a Thu Jan 01 00:00:00 1970 +0000
+ @@ -1,1 +1,3 @@
+ foo
+ +bar
+ +foobar
+ diff -r ec338db45d51 b
+ --- a/b Thu Jan 01 00:00:00 1970 +0000
+ +++ b/b Thu Jan 01 00:00:00 1970 +0000
+ @@ -1,1 +1,2 @@
+ foo
+ +bar
+
+Unamending an added file
+
+ $ hg ci -m "Added things to a and b"
+ $ echo foo > bar
+ $ hg add bar
+ $ hg amend
+
+ $ hg unamend
+ $ hg status
+ A bar
+
+ $ hg revert --all
+ forgetting bar
+
+Unamending a removed file
+
+ $ hg remove a
+ $ hg amend
+
+ $ hg unamend
+ $ hg status
+ R a
+ ? bar
+
+ $ hg revert --all
+ undeleting a
+
+Unamending an added file with dirty wdir status
+
+ $ hg add bar
+ $ hg amend
+ $ echo bar >> bar
+ $ hg status
+ M bar
+
+ $ hg unamend
+ $ hg status
+ A bar
+ $ hg diff
+ diff -r 7f79409af972 bar
+ --- /dev/null Thu Jan 01 00:00:00 1970 +0000
+ +++ b/bar Thu Jan 01 00:00:00 1970 +0000
+ @@ -0,0 +1,2 @@
+ +foo
+ +bar
+
+ $ hg revert --all
+ forgetting bar
+
+Unamending in the middle of a stack
+
+ $ hg glog
+ @ 19:7f79409af972 Added things to a and b
+ |
+ o 12:ec338db45d51 Added h
+ |
+ o 6:87d6d6676308 Added g
+ |
+ o 5:825660c69f0c Added f
+ |
+ o 4:aa98ab95a928 Added e
+ |
+ o 3:62615734edd5 Added d
+ |
+ o 2:28ad74487de9 Added c
+ |
+ o 1:29becc82797a Added b
+ |
+ o 0:18d04c59bb5d Added a
+
+ $ hg up 5
+ 2 files updated, 0 files merged, 2 files removed, 0 files unresolved
+ $ echo bar >> f
+ $ hg amend
+ 3 new orphan changesets
+ $ hg rebase -s 6 -d . -q
+
+ $ hg glog
+ o 23:03ddd6fc5af1 Added things to a and b
+ |
+ o 22:3e7b64ee157b Added h
+ |
+ o 21:49635b68477e Added g
+ |
+ @ 20:93f0e8ffab32 Added f
+ |
+ o 4:aa98ab95a928 Added e
+ |
+ o 3:62615734edd5 Added d
+ |
+ o 2:28ad74487de9 Added c
+ |
+ o 1:29becc82797a Added b
+ |
+ o 0:18d04c59bb5d Added a
+
+
+ $ hg --config experimental.evolution=createmarkers unamend
+ abort: cannot unamend changeset with children
+ [255]
+
+ $ hg unamend
+ 3 new orphan changesets
+
+Trying to unamend a public changeset
+
+ $ hg up -C 23
+ 5 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ $ hg phase -r . -p
+ 1 new phase-divergent changesets
+ $ hg unamend
+ abort: cannot unamend public changesets
+ (see 'hg help phases' for details)
+ [255]
+
+Testing whether unamend retains copies or not
+
+ $ hg status
+ ? bar
+
+ $ hg mv a foo
+
+ $ hg ci -m "Moved a to foo"
+ $ hg exp --git
+ # HG changeset patch
+ # User test
+ # Date 0 0
+ # Thu Jan 01 00:00:00 1970 +0000
+ # Node ID cfef290346fbee5126313d7e1aab51d877679b09
+ # Parent 03ddd6fc5af19e028c44a2fd6d790dd22712f231
+ Moved a to foo
+
+ diff --git a/a b/foo
+ rename from a
+ rename to foo
+
+ $ hg mv b foobar
+ $ hg diff --git
+ diff --git a/b b/foobar
+ rename from b
+ rename to foobar
+ $ hg amend
+
+ $ hg exp --git
+ # HG changeset patch
+ # User test
+ # Date 0 0
+ # Thu Jan 01 00:00:00 1970 +0000
+ # Node ID eca050985275bb271ce3092b54e56ea5c85d29a3
+ # Parent 03ddd6fc5af19e028c44a2fd6d790dd22712f231
+ Moved a to foo
+
+ diff --git a/a b/foo
+ rename from a
+ rename to foo
+ diff --git a/b b/foobar
+ rename from b
+ rename to foobar
+
+ $ hg mv c wat
+ $ hg unamend
+
+Retained copies in the new predecessor commit
+
+ $ hg exp --git
+ # HG changeset patch
+ # User test
+ # Date 0 0
+ # Thu Jan 01 00:00:00 1970 +0000
+ # Node ID 552e3af4f01f620f88ca27be1f898316235b736a
+ # Parent 03ddd6fc5af19e028c44a2fd6d790dd22712f231
+ Moved a to foo
+
+ diff --git a/a b/foo
+ rename from a
+ rename to foo
+
+Retained copies in the working directory
+
+ $ hg diff --git
+ diff --git a/b b/foobar
+ rename from b
+ rename to foobar
+ diff --git a/c b/wat
+ rename from c
+ rename to wat
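
The rename checks above go through `hg exp --git` and `hg diff --git`; as an informal cross-check (an illustrative sketch rather than test content, using only stock Mercurial commands that also appear elsewhere in this patch), the same copy information can be read back like this:

  # Uncommitted renames (such as c -> wat above) and their copy sources:
  hg status -C
  # Renames recorded in the current commit, via the template engine:
  hg log -r . -T '{file_copies}\n'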
--- a/tests/test-uncommit.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-uncommit.t Mon Jan 22 17:53:02 2018 -0500
@@ -41,6 +41,7 @@
$ hg uncommit
abort: cannot uncommit null changeset
+ (no changeset checked out)
[255]
Create some commits
@@ -189,6 +190,7 @@
$ hg bookmark
foo 9:48e5bd7cd583
$ hg uncommit
+ 3 new orphan changesets
$ hg status
M files
A file-abc
@@ -219,6 +221,7 @@
$ hg bookmark
foo 9:48e5bd7cd583
$ hg uncommit file-ab
+ 1 new orphan changesets
$ hg status
A file-ab
@@ -241,21 +244,21 @@
|
o 11:8eb87968f2edb7f27f27fe676316e179de65fff6 added file-ab
|
- | o 10:5dc89ca4486f8a88716c5797fa9f498d13d7c2e1 new abc
+ | * 10:5dc89ca4486f8a88716c5797fa9f498d13d7c2e1 new abc
| |
- | | o 9:48e5bd7cd583eb24164ef8b89185819c84c96ed7 files abcde + foo
+ | | * 9:48e5bd7cd583eb24164ef8b89185819c84c96ed7 files abcde + foo
| | |
| | | x 8:83815831694b1271e9f207cb1b79b2b19275edcb files abcde + foo
| | |/
| | | x 7:0977fa602c2fd7d8427ed4e7ee15ea13b84c9173 update files for abcde
| | |/
- | | o 6:3727deee06f72f5ffa8db792ee299cf39e3e190b new change abcde
+ | | * 6:3727deee06f72f5ffa8db792ee299cf39e3e190b new change abcde
| | |
| | | x 5:0c07a3ccda771b25f1cb1edbd02e683723344ef1 new change abcde
| | |/
| | | x 4:6c4fd43ed714e7fcd8adbaa7b16c953c2e985b60 added file-abcde
| | |/
- | | o 3:6db330d65db434145c0b59d291853e9a84719b24 added file-abcd
+ | | * 3:6db330d65db434145c0b59d291853e9a84719b24 added file-abcd
| | |
| | x 2:abf2df566fc193b3ac34d946e63c1583e4d4732b added file-abc
| |/
--- a/tests/test-unified-test.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-unified-test.t Mon Jan 22 17:53:02 2018 -0500
@@ -81,7 +81,7 @@
fo?/bar\r (no-eol) (glob) (esc)
#if windows
$ printf 'foo\\bar\r'
- foo/bar\r (no-eol) (glob) (esc)
+ foo/bar\r (no-eol) (esc)
#endif
$ printf 'foo/bar\rfoo/bar\r'
foo.bar\r \(no-eol\) (re) (esc)
--- a/tests/test-unionrepo.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-unionrepo.t Mon Jan 22 17:53:02 2018 -0500
@@ -167,4 +167,4 @@
"hg files -v" to call fctx.size() -> fctx.iscensored()
$ hg files -R union:b+a -r2 -v
- 3 b/f (glob)
+ 3 b/f
--- a/tests/test-update-branches.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-update-branches.t Mon Jan 22 17:53:02 2018 -0500
@@ -520,6 +520,8 @@
the bookmark with it
$ hg up --quiet 0 # we should be able to update to 3 directly
$ hg up --quiet --hidden 3 # but not implemented yet.
+ updating to a hidden changeset 6efa171f091b
+ (hidden revision '6efa171f091b' was rewritten as: d047485b3896)
$ hg book -f bm
$ hg up
1 files updated, 0 files merged, 0 files removed, 0 files unresolved
@@ -530,6 +532,8 @@
Test that 5 is detected as a valid destination from 1
$ hg up --quiet 0 # we should be able to update to 3 directly
$ hg up --quiet --hidden 3 # but not implemented yet.
+ updating to a hidden changeset 6efa171f091b
+ (hidden revision '6efa171f091b' was rewritten as: d047485b3896)
$ hg up 5
1 files updated, 0 files merged, 0 files removed, 0 files unresolved
--- a/tests/test-upgrade-repo.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-upgrade-repo.t Mon Jan 22 17:53:02 2018 -0500
@@ -54,6 +54,67 @@
$ hg init empty
$ cd empty
+ $ hg debugformat
+ format-variant repo
+ fncache: yes
+ dotencode: yes
+ generaldelta: yes
+ plain-cl-delta: yes
+ compression: zlib
+ $ hg debugformat --verbose
+ format-variant repo config default
+ fncache: yes yes yes
+ dotencode: yes yes yes
+ generaldelta: yes yes yes
+ plain-cl-delta: yes yes yes
+ compression: zlib zlib zlib
+ $ hg debugformat --verbose --config format.usegfncache=no
+ format-variant repo config default
+ fncache: yes yes yes
+ dotencode: yes yes yes
+ generaldelta: yes yes yes
+ plain-cl-delta: yes yes yes
+ compression: zlib zlib zlib
+ $ hg debugformat --verbose --config format.usegfncache=no --color=debug
+ format-variant repo config default
+ [formatvariant.name.uptodate|fncache: ][formatvariant.repo.uptodate| yes][formatvariant.config.default| yes][formatvariant.default| yes]
+ [formatvariant.name.uptodate|dotencode: ][formatvariant.repo.uptodate| yes][formatvariant.config.default| yes][formatvariant.default| yes]
+ [formatvariant.name.uptodate|generaldelta: ][formatvariant.repo.uptodate| yes][formatvariant.config.default| yes][formatvariant.default| yes]
+ [formatvariant.name.uptodate|plain-cl-delta:][formatvariant.repo.uptodate| yes][formatvariant.config.default| yes][formatvariant.default| yes]
+ [formatvariant.name.uptodate|compression: ][formatvariant.repo.uptodate| zlib][formatvariant.config.default| zlib][formatvariant.default| zlib]
+ $ hg debugformat -Tjson
+ [
+ {
+ "config": true,
+ "default": true,
+ "name": "fncache",
+ "repo": true
+ },
+ {
+ "config": true,
+ "default": true,
+ "name": "dotencode",
+ "repo": true
+ },
+ {
+ "config": true,
+ "default": true,
+ "name": "generaldelta",
+ "repo": true
+ },
+ {
+ "config": true,
+ "default": true,
+ "name": "plain-cl-delta",
+ "repo": true
+ },
+ {
+ "config": "zlib",
+ "default": "zlib",
+ "name": "compression",
+ "repo": "zlib"
+ }
+ ]
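
Since `hg debugformat -Tjson` emits one object per format variant with `name`, `repo`, `config` and `default` keys (as in the listing above), a small illustrative sketch of consuming that output, assuming a `python` interpreter on $PATH, might look like:

  # List format variants whose current repository value differs from
  # Mercurial's default; relies only on the keys shown in the JSON above.
  hg debugformat -Tjson | python -c 'import json, sys; print([v["name"] for v in json.load(sys.stdin) if v["repo"] != v["default"]])'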
$ hg debugupgraderepo
(no feature deficiencies found in existing repository)
performing an upgrade with "--run" will make the following changes:
@@ -72,6 +133,9 @@
redeltaall
deltas within internal storage will always be recalculated without reusing prior deltas; this will likely make execution run several times slower; this optimization is typically not needed
+ redeltafulladd
+ every revision will be re-added as if it was new content. It will go through the full storage mechanism giving extensions a chance to process it (eg. lfs). This is similar to "redeltaall" but even slower since more logic is involved.
+
--optimize can be used to add optimizations
@@ -93,6 +157,9 @@
redeltaall
deltas within internal storage will always be recalculated without reusing prior deltas; this will likely make execution run several times slower; this optimization is typically not needed
+ redeltafulladd
+ every revision will be re-added as if it was new content. It will go through the full storage mechanism giving extensions a chance to process it (eg. lfs). This is similar to "redeltaall" but even slower since more logic is involved.
+
Various sub-optimal detections work
@@ -101,6 +168,34 @@
> store
> EOF
+ $ hg debugformat
+ format-variant repo
+ fncache: no
+ dotencode: no
+ generaldelta: no
+ plain-cl-delta: yes
+ compression: zlib
+ $ hg debugformat --verbose
+ format-variant repo config default
+ fncache: no yes yes
+ dotencode: no yes yes
+ generaldelta: no yes yes
+ plain-cl-delta: yes yes yes
+ compression: zlib zlib zlib
+ $ hg debugformat --verbose --config format.usegeneraldelta=no
+ format-variant repo config default
+ fncache: no yes yes
+ dotencode: no yes yes
+ generaldelta: no no yes
+ plain-cl-delta: yes yes yes
+ compression: zlib zlib zlib
+ $ hg debugformat --verbose --config format.usegeneraldelta=no --color=debug
+ format-variant repo config default
+ [formatvariant.name.mismatchconfig|fncache: ][formatvariant.repo.mismatchconfig| no][formatvariant.config.default| yes][formatvariant.default| yes]
+ [formatvariant.name.mismatchconfig|dotencode: ][formatvariant.repo.mismatchconfig| no][formatvariant.config.default| yes][formatvariant.default| yes]
+ [formatvariant.name.mismatchdefault|generaldelta: ][formatvariant.repo.mismatchdefault| no][formatvariant.config.special| no][formatvariant.default| yes]
+ [formatvariant.name.uptodate|plain-cl-delta:][formatvariant.repo.uptodate| yes][formatvariant.config.default| yes][formatvariant.default| yes]
+ [formatvariant.name.uptodate|compression: ][formatvariant.repo.uptodate| zlib][formatvariant.config.default| zlib][formatvariant.default| zlib]
$ hg debugupgraderepo
repository lacks features recommended by current config options:
@@ -140,6 +235,9 @@
redeltaall
deltas within internal storage will always be recalculated without reusing prior deltas; this will likely make execution run several times slower; this optimization is typically not needed
+ redeltafulladd
+ every revision will be re-added as if it was new content. It will go through the full storage mechanism giving extensions a chance to process it (eg. lfs). This is similar to "redeltaall" but even slower since more logic is involved.
+
$ hg --config format.dotencode=false debugupgraderepo
repository lacks features recommended by current config options:
@@ -179,6 +277,9 @@
redeltaall
deltas within internal storage will always be recalculated without reusing prior deltas; this will likely make execution run several times slower; this optimization is typically not needed
+ redeltafulladd
+ every revision will be re-added as if it was new content. It will go through the full storage mechanism giving extensions a chance to process it (eg. lfs). This is similar to "redeltaall" but even slower since more logic is involved.
+
$ cd ..
@@ -350,5 +451,252 @@
removing temporary repository $TESTTMP/store-filenames/.hg/upgrade.* (glob)
copy of old repository backed up at $TESTTMP/store-filenames/.hg/upgradebackup.* (glob)
the old repository will not be deleted; remove it to free up disk space once the upgraded repository is verified
+ $ hg debugupgraderepo --run --optimize redeltafulladd
+ upgrade will perform the following actions:
+
+ requirements
+ preserved: dotencode, fncache, generaldelta, revlogv1, store
+
+ redeltafulladd
+ each revision will be added as new content to the internal storage; this will likely drastically slow down execution time, but some extensions might need it
+
+ beginning upgrade...
+ repository locked and read-only
+ creating temporary repository to stage migrated data: $TESTTMP/store-filenames/.hg/upgrade.* (glob)
+ (it is safe to interrupt this process any time before data migration completes)
+ migrating 3 total revisions (1 in filelogs, 1 in manifests, 1 in changelog)
+ migrating 109 bytes in store; 107 bytes tracked data
+ migrating 1 filelogs containing 1 revisions (0 bytes in store; 0 bytes tracked data)
+ finished migrating 1 filelog revisions across 1 filelogs; change in size: 0 bytes
+ migrating 1 manifests containing 1 revisions (46 bytes in store; 45 bytes tracked data)
+ finished migrating 1 manifest revisions across 1 manifests; change in size: 0 bytes
+ migrating changelog containing 1 revisions (63 bytes in store; 62 bytes tracked data)
+ finished migrating 1 changelog revisions; change in size: 0 bytes
+ finished migrating 3 total revisions; total change in store size: 0 bytes
+ copying .XX_special_filename
+ copying phaseroots
+ data fully migrated to temporary repository
+ marking source repository as being upgraded; clients will be unable to read from repository
+ starting in-place swap of repository data
+ replaced files will be backed up at $TESTTMP/store-filenames/.hg/upgradebackup.* (glob)
+ replacing store...
+ store replacement complete; repository was inconsistent for *s (glob)
+ finalizing requirements file and making repository readable again
+ removing temporary repository $TESTTMP/store-filenames/.hg/upgrade.* (glob)
+ copy of old repository backed up at $TESTTMP/store-filenames/.hg/upgradebackup.* (glob)
+ the old repository will not be deleted; remove it to free up disk space once the upgraded repository is verified
$ cd ..
+
+Check upgrading a large file repository
+---------------------------------------
+
+ $ hg init largefilesrepo
+ $ cat << EOF >> largefilesrepo/.hg/hgrc
+ > [extensions]
+ > largefiles =
+ > EOF
+
+ $ cd largefilesrepo
+ $ touch foo
+ $ hg add --large foo
+ $ hg -q commit -m initial
+ $ cat .hg/requires
+ dotencode
+ fncache
+ generaldelta
+ largefiles
+ revlogv1
+ store
+
+ $ hg debugupgraderepo --run
+ upgrade will perform the following actions:
+
+ requirements
+ preserved: dotencode, fncache, generaldelta, largefiles, revlogv1, store
+
+ beginning upgrade...
+ repository locked and read-only
+ creating temporary repository to stage migrated data: $TESTTMP/largefilesrepo/.hg/upgrade.* (glob)
+ (it is safe to interrupt this process any time before data migration completes)
+ migrating 3 total revisions (1 in filelogs, 1 in manifests, 1 in changelog)
+ migrating 163 bytes in store; 160 bytes tracked data
+ migrating 1 filelogs containing 1 revisions (42 bytes in store; 41 bytes tracked data)
+ finished migrating 1 filelog revisions across 1 filelogs; change in size: 0 bytes
+ migrating 1 manifests containing 1 revisions (52 bytes in store; 51 bytes tracked data)
+ finished migrating 1 manifest revisions across 1 manifests; change in size: 0 bytes
+ migrating changelog containing 1 revisions (69 bytes in store; 68 bytes tracked data)
+ finished migrating 1 changelog revisions; change in size: 0 bytes
+ finished migrating 3 total revisions; total change in store size: 0 bytes
+ copying phaseroots
+ data fully migrated to temporary repository
+ marking source repository as being upgraded; clients will be unable to read from repository
+ starting in-place swap of repository data
+ replaced files will be backed up at $TESTTMP/largefilesrepo/.hg/upgradebackup.* (glob)
+ replacing store...
+ store replacement complete; repository was inconsistent for *s (glob)
+ finalizing requirements file and making repository readable again
+ removing temporary repository $TESTTMP/largefilesrepo/.hg/upgrade.* (glob)
+ copy of old repository backed up at $TESTTMP/largefilesrepo/.hg/upgradebackup.* (glob)
+ the old repository will not be deleted; remove it to free up disk space once the upgraded repository is verified
+ $ cat .hg/requires
+ dotencode
+ fncache
+ generaldelta
+ largefiles
+ revlogv1
+ store
+
+ $ cat << EOF >> .hg/hgrc
+ > [extensions]
+ > lfs =
+ > [lfs]
+ > threshold = 10
+ > EOF
+ $ echo '123456789012345' > lfs.bin
+ $ hg ci -Am 'lfs.bin'
+ adding lfs.bin
+ $ grep lfs .hg/requires
+ lfs
+ $ find .hg/store/lfs -type f
+ .hg/store/lfs/objects/d0/beab232adff5ba365880366ad30b1edb85c4c5372442b5d2fe27adc96d653f
+
+ $ hg debugupgraderepo --run
+ upgrade will perform the following actions:
+
+ requirements
+ preserved: dotencode, fncache, generaldelta, largefiles, lfs, revlogv1, store
+
+ beginning upgrade...
+ repository locked and read-only
+ creating temporary repository to stage migrated data: $TESTTMP/largefilesrepo/.hg/upgrade.* (glob)
+ (it is safe to interrupt this process any time before data migration completes)
+ migrating 6 total revisions (2 in filelogs, 2 in manifests, 2 in changelog)
+ migrating 417 bytes in store; 467 bytes tracked data
+ migrating 2 filelogs containing 2 revisions (168 bytes in store; 182 bytes tracked data)
+ finished migrating 2 filelog revisions across 2 filelogs; change in size: 0 bytes
+ migrating 1 manifests containing 2 revisions (113 bytes in store; 151 bytes tracked data)
+ finished migrating 2 manifest revisions across 1 manifests; change in size: 0 bytes
+ migrating changelog containing 2 revisions (136 bytes in store; 134 bytes tracked data)
+ finished migrating 2 changelog revisions; change in size: 0 bytes
+ finished migrating 6 total revisions; total change in store size: 0 bytes
+ copying phaseroots
+ copying lfs blob d0beab232adff5ba365880366ad30b1edb85c4c5372442b5d2fe27adc96d653f
+ data fully migrated to temporary repository
+ marking source repository as being upgraded; clients will be unable to read from repository
+ starting in-place swap of repository data
+ replaced files will be backed up at $TESTTMP/largefilesrepo/.hg/upgradebackup.* (glob)
+ replacing store...
+ store replacement complete; repository was inconsistent for *s (glob)
+ finalizing requirements file and making repository readable again
+ removing temporary repository $TESTTMP/largefilesrepo/.hg/upgrade.* (glob)
+ copy of old repository backed up at $TESTTMP/largefilesrepo/.hg/upgradebackup.* (glob)
+ the old repository will not be deleted; remove it to free up disk space once the upgraded repository is verified
+
+ $ grep lfs .hg/requires
+ lfs
+ $ find .hg/store/lfs -type f
+ .hg/store/lfs/objects/d0/beab232adff5ba365880366ad30b1edb85c4c5372442b5d2fe27adc96d653f
+ $ hg verify
+ checking changesets
+ checking manifests
+ crosschecking files in changesets and manifests
+ checking files
+ 2 files, 2 changesets, 2 total revisions
+ $ hg debugdata lfs.bin 0
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d0beab232adff5ba365880366ad30b1edb85c4c5372442b5d2fe27adc96d653f
+ size 16
+ x-is-binary 0
+
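
The pointer printed by `hg debugdata lfs.bin 0` follows the git-lfs pointer format, where `oid` is the sha256 of the raw blob content and `size` its length in bytes; a rough cross-check against the working-copy file and the stored object (an illustrative sketch, assuming a `python` interpreter on $PATH) could be:

  # The sha256 of the file content should match the "oid sha256:..." line of
  # the pointer, and the same digest names the blob under .hg/store/lfs/objects/.
  python -c 'import hashlib; print(hashlib.sha256(open("lfs.bin", "rb").read()).hexdigest())'
  find .hg/store/lfs -type f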
+ $ cd ..
+
+repository config is taken into account
+---------------------------------------
+
+ $ cat << EOF >> $HGRCPATH
+ > [format]
+ > maxchainlen = 1
+ > EOF
+
+ $ hg init localconfig
+ $ cd localconfig
+ $ cat << EOF > file
+ > some content
+ > with some length
+ > to make sure we get a delta
+ > after changes
+ > very long
+ > very long
+ > very long
+ > very long
+ > very long
+ > very long
+ > very long
+ > very long
+ > very long
+ > very long
+ > very long
+ > EOF
+ $ hg -q commit -A -m A
+ $ echo "new line" >> file
+ $ hg -q commit -m B
+ $ echo "new line" >> file
+ $ hg -q commit -m C
+
+ $ cat << EOF >> .hg/hgrc
+ > [format]
+ > maxchainlen = 9001
+ > EOF
+ $ hg config format
+ format.maxchainlen=9001
+ $ hg debugindex file
+ rev offset length delta linkrev nodeid p1 p2
+ 0 0 77 -1 0 bcc1d3df78b2 000000000000 000000000000
+ 1 77 21 0 1 af3e29f7a72e bcc1d3df78b2 000000000000
+ 2 98 84 -1 2 8daf79c5522b af3e29f7a72e 000000000000
+
+ $ hg debugupgraderepo --run --optimize redeltaall
+ upgrade will perform the following actions:
+
+ requirements
+ preserved: dotencode, fncache, generaldelta, revlogv1, store
+
+ redeltaall
+ deltas within internal storage will be fully recomputed; this will likely drastically slow down execution time
+
+ beginning upgrade...
+ repository locked and read-only
+ creating temporary repository to stage migrated data: $TESTTMP/localconfig/.hg/upgrade.* (glob)
+ (it is safe to interrupt this process any time before data migration completes)
+ migrating 9 total revisions (3 in filelogs, 3 in manifests, 3 in changelog)
+ migrating 497 bytes in store; 882 bytes tracked data
+ migrating 1 filelogs containing 3 revisions (182 bytes in store; 573 bytes tracked data)
+ finished migrating 3 filelog revisions across 1 filelogs; change in size: -63 bytes
+ migrating 1 manifests containing 3 revisions (141 bytes in store; 138 bytes tracked data)
+ finished migrating 3 manifest revisions across 1 manifests; change in size: 0 bytes
+ migrating changelog containing 3 revisions (174 bytes in store; 171 bytes tracked data)
+ finished migrating 3 changelog revisions; change in size: 0 bytes
+ finished migrating 9 total revisions; total change in store size: -63 bytes
+ copying phaseroots
+ data fully migrated to temporary repository
+ marking source repository as being upgraded; clients will be unable to read from repository
+ starting in-place swap of repository data
+ replaced files will be backed up at $TESTTMP/localconfig/.hg/upgradebackup.* (glob)
+ replacing store...
+ store replacement complete; repository was inconsistent for *s (glob)
+ finalizing requirements file and making repository readable again
+ removing temporary repository $TESTTMP/localconfig/.hg/upgrade.* (glob)
+ copy of old repository backed up at $TESTTMP/localconfig/.hg/upgradebackup.* (glob)
+ the old repository will not be deleted; remove it to free up disk space once the upgraded repository is verified
+ $ hg debugindex file
+ rev offset length delta linkrev nodeid p1 p2
+ 0 0 77 -1 0 bcc1d3df78b2 000000000000 000000000000
+ 1 77 21 0 1 af3e29f7a72e bcc1d3df78b2 000000000000
+ 2 98 21 1 2 8daf79c5522b af3e29f7a72e 000000000000
+ $ cd ..
+
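The two `hg debugindex file` listings above show rev 2 stored as an 84-byte full text (delta -1) while the global `maxchainlen = 1` was in effect, and as a 21-byte delta against rev 1 once the upgrade honored the repository-local `maxchainlen = 9001`. A minimal sketch for inspecting the same two pieces of information from inside any repository (substituting a tracked file for `file`):

  # Effective chain-length limit; the repository's .hg/hgrc overrides $HGRCPATH.
  hg config format.maxchainlen
  # Per-revision storage: a delta column of -1 means a full snapshot, any other
  # value names the revision the delta was built against.
  hg debugindex file
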
+ $ cat << EOF >> $HGRCPATH
+ > [format]
+ > maxchainlen = 9001
+ > EOF
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-url-download.t Mon Jan 22 17:53:02 2018 -0500
@@ -0,0 +1,68 @@
+#require serve
+
+ $ hg init server
+ $ hg serve -R server -p $HGPORT -d --pid-file=hg1.pid -E ../error.log
+ $ cat hg1.pid >> $DAEMON_PIDS
+
+Check basic fetching
+
+ $ hg debugdownload "http://localhost:$HGPORT/?cmd=lookup&key=tip"
+ 1 0000000000000000000000000000000000000000
+ $ hg debugdownload -o null.txt "http://localhost:$HGPORT/?cmd=lookup&key=null"
+ $ cat null.txt
+ 1 0000000000000000000000000000000000000000
+
+Check that the request is made through the usual Mercurial logic
+(the "rev" details page gives different content if the request has a Mercurial user agent)
+
+ $ get-with-headers.py --headeronly "localhost:$HGPORT" "rev/tip" content-type
+ 200 Script output follows
+ content-type: text/html; charset=ascii
+ $ hg debugdownload "http://localhost:$HGPORT/rev/tip"
+
+ # HG changeset patch
+ # User
+ # Date 0 0
+ # Node ID 0000000000000000000000000000000000000000
+
+
+
+
+
+Check another kind of compatible URL (a local file path)
+
+ $ hg debugdownload ./null.txt
+ 1 0000000000000000000000000000000000000000
+
+Test largefile URL
+------------------
+
+ $ cat << EOF >> $HGRCPATH
+ > [extensions]
+ > largefiles=
+ > EOF
+
+ $ killdaemons.py
+ $ rm -f error.log hg1.pid
+ $ hg serve -R server -p $HGPORT -d --pid-file=hg1.pid -E error.log
+ $ cat hg1.pid >> $DAEMON_PIDS
+
+ $ hg -R server debuglfput null.txt
+ a57b57b39ee4dc3da1e03526596007f480ecdbe8
+
+ $ hg --traceback debugdownload "largefile://a57b57b39ee4dc3da1e03526596007f480ecdbe8" --config paths.default=http://localhost:$HGPORT/
+ 1 0000000000000000000000000000000000000000
+
+From within a repository
+
+ $ hg clone http://localhost:$HGPORT/ client
+ no changes found
+ updating to branch default
+ 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
+
+ $ cd client
+ $ hg path
+ default = http://localhost:$HGPORT/
+ $ hg debugdownload "largefile://a57b57b39ee4dc3da1e03526596007f480ecdbe8"
+ 1 0000000000000000000000000000000000000000
+ $ cd ..
--- a/tests/test-url-rev.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-url-rev.t Mon Jan 22 17:53:02 2018 -0500
@@ -44,7 +44,7 @@
$ cat clone/.hg/hgrc
# example repository config (see 'hg help config' for more info)
[paths]
- default = $TESTTMP/repo#foo (glob)
+ default = $TESTTMP/repo#foo
# path aliases to other clones of this repo in URLs or filesystem paths
# (see 'hg help config.paths' for more info)
@@ -329,4 +329,4 @@
updating to branch default
2 files updated, 0 files merged, 0 files removed, 0 files unresolved
$ hg root -R '#foo'
- $TESTTMP/#foo (glob)
+ $TESTTMP/#foo
--- a/tests/test-walk.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-walk.t Mon Jan 22 17:53:02 2018 -0500
@@ -255,7 +255,7 @@
f mammals/Procyonidae/raccoon Procyonidae/raccoon
f mammals/skunk skunk
$ hg debugwalk .hg
- abort: path 'mammals/.hg' is inside nested repo 'mammals' (glob)
+ abort: path 'mammals/.hg' is inside nested repo 'mammals'
[255]
$ hg debugwalk ../.hg
abort: path contains illegal component: .hg
@@ -326,10 +326,10 @@
f mammals/Procyonidae/raccoon mammals/Procyonidae/raccoon
f mammals/skunk mammals/skunk
$ hg debugwalk ..
- abort: .. not under root '$TESTTMP/t' (glob)
+ abort: .. not under root '$TESTTMP/t'
[255]
$ hg debugwalk beans/../..
- abort: beans/../.. not under root '$TESTTMP/t' (glob)
+ abort: beans/../.. not under root '$TESTTMP/t'
[255]
$ hg debugwalk .hg
abort: path contains illegal component: .hg
@@ -338,12 +338,27 @@
abort: path contains illegal component: .hg
[255]
$ hg debugwalk beans/../.hg/data
- abort: path contains illegal component: .hg/data (glob)
+ abort: path contains illegal component: .hg/data
[255]
$ hg debugwalk beans/.hg
- abort: path 'beans/.hg' is inside nested repo 'beans' (glob)
+ abort: path 'beans/.hg' is inside nested repo 'beans'
[255]
+Test explicit paths and excludes:
+(BROKEN: nothing should be included, but wctx.walk() includes the explicitly named paths anyway)
+
+ $ hg debugwalk fennel -X fennel
+ matcher: <differencematcher m1=<patternmatcher patterns='(?:fennel(?:/|$))'>, m2=<includematcher includes='(?:fennel(?:/|$))'>>
+ f fennel fennel exact
+ $ hg debugwalk fennel -X 'f*'
+ matcher: <differencematcher m1=<patternmatcher patterns='(?:fennel(?:/|$))'>, m2=<includematcher includes='(?:f[^/]*(?:/|$))'>>
+ f fennel fennel exact
+ $ hg debugwalk beans/black -X 'path:beans'
+ matcher: <differencematcher m1=<patternmatcher patterns='(?:beans\\/black(?:/|$))'>, m2=<includematcher includes='(?:beans(?:/|$))'>>
+ f beans/black beans/black exact
+ $ hg debugwalk -I 'path:beans/black' -X 'path:beans'
+ matcher: <differencematcher m1=<includematcher includes='(?:beans\\/black(?:/|$))'>, m2=<includematcher includes='(?:beans(?:/|$))'>>
+
Test absolute paths:
$ hg debugwalk `pwd`/beans
@@ -355,7 +370,7 @@
f beans/pinto beans/pinto
f beans/turtle beans/turtle
$ hg debugwalk `pwd`/..
- abort: $TESTTMP/t/.. not under root '$TESTTMP/t' (glob)
+ abort: $TESTTMP/t/.. not under root '$TESTTMP/t'
[255]
Test patterns:
@@ -378,7 +393,7 @@
f glob:glob glob:glob
$ hg debugwalk glob:glob
matcher: <patternmatcher patterns='(?:glob$)'>
- glob: No such file or directory
+ glob: $ENOENT$
$ hg debugwalk glob:glob:glob
matcher: <patternmatcher patterns='(?:glob\\:glob$)'>
f glob:glob glob:glob exact
--- a/tests/test-win32text.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-win32text.t Mon Jan 22 17:53:02 2018 -0500
@@ -112,7 +112,7 @@
abort: pretxncommit.crlf hook failed
[255]
$ hg revert -a
- forgetting d/f2 (glob)
+ forgetting d/f2
$ rm d/f2
$ hg rem f
@@ -177,10 +177,10 @@
$ for x in a b c d; do echo content > dupe/$x; done
$ hg -R dupe add
- adding dupe/a (glob)
- adding dupe/b (glob)
- adding dupe/c (glob)
- adding dupe/d (glob)
+ adding dupe/a
+ adding dupe/b
+ adding dupe/c
+ adding dupe/d
$ $PYTHON unix2dos.py dupe/b dupe/c dupe/d
$ hg -R dupe ci -m a dupe/a
$ hg -R dupe ci -m b/c dupe/[bc]
@@ -385,7 +385,7 @@
WARNING: f4.bat already has CRLF line endings
and does not need EOL conversion by the win32text plugin.
Before your next commit, please reconsider your encode/decode settings in
- Mercurial.ini or $TESTTMP/t/.hg/hgrc. (glob)
+ Mercurial.ini or $TESTTMP/t/.hg/hgrc.
3 files updated, 0 files merged, 0 files removed, 0 files unresolved
$ cat bin
hello\x00\r (esc)
--- a/tests/test-wireproto.t Mon Jan 08 16:07:51 2018 -0800
+++ b/tests/test-wireproto.t Mon Jan 22 17:53:02 2018 -0500
@@ -94,23 +94,23 @@
* - - [*] "POST /?cmd=debugwireargs HTTP/1.1" 200 - x-hgargs-post:1033* (glob)
* - - [*] "POST /?cmd=debugwireargs HTTP/1.1" 200 - x-hgargs-post:1033* (glob)
* - - [*] "GET /?cmd=capabilities HTTP/1.1" 200 - (glob)
- * - - [*] "GET /?cmd=debugwireargs HTTP/1.1" 200 - x-hgarg-1:four=quatre&one=un&three=trois&two=deux x-hgproto-1:0.1 0.2 comp=*zlib,none,bzip2 (glob)
- * - - [*] "GET /?cmd=debugwireargs HTTP/1.1" 200 - x-hgarg-1:four=quatre&one=un&three=trois&two=deux x-hgproto-1:0.1 0.2 comp=*zlib,none,bzip2 (glob)
+ $LOCALIP - - [$LOGDATE$] "GET /?cmd=debugwireargs HTTP/1.1" 200 - x-hgarg-1:four=quatre&one=un&three=trois&two=deux x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ (glob)
+ $LOCALIP - - [$LOGDATE$] "GET /?cmd=debugwireargs HTTP/1.1" 200 - x-hgarg-1:four=quatre&one=un&three=trois&two=deux x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ (glob)
* - - [*] "GET /?cmd=capabilities HTTP/1.1" 200 - (glob)
- * - - [*] "GET /?cmd=debugwireargs HTTP/1.1" 200 - x-hgarg-1:four=qu++atre&one=+un&three=trois+&two=deux x-hgproto-1:0.1 0.2 comp=*zlib,none,bzip2 (glob)
- * - - [*] "GET /?cmd=debugwireargs HTTP/1.1" 200 - x-hgarg-1:four=qu++atre&one=+un&three=trois+&two=deux x-hgproto-1:0.1 0.2 comp=*zlib,none,bzip2 (glob)
+ $LOCALIP - - [$LOGDATE$] "GET /?cmd=debugwireargs HTTP/1.1" 200 - x-hgarg-1:four=qu++atre&one=+un&three=trois+&two=deux x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ (glob)
+ $LOCALIP - - [$LOGDATE$] "GET /?cmd=debugwireargs HTTP/1.1" 200 - x-hgarg-1:four=qu++atre&one=+un&three=trois+&two=deux x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ (glob)
* - - [*] "GET /?cmd=capabilities HTTP/1.1" 200 - (glob)
- * - - [*] "GET /?cmd=debugwireargs HTTP/1.1" 200 - x-hgarg-1:four=vier&one=eins&two=zwei x-hgproto-1:0.1 0.2 comp=*zlib,none,bzip2 (glob)
- * - - [*] "GET /?cmd=debugwireargs HTTP/1.1" 200 - x-hgarg-1:four=vier&one=eins&two=zwei x-hgproto-1:0.1 0.2 comp=*zlib,none,bzip2 (glob)
+ $LOCALIP - - [$LOGDATE$] "GET /?cmd=debugwireargs HTTP/1.1" 200 - x-hgarg-1:four=vier&one=eins&two=zwei x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ (glob)
+ $LOCALIP - - [$LOGDATE$] "GET /?cmd=debugwireargs HTTP/1.1" 200 - x-hgarg-1:four=vier&one=eins&two=zwei x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ (glob)
* - - [*] "GET /?cmd=capabilities HTTP/1.1" 200 - (glob)
- * - - [*] "GET /?cmd=debugwireargs HTTP/1.1" 200 - x-hgarg-1:one=eins&two=zwei x-hgproto-1:0.1 0.2 comp=*zlib,none,bzip2 (glob)
- * - - [*] "GET /?cmd=debugwireargs HTTP/1.1" 200 - x-hgarg-1:one=eins&two=zwei x-hgproto-1:0.1 0.2 comp=*zlib,none,bzip2 (glob)
+ $LOCALIP - - [$LOGDATE$] "GET /?cmd=debugwireargs HTTP/1.1" 200 - x-hgarg-1:one=eins&two=zwei x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ (glob)
+ $LOCALIP - - [$LOGDATE$] "GET /?cmd=debugwireargs HTTP/1.1" 200 - x-hgarg-1:one=eins&two=zwei x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ (glob)
* - - [*] "GET /?cmd=capabilities HTTP/1.1" 200 - (glob)
- * - - [*] "GET /?cmd=debugwireargs HTTP/1.1" 200 - x-hgarg-1:one=eins&two=zwei x-hgproto-1:0.1 0.2 comp=*zlib,none,bzip2 (glob)
- * - - [*] "GET /?cmd=debugwireargs HTTP/1.1" 200 - x-hgarg-1:one=eins&two=zwei x-hgproto-1:0.1 0.2 comp=*zlib,none,bzip2 (glob)
+ $LOCALIP - - [$LOGDATE$] "GET /?cmd=debugwireargs HTTP/1.1" 200 - x-hgarg-1:one=eins&two=zwei x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ (glob)
+ $LOCALIP - - [$LOGDATE$] "GET /?cmd=debugwireargs HTTP/1.1" 200 - x-hgarg-1:one=eins&two=zwei x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ (glob)
* - - [*] "GET /?cmd=capabilities HTTP/1.1" 200 - (glob)
- * - - [*] "GET /?cmd=debugwireargs HTTP/1.1" 200 - x-hgarg-1:four=onethousandcharactersxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx&one x-hgarg-2:=un&three=trois&two=deux x-hgproto-1:0.1 0.2 comp=*zlib,none,bzip2 (glob)
- * - - [*] "GET /?cmd=debugwireargs HTTP/1.1" 200 - x-hgarg-1:four=onethousandcharactersxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx&one x-hgarg-2:=un&three=trois&two=deux x-hgproto-1:0.1 0.2 comp=*zlib,none,bzip2 (glob)
+ $LOCALIP - - [$LOGDATE$] "GET /?cmd=debugwireargs HTTP/1.1" 200 - x-hgarg-1:four=onethousandcharactersxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx&one x-hgarg-2:=un&three=trois&two=deux x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ (glob)
+ $LOCALIP - - [$LOGDATE$] "GET /?cmd=debugwireargs HTTP/1.1" 200 - x-hgarg-1:four=onethousandcharactersxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx&one x-hgarg-2:=un&three=trois&two=deux x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ (glob)
HTTP without the httpheader capability:
@@ -133,17 +133,17 @@
$ cat error2.log
$ cat access2.log
* - - [*] "GET /?cmd=capabilities HTTP/1.1" 200 - (glob)
- * - - [*] "GET /?cmd=debugwireargs&four=quatre&one=un&three=trois&two=deux HTTP/1.1" 200 - x-hgproto-1:0.1 0.2 comp=*zlib,none,bzip2 (glob)
- * - - [*] "GET /?cmd=debugwireargs&four=quatre&one=un&three=trois&two=deux HTTP/1.1" 200 - x-hgproto-1:0.1 0.2 comp=*zlib,none,bzip2 (glob)
+ $LOCALIP - - [$LOGDATE$] "GET /?cmd=debugwireargs&four=quatre&one=un&three=trois&two=deux HTTP/1.1" 200 - x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ (glob)
+ $LOCALIP - - [$LOGDATE$] "GET /?cmd=debugwireargs&four=quatre&one=un&three=trois&two=deux HTTP/1.1" 200 - x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ (glob)
* - - [*] "GET /?cmd=capabilities HTTP/1.1" 200 - (glob)
- * - - [*] "GET /?cmd=debugwireargs&four=vier&one=eins&two=zwei HTTP/1.1" 200 - x-hgproto-1:0.1 0.2 comp=*zlib,none,bzip2 (glob)
- * - - [*] "GET /?cmd=debugwireargs&four=vier&one=eins&two=zwei HTTP/1.1" 200 - x-hgproto-1:0.1 0.2 comp=*zlib,none,bzip2 (glob)
+ $LOCALIP - - [$LOGDATE$] "GET /?cmd=debugwireargs&four=vier&one=eins&two=zwei HTTP/1.1" 200 - x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ (glob)
+ $LOCALIP - - [$LOGDATE$] "GET /?cmd=debugwireargs&four=vier&one=eins&two=zwei HTTP/1.1" 200 - x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ (glob)
* - - [*] "GET /?cmd=capabilities HTTP/1.1" 200 - (glob)
- * - - [*] "GET /?cmd=debugwireargs&one=eins&two=zwei HTTP/1.1" 200 - x-hgproto-1:0.1 0.2 comp=*zlib,none,bzip2 (glob)
- * - - [*] "GET /?cmd=debugwireargs&one=eins&two=zwei HTTP/1.1" 200 - x-hgproto-1:0.1 0.2 comp=*zlib,none,bzip2 (glob)
+ $LOCALIP - - [$LOGDATE$] "GET /?cmd=debugwireargs&one=eins&two=zwei HTTP/1.1" 200 - x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ (glob)
+ $LOCALIP - - [$LOGDATE$] "GET /?cmd=debugwireargs&one=eins&two=zwei HTTP/1.1" 200 - x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ (glob)
* - - [*] "GET /?cmd=capabilities HTTP/1.1" 200 - (glob)
- * - - [*] "GET /?cmd=debugwireargs&one=eins&two=zwei HTTP/1.1" 200 - x-hgproto-1:0.1 0.2 comp=*zlib,none,bzip2 (glob)
- * - - [*] "GET /?cmd=debugwireargs&one=eins&two=zwei HTTP/1.1" 200 - x-hgproto-1:0.1 0.2 comp=*zlib,none,bzip2 (glob)
+ $LOCALIP - - [$LOGDATE$] "GET /?cmd=debugwireargs&one=eins&two=zwei HTTP/1.1" 200 - x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ (glob)
+ $LOCALIP - - [$LOGDATE$] "GET /?cmd=debugwireargs&one=eins&two=zwei HTTP/1.1" 200 - x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ (glob)
SSH (try to exercise the ssh functionality with a dummy script):