--- a/Makefile Mon Apr 13 16:30:13 2020 +0300
+++ b/Makefile Thu Apr 16 22:51:09 2020 +0530
@@ -64,6 +64,7 @@
$(MAKE) -C doc
cleanbutpackages:
+ rm -f hg.exe
-$(PYTHON) setup.py clean --all # ignore errors from this command
find contrib doc hgext hgext3rd i18n mercurial tests hgdemandimport \
\( -name '*.py[cdo]' -o -name '*.so' \) -exec rm -f '{}' ';'
--- a/black.toml Mon Apr 13 16:30:13 2020 +0300
+++ b/black.toml Thu Apr 16 22:51:09 2020 +0530
@@ -9,7 +9,6 @@
| \.mypy_cache/
| \.venv/
| mercurial/thirdparty/
-| contrib/python-zstandard/
'''
skip-string-normalization = true
quiet = true
--- a/contrib/benchmarks/__init__.py Mon Apr 13 16:30:13 2020 +0300
+++ b/contrib/benchmarks/__init__.py Thu Apr 16 22:51:09 2020 +0530
@@ -81,7 +81,7 @@
output = ui.popbuffer()
match = outputre.search(output)
if not match:
- raise ValueError("Invalid output {0}".format(output))
+ raise ValueError("Invalid output {}".format(output))
return float(match.group(1))
--- a/contrib/check-py3-compat.py Mon Apr 13 16:30:13 2020 +0300
+++ b/contrib/check-py3-compat.py Thu Apr 16 22:51:09 2020 +0530
@@ -32,7 +32,7 @@
for node in ast.walk(root):
if isinstance(node, ast.ImportFrom):
if node.module == '__future__':
- futures |= set(n.name for n in node.names)
+ futures |= {n.name for n in node.names}
elif isinstance(node, ast.Print):
haveprint = True
--- a/contrib/chg/chg.c Mon Apr 13 16:30:13 2020 +0300
+++ b/contrib/chg/chg.c Thu Apr 16 22:51:09 2020 +0530
@@ -226,6 +226,16 @@
}
argv[argsize - 1] = NULL;
+ const char *lc_ctype_env = getenv("LC_CTYPE");
+ if (lc_ctype_env == NULL) {
+ if (putenv("CHG_CLEAR_LC_CTYPE=") != 0)
+ abortmsgerrno("failed to putenv CHG_CLEAR_LC_CTYPE");
+ } else {
+ if (setenv("CHGORIG_LC_CTYPE", lc_ctype_env, 1) != 0) {
+			abortmsgerrno("failed to setenv CHGORIG_LC_CTYPE");
+ }
+ }
+
if (putenv("CHGINTERNALMARK=") != 0)
abortmsgerrno("failed to putenv");
if (execvp(hgcmd, (char **)argv) < 0)
@@ -364,8 +374,7 @@
/*
* Test whether the command is unsupported or not. This is not designed to
- * cover all cases. But it's fast, does not depend on the server and does
- * not return false positives.
+ * cover all cases. But it is fast and does not depend on the server.
*/
static int isunsupported(int argc, const char *argv[])
{
@@ -378,7 +387,12 @@
for (i = 0; i < argc; ++i) {
if (strcmp(argv[i], "--") == 0)
break;
- if (i == 0 && strcmp("serve", argv[i]) == 0)
+ /*
+		 * there can be false positives but no false negatives;
+		 * we cannot assume `serve` will always be the first argument
+ * because global options can be passed before the command name
+ */
+ if (strcmp("serve", argv[i]) == 0)
state |= SERVE;
else if (strcmp("-d", argv[i]) == 0 ||
strcmp("--daemon", argv[i]) == 0)
--- a/contrib/examples/fix.hgrc Mon Apr 13 16:30:13 2020 +0300
+++ b/contrib/examples/fix.hgrc Thu Apr 16 22:51:09 2020 +0530
@@ -6,7 +6,7 @@
rustfmt:pattern = set:**.rs
black:command = black --config=black.toml -
-black:pattern = set:**.py - mercurial/thirdparty/** - "contrib/python-zstandard/**"
+black:pattern = set:**.py - mercurial/thirdparty/**
# Mercurial doesn't have any Go code, but if we did this is how we
# would configure `hg fix` for Go:
--- a/contrib/heptapod-ci.yml Mon Apr 13 16:30:13 2020 +0300
+++ b/contrib/heptapod-ci.yml Thu Apr 16 22:51:09 2020 +0530
@@ -42,13 +42,13 @@
test-py2:
<<: *runtests
variables:
- RUNTEST_ARGS: "--blacklist /tmp/check-tests.txt"
+ RUNTEST_ARGS: " --no-rust --blacklist /tmp/check-tests.txt"
TEST_HGMODULEPOLICY: "c"
test-py3:
<<: *runtests
variables:
- RUNTEST_ARGS: "--blacklist /tmp/check-tests.txt"
+ RUNTEST_ARGS: " --no-rust --blacklist /tmp/check-tests.txt"
PYTHON: python3
TEST_HGMODULEPOLICY: "c"
@@ -69,13 +69,13 @@
<<: *runtests
variables:
HGWITHRUSTEXT: cpython
- RUNTEST_ARGS: "--blacklist /tmp/check-tests.txt"
+ RUNTEST_ARGS: "--rust --blacklist /tmp/check-tests.txt"
TEST_HGMODULEPOLICY: "rust+c"
test-py3-rust:
<<: *runtests
variables:
HGWITHRUSTEXT: cpython
- RUNTEST_ARGS: "--blacklist /tmp/check-tests.txt"
+ RUNTEST_ARGS: "--rust --blacklist /tmp/check-tests.txt"
PYTHON: python3
TEST_HGMODULEPOLICY: "rust+c"
--- a/contrib/import-checker.py Mon Apr 13 16:30:13 2020 +0300
+++ b/contrib/import-checker.py Thu Apr 16 22:51:09 2020 +0530
@@ -392,9 +392,10 @@
modnotfound = True
continue
yield found[1]
- if modnotfound:
+ if modnotfound and dottedpath != modulename:
# "dottedpath" is a package, but imported because of non-module
# lookup
+ # specifically allow "from . import foo" from __init__.py
yield dottedpath
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/contrib/packaging/pyoxidizer.bzl Thu Apr 16 22:51:09 2020 +0530
@@ -0,0 +1,60 @@
+# Instructions:
+#
+# 1. cargo install --version 0.5.0 pyoxidizer
+# 2. cd /path/to/hg
+# 3. pyoxidizer build --path contrib/packaging [--release]
+# 4. Run build/pyoxidizer/<arch>/<debug|release>/app/hg
+#
+# If you need to build again, you need to remove the build/lib.* and
+# build/temp.* directories, otherwise PyOxidizer fails to pick up C
+# extensions. This is a bug in PyOxidizer.
+
+ROOT = CWD + "/../.."
+
+set_build_path(ROOT + "/build/pyoxidizer")
+
+def make_exe():
+ dist = default_python_distribution()
+
+ code = "import hgdemandimport; hgdemandimport.enable(); from mercurial import dispatch; dispatch.run()"
+
+ config = PythonInterpreterConfig(
+ raw_allocator = "system",
+ run_eval = code,
+ # We want to let the user load extensions from the file system
+ filesystem_importer = True,
+ # We need this to make resourceutil happy, since it looks for sys.frozen.
+ sys_frozen = True,
+ legacy_windows_stdio = True,
+ )
+
+ exe = dist.to_python_executable(
+ name = "hg",
+ config = config,
+ )
+
+ # Use setup.py install to build Mercurial and collect Python resources to
+ # embed in the executable.
+ resources = dist.setup_py_install(ROOT)
+ exe.add_python_resources(resources)
+
+ return exe
+
+def make_install(exe):
+ m = FileManifest()
+
+ # `hg` goes in root directory.
+ m.add_python_resource(".", exe)
+
+ templates = glob(
+ include=[ROOT + "/mercurial/templates/**/*"],
+ strip_prefix = ROOT + "/mercurial/",
+ )
+ m.add_manifest(templates)
+
+ return m
+
+register_target("exe", make_exe)
+register_target("app", make_install, depends = ["exe"], default = True)
+
+resolve_targets()
--- a/contrib/perf.py Mon Apr 13 16:30:13 2020 +0300
+++ b/contrib/perf.py Thu Apr 16 22:51:09 2020 +0530
@@ -1536,6 +1536,7 @@
matters.
Example of useful set to test:
+
* tip
* 0
* -10:
@@ -2522,7 +2523,7 @@
}
for diffopt in ('', 'w', 'b', 'B', 'wB'):
- opts = dict((options[c], b'1') for c in diffopt)
+ opts = {options[c]: b'1' for c in diffopt}
def d():
ui.pushbuffer()
@@ -3047,7 +3048,7 @@
# Verify engines argument.
if engines:
- engines = set(e.strip() for e in engines.split(b','))
+ engines = {e.strip() for e in engines.split(b',')}
for engine in engines:
try:
util.compressionengines[engine]
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/contrib/phab-clean.py Thu Apr 16 22:51:09 2020 +0530
@@ -0,0 +1,93 @@
+#!/usr/bin/env python
+#
+# A small script to automatically reject idle Diffs
+#
+# you need to set the PHABBOT_USER and PHABBOT_TOKEN environment variables for authentication
+from __future__ import absolute_import, print_function
+
+import datetime
+import os
+import sys
+
+import phabricator
+
+MESSAGE = """There seems to have been no activities on this Diff for the past 3 Months.
+
+By policy, we are automatically moving it out of the `need-review` state.
+
+Please, move it back to `need-review` without hesitation if this diff should still be discussed.
+
+:baymax:need-review-idle:
+"""
+
+
+PHAB_URL = "https://phab.mercurial-scm.org/api/"
+USER = os.environ.get("PHABBOT_USER", "baymax")
+TOKEN = os.environ.get("PHABBOT_TOKEN")
+
+
+NOW = datetime.datetime.now()
+
+# 3 months in seconds
+DELAY = 60 * 60 * 24 * 30 * 3
+
+
+def get_all_diff(phab):
+    """Fetch all the diffs that need review"""
+ return phab.differential.query(
+ status="status-needs-review",
+ order="order-modified",
+ paths=[('HG', None)],
+ )
+
+
+def filter_diffs(diffs, older_than):
+    """filter diffs to only keep the ones unmodified for more than <older_than> seconds"""
+ olds = []
+ for d in diffs:
+ modified = int(d['dateModified'])
+ modified = datetime.datetime.fromtimestamp(modified)
+ d["idleFor"] = idle_for = NOW - modified
+ if idle_for.total_seconds() > older_than:
+ olds.append(d)
+ return olds
+
+
+def nudge_diff(phab, diff):
+    """Comment on the idle diff and reject it (moves it out of need-review)"""
+    diff_id = int(diff['id'])
+    phab.differential.createcomment(
+        revision_id=diff_id, message=MESSAGE, action="reject"
+    )
+
+
+if not USER:
+    # credentials are mandatory; bail out before touching the API
+    print(
+        "no user specified; please set PHABBOT_USER and PHABBOT_TOKEN",
+        file=sys.stderr,
+    )
+    sys.exit(1)
+elif not TOKEN:
+    msg = "no api-token specified; please set PHABBOT_USER and PHABBOT_TOKEN"
+    print(msg, file=sys.stderr)
+    sys.exit(1)
+
+phab = phabricator.Phabricator(USER, host=PHAB_URL, token=TOKEN)
+phab.connect()
+phab.update_interfaces()
+print('Hello "%s".' % phab.user.whoami()['realName'])
+
+diffs = get_all_diff(phab)
+print("Found %d Diffs" % len(diffs))
+olds = filter_diffs(diffs, DELAY)
+print("Found %d old Diffs" % len(olds))
+for d in olds:
+ diff_id = d['id']
+ status = d['statusName']
+ modified = int(d['dateModified'])
+ idle_for = d["idleFor"]
+ msg = 'nudging D%s in "%s" state for %s'
+ print(msg % (diff_id, status, idle_for))
+    # WARNING: the next line is live and will reject the diff on phab; comment it out for a dry run
+ nudge_diff(phab, d)
--- a/contrib/python-zstandard/make_cffi.py Mon Apr 13 16:30:13 2020 +0300
+++ b/contrib/python-zstandard/make_cffi.py Thu Apr 16 22:51:09 2020 +0530
@@ -52,7 +52,8 @@
# Headers whose preprocessed output will be fed into cdef().
HEADERS = [
- os.path.join(HERE, "zstd", *p) for p in (("zstd.h",), ("dictBuilder", "zdict.h"),)
+ os.path.join(HERE, "zstd", *p)
+ for p in (("zstd.h",), ("dictBuilder", "zdict.h"),)
]
INCLUDE_DIRS = [
@@ -139,7 +140,9 @@
env = dict(os.environ)
if getattr(compiler, "_paths", None):
env["PATH"] = compiler._paths
- process = subprocess.Popen(args + [input_file], stdout=subprocess.PIPE, env=env)
+ process = subprocess.Popen(
+ args + [input_file], stdout=subprocess.PIPE, env=env
+ )
output = process.communicate()[0]
ret = process.poll()
if ret:
--- a/contrib/python-zstandard/setup.py Mon Apr 13 16:30:13 2020 +0300
+++ b/contrib/python-zstandard/setup.py Thu Apr 16 22:51:09 2020 +0530
@@ -87,7 +87,9 @@
break
if not version:
- raise Exception("could not resolve package version; " "this should never happen")
+ raise Exception(
+ "could not resolve package version; " "this should never happen"
+ )
setup(
name="zstandard",
--- a/contrib/python-zstandard/setup_zstd.py Mon Apr 13 16:30:13 2020 +0300
+++ b/contrib/python-zstandard/setup_zstd.py Thu Apr 16 22:51:09 2020 +0530
@@ -138,12 +138,16 @@
if not system_zstd:
sources.update([os.path.join(actual_root, p) for p in zstd_sources])
if support_legacy:
- sources.update([os.path.join(actual_root, p) for p in zstd_sources_legacy])
+ sources.update(
+ [os.path.join(actual_root, p) for p in zstd_sources_legacy]
+ )
sources = list(sources)
include_dirs = set([os.path.join(actual_root, d) for d in ext_includes])
if not system_zstd:
- include_dirs.update([os.path.join(actual_root, d) for d in zstd_includes])
+ include_dirs.update(
+ [os.path.join(actual_root, d) for d in zstd_includes]
+ )
if support_legacy:
include_dirs.update(
[os.path.join(actual_root, d) for d in zstd_includes_legacy]
--- a/contrib/python-zstandard/tests/common.py Mon Apr 13 16:30:13 2020 +0300
+++ b/contrib/python-zstandard/tests/common.py Thu Apr 16 22:51:09 2020 +0530
@@ -50,7 +50,9 @@
os.environ.update(old_env)
if mod.backend != "cffi":
- raise Exception("got the zstandard %s backend instead of cffi" % mod.backend)
+ raise Exception(
+ "got the zstandard %s backend instead of cffi" % mod.backend
+ )
# If CFFI version is available, dynamically construct test methods
# that use it.
@@ -84,7 +86,9 @@
fn.__func__.func_defaults,
fn.__func__.func_closure,
)
- new_method = types.UnboundMethodType(new_fn, fn.im_self, fn.im_class)
+ new_method = types.UnboundMethodType(
+ new_fn, fn.im_self, fn.im_class
+ )
setattr(cls, name, new_method)
@@ -194,4 +198,6 @@
expensive_settings = hypothesis.settings(deadline=None, max_examples=10000)
hypothesis.settings.register_profile("expensive", expensive_settings)
- hypothesis.settings.load_profile(os.environ.get("HYPOTHESIS_PROFILE", "default"))
+ hypothesis.settings.load_profile(
+ os.environ.get("HYPOTHESIS_PROFILE", "default")
+ )
--- a/contrib/python-zstandard/tests/test_buffer_util.py Mon Apr 13 16:30:13 2020 +0300
+++ b/contrib/python-zstandard/tests/test_buffer_util.py Thu Apr 16 22:51:09 2020 +0530
@@ -67,7 +67,8 @@
self.skipTest("BufferWithSegments not available")
b = zstd.BufferWithSegments(
- b"foofooxfooxy", b"".join([ss.pack(0, 3), ss.pack(3, 4), ss.pack(7, 5)])
+ b"foofooxfooxy",
+ b"".join([ss.pack(0, 3), ss.pack(3, 4), ss.pack(7, 5)]),
)
self.assertEqual(len(b), 3)
self.assertEqual(b.size, 12)
@@ -83,17 +84,23 @@
if not hasattr(zstd, "BufferWithSegmentsCollection"):
self.skipTest("BufferWithSegmentsCollection not available")
- with self.assertRaisesRegex(ValueError, "must pass at least 1 argument"):
+ with self.assertRaisesRegex(
+ ValueError, "must pass at least 1 argument"
+ ):
zstd.BufferWithSegmentsCollection()
def test_argument_validation(self):
if not hasattr(zstd, "BufferWithSegmentsCollection"):
self.skipTest("BufferWithSegmentsCollection not available")
- with self.assertRaisesRegex(TypeError, "arguments must be BufferWithSegments"):
+ with self.assertRaisesRegex(
+ TypeError, "arguments must be BufferWithSegments"
+ ):
zstd.BufferWithSegmentsCollection(None)
- with self.assertRaisesRegex(TypeError, "arguments must be BufferWithSegments"):
+ with self.assertRaisesRegex(
+ TypeError, "arguments must be BufferWithSegments"
+ ):
zstd.BufferWithSegmentsCollection(
zstd.BufferWithSegments(b"foo", ss.pack(0, 3)), None
)
--- a/contrib/python-zstandard/tests/test_compressor.py Mon Apr 13 16:30:13 2020 +0300
+++ b/contrib/python-zstandard/tests/test_compressor.py Thu Apr 16 22:51:09 2020 +0530
@@ -24,7 +24,9 @@
def multithreaded_chunk_size(level, source_size=0):
- params = zstd.ZstdCompressionParameters.from_level(level, source_size=source_size)
+ params = zstd.ZstdCompressionParameters.from_level(
+ level, source_size=source_size
+ )
return 1 << (params.window_log + 2)
@@ -86,7 +88,9 @@
# This matches the test for read_to_iter() below.
cctx = zstd.ZstdCompressor(level=1, write_content_size=False)
- result = cctx.compress(b"f" * zstd.COMPRESSION_RECOMMENDED_INPUT_SIZE + b"o")
+ result = cctx.compress(
+ b"f" * zstd.COMPRESSION_RECOMMENDED_INPUT_SIZE + b"o"
+ )
self.assertEqual(
result,
b"\x28\xb5\x2f\xfd\x00\x40\x54\x00\x00"
@@ -99,7 +103,9 @@
result = cctx.compress(b"foo" * 256)
def test_no_magic(self):
- params = zstd.ZstdCompressionParameters.from_level(1, format=zstd.FORMAT_ZSTD1)
+ params = zstd.ZstdCompressionParameters.from_level(
+ 1, format=zstd.FORMAT_ZSTD1
+ )
cctx = zstd.ZstdCompressor(compression_params=params)
magic = cctx.compress(b"foobar")
@@ -223,7 +229,8 @@
self.assertEqual(
result,
- b"\x28\xb5\x2f\xfd\x23\x8f\x55\x0f\x70\x03\x19\x00\x00" b"\x66\x6f\x6f",
+ b"\x28\xb5\x2f\xfd\x23\x8f\x55\x0f\x70\x03\x19\x00\x00"
+ b"\x66\x6f\x6f",
)
def test_multithreaded_compression_params(self):
@@ -234,7 +241,9 @@
params = zstd.get_frame_parameters(result)
self.assertEqual(params.content_size, 3)
- self.assertEqual(result, b"\x28\xb5\x2f\xfd\x20\x03\x19\x00\x00\x66\x6f\x6f")
+ self.assertEqual(
+ result, b"\x28\xb5\x2f\xfd\x20\x03\x19\x00\x00\x66\x6f\x6f"
+ )
@make_cffi
@@ -347,7 +356,9 @@
)
self.assertEqual(cobj.compress(b"bar"), b"")
# 3 byte header plus content.
- self.assertEqual(cobj.flush(zstd.COMPRESSOBJ_FLUSH_BLOCK), b"\x18\x00\x00bar")
+ self.assertEqual(
+ cobj.flush(zstd.COMPRESSOBJ_FLUSH_BLOCK), b"\x18\x00\x00bar"
+ )
self.assertEqual(cobj.flush(), b"\x01\x00\x00")
def test_flush_empty_block(self):
@@ -445,7 +456,9 @@
self.assertEqual(int(r), 0)
self.assertEqual(w, 9)
- self.assertEqual(dest.getvalue(), b"\x28\xb5\x2f\xfd\x00\x48\x01\x00\x00")
+ self.assertEqual(
+ dest.getvalue(), b"\x28\xb5\x2f\xfd\x00\x48\x01\x00\x00"
+ )
def test_large_data(self):
source = io.BytesIO()
@@ -478,7 +491,9 @@
cctx = zstd.ZstdCompressor(level=1, write_checksum=True)
cctx.copy_stream(source, with_checksum)
- self.assertEqual(len(with_checksum.getvalue()), len(no_checksum.getvalue()) + 4)
+ self.assertEqual(
+ len(with_checksum.getvalue()), len(no_checksum.getvalue()) + 4
+ )
no_params = zstd.get_frame_parameters(no_checksum.getvalue())
with_params = zstd.get_frame_parameters(with_checksum.getvalue())
@@ -585,7 +600,9 @@
cctx = zstd.ZstdCompressor()
with cctx.stream_reader(b"foo") as reader:
- with self.assertRaisesRegex(ValueError, "cannot __enter__ multiple times"):
+ with self.assertRaisesRegex(
+ ValueError, "cannot __enter__ multiple times"
+ ):
with reader as reader2:
pass
@@ -744,7 +761,9 @@
source = io.BytesIO(b"foobar")
with cctx.stream_reader(source, size=2) as reader:
- with self.assertRaisesRegex(zstd.ZstdError, "Src size is incorrect"):
+ with self.assertRaisesRegex(
+ zstd.ZstdError, "Src size is incorrect"
+ ):
reader.read(10)
# Try another compression operation.
@@ -1126,7 +1145,9 @@
self.assertFalse(no_params.has_checksum)
self.assertTrue(with_params.has_checksum)
- self.assertEqual(len(with_checksum.getvalue()), len(no_checksum.getvalue()) + 4)
+ self.assertEqual(
+ len(with_checksum.getvalue()), len(no_checksum.getvalue()) + 4
+ )
def test_write_content_size(self):
no_size = NonClosingBytesIO()
@@ -1145,7 +1166,9 @@
# Declaring size will write the header.
with_size = NonClosingBytesIO()
- with cctx.stream_writer(with_size, size=len(b"foobar" * 256)) as compressor:
+ with cctx.stream_writer(
+ with_size, size=len(b"foobar" * 256)
+ ) as compressor:
self.assertEqual(compressor.write(b"foobar" * 256), 0)
no_params = zstd.get_frame_parameters(no_size.getvalue())
@@ -1191,7 +1214,9 @@
self.assertFalse(no_params.has_checksum)
self.assertFalse(with_params.has_checksum)
- self.assertEqual(len(with_dict_id.getvalue()), len(no_dict_id.getvalue()) + 4)
+ self.assertEqual(
+ len(with_dict_id.getvalue()), len(no_dict_id.getvalue()) + 4
+ )
def test_memory_size(self):
cctx = zstd.ZstdCompressor(level=3)
@@ -1337,7 +1362,9 @@
for chunk in cctx.read_to_iter(b"foobar"):
pass
- with self.assertRaisesRegex(ValueError, "must pass an object with a read"):
+ with self.assertRaisesRegex(
+ ValueError, "must pass an object with a read"
+ ):
for chunk in cctx.read_to_iter(True):
pass
@@ -1513,7 +1540,9 @@
dctx = zstd.ZstdDecompressor()
- self.assertEqual(dctx.decompress(b"".join(chunks)), (b"x" * 1000) + (b"y" * 24))
+ self.assertEqual(
+ dctx.decompress(b"".join(chunks)), (b"x" * 1000) + (b"y" * 24)
+ )
def test_small_chunk_size(self):
cctx = zstd.ZstdCompressor()
@@ -1533,7 +1562,8 @@
dctx = zstd.ZstdDecompressor()
self.assertEqual(
- dctx.decompress(b"".join(chunks), max_output_size=10000), b"foo" * 1024
+ dctx.decompress(b"".join(chunks), max_output_size=10000),
+ b"foo" * 1024,
)
def test_input_types(self):
@@ -1602,7 +1632,8 @@
list(chunker.finish())
with self.assertRaisesRegex(
- zstd.ZstdError, r"cannot call compress\(\) after compression finished"
+ zstd.ZstdError,
+ r"cannot call compress\(\) after compression finished",
):
list(chunker.compress(b"foo"))
@@ -1644,7 +1675,9 @@
with self.assertRaises(TypeError):
cctx.multi_compress_to_buffer((1, 2))
- with self.assertRaisesRegex(TypeError, "item 0 not a bytes like object"):
+ with self.assertRaisesRegex(
+ TypeError, "item 0 not a bytes like object"
+ ):
cctx.multi_compress_to_buffer([u"foo"])
def test_empty_input(self):
--- a/contrib/python-zstandard/tests/test_compressor_fuzzing.py Mon Apr 13 16:30:13 2020 +0300
+++ b/contrib/python-zstandard/tests/test_compressor_fuzzing.py Thu Apr 16 22:51:09 2020 +0530
@@ -28,9 +28,13 @@
original=strategies.sampled_from(random_input_data()),
level=strategies.integers(min_value=1, max_value=5),
source_read_size=strategies.integers(1, 16384),
- read_size=strategies.integers(-1, zstd.COMPRESSION_RECOMMENDED_OUTPUT_SIZE),
+ read_size=strategies.integers(
+ -1, zstd.COMPRESSION_RECOMMENDED_OUTPUT_SIZE
+ ),
)
- def test_stream_source_read(self, original, level, source_read_size, read_size):
+ def test_stream_source_read(
+ self, original, level, source_read_size, read_size
+ ):
if read_size == 0:
read_size = -1
@@ -58,9 +62,13 @@
original=strategies.sampled_from(random_input_data()),
level=strategies.integers(min_value=1, max_value=5),
source_read_size=strategies.integers(1, 16384),
- read_size=strategies.integers(-1, zstd.COMPRESSION_RECOMMENDED_OUTPUT_SIZE),
+ read_size=strategies.integers(
+ -1, zstd.COMPRESSION_RECOMMENDED_OUTPUT_SIZE
+ ),
)
- def test_buffer_source_read(self, original, level, source_read_size, read_size):
+ def test_buffer_source_read(
+ self, original, level, source_read_size, read_size
+ ):
if read_size == 0:
read_size = -1
@@ -155,9 +163,13 @@
original=strategies.sampled_from(random_input_data()),
level=strategies.integers(min_value=1, max_value=5),
source_read_size=strategies.integers(1, 16384),
- read_size=strategies.integers(1, zstd.COMPRESSION_RECOMMENDED_OUTPUT_SIZE),
+ read_size=strategies.integers(
+ 1, zstd.COMPRESSION_RECOMMENDED_OUTPUT_SIZE
+ ),
)
- def test_stream_source_readinto(self, original, level, source_read_size, read_size):
+ def test_stream_source_readinto(
+ self, original, level, source_read_size, read_size
+ ):
refctx = zstd.ZstdCompressor(level=level)
ref_frame = refctx.compress(original)
@@ -184,9 +196,13 @@
original=strategies.sampled_from(random_input_data()),
level=strategies.integers(min_value=1, max_value=5),
source_read_size=strategies.integers(1, 16384),
- read_size=strategies.integers(1, zstd.COMPRESSION_RECOMMENDED_OUTPUT_SIZE),
+ read_size=strategies.integers(
+ 1, zstd.COMPRESSION_RECOMMENDED_OUTPUT_SIZE
+ ),
)
- def test_buffer_source_readinto(self, original, level, source_read_size, read_size):
+ def test_buffer_source_readinto(
+ self, original, level, source_read_size, read_size
+ ):
refctx = zstd.ZstdCompressor(level=level)
ref_frame = refctx.compress(original)
@@ -285,9 +301,13 @@
original=strategies.sampled_from(random_input_data()),
level=strategies.integers(min_value=1, max_value=5),
source_read_size=strategies.integers(1, 16384),
- read_size=strategies.integers(-1, zstd.COMPRESSION_RECOMMENDED_OUTPUT_SIZE),
+ read_size=strategies.integers(
+ -1, zstd.COMPRESSION_RECOMMENDED_OUTPUT_SIZE
+ ),
)
- def test_stream_source_read1(self, original, level, source_read_size, read_size):
+ def test_stream_source_read1(
+ self, original, level, source_read_size, read_size
+ ):
if read_size == 0:
read_size = -1
@@ -315,9 +335,13 @@
original=strategies.sampled_from(random_input_data()),
level=strategies.integers(min_value=1, max_value=5),
source_read_size=strategies.integers(1, 16384),
- read_size=strategies.integers(-1, zstd.COMPRESSION_RECOMMENDED_OUTPUT_SIZE),
+ read_size=strategies.integers(
+ -1, zstd.COMPRESSION_RECOMMENDED_OUTPUT_SIZE
+ ),
)
- def test_buffer_source_read1(self, original, level, source_read_size, read_size):
+ def test_buffer_source_read1(
+ self, original, level, source_read_size, read_size
+ ):
if read_size == 0:
read_size = -1
@@ -412,7 +436,9 @@
original=strategies.sampled_from(random_input_data()),
level=strategies.integers(min_value=1, max_value=5),
source_read_size=strategies.integers(1, 16384),
- read_size=strategies.integers(1, zstd.COMPRESSION_RECOMMENDED_OUTPUT_SIZE),
+ read_size=strategies.integers(
+ 1, zstd.COMPRESSION_RECOMMENDED_OUTPUT_SIZE
+ ),
)
def test_stream_source_readinto1(
self, original, level, source_read_size, read_size
@@ -446,7 +472,9 @@
original=strategies.sampled_from(random_input_data()),
level=strategies.integers(min_value=1, max_value=5),
source_read_size=strategies.integers(1, 16384),
- read_size=strategies.integers(1, zstd.COMPRESSION_RECOMMENDED_OUTPUT_SIZE),
+ read_size=strategies.integers(
+ 1, zstd.COMPRESSION_RECOMMENDED_OUTPUT_SIZE
+ ),
)
def test_buffer_source_readinto1(
self, original, level, source_read_size, read_size
@@ -576,7 +604,9 @@
read_size=strategies.integers(min_value=1, max_value=1048576),
write_size=strategies.integers(min_value=1, max_value=1048576),
)
- def test_read_write_size_variance(self, original, level, read_size, write_size):
+ def test_read_write_size_variance(
+ self, original, level, read_size, write_size
+ ):
refctx = zstd.ZstdCompressor(level=level)
ref_frame = refctx.compress(original)
@@ -585,7 +615,11 @@
dest = io.BytesIO()
cctx.copy_stream(
- source, dest, size=len(original), read_size=read_size, write_size=write_size
+ source,
+ dest,
+ size=len(original),
+ read_size=read_size,
+ write_size=write_size,
)
self.assertEqual(dest.getvalue(), ref_frame)
@@ -675,7 +709,9 @@
decompressed_chunks.append(dobj.decompress(chunk))
self.assertEqual(
- dctx.decompress(b"".join(compressed_chunks), max_output_size=len(original)),
+ dctx.decompress(
+ b"".join(compressed_chunks), max_output_size=len(original)
+ ),
original,
)
self.assertEqual(b"".join(decompressed_chunks), original)
@@ -690,7 +726,9 @@
read_size=strategies.integers(min_value=1, max_value=4096),
write_size=strategies.integers(min_value=1, max_value=4096),
)
- def test_read_write_size_variance(self, original, level, read_size, write_size):
+ def test_read_write_size_variance(
+ self, original, level, read_size, write_size
+ ):
refcctx = zstd.ZstdCompressor(level=level)
ref_frame = refcctx.compress(original)
@@ -699,7 +737,10 @@
cctx = zstd.ZstdCompressor(level=level)
chunks = list(
cctx.read_to_iter(
- source, size=len(original), read_size=read_size, write_size=write_size
+ source,
+ size=len(original),
+ read_size=read_size,
+ write_size=write_size,
)
)
@@ -710,7 +751,9 @@
class TestCompressor_multi_compress_to_buffer_fuzzing(TestCase):
@hypothesis.given(
original=strategies.lists(
- strategies.sampled_from(random_input_data()), min_size=1, max_size=1024
+ strategies.sampled_from(random_input_data()),
+ min_size=1,
+ max_size=1024,
),
threads=strategies.integers(min_value=1, max_value=8),
use_dict=strategies.booleans(),
@@ -776,7 +819,8 @@
dctx = zstd.ZstdDecompressor()
self.assertEqual(
- dctx.decompress(b"".join(chunks), max_output_size=len(original)), original
+ dctx.decompress(b"".join(chunks), max_output_size=len(original)),
+ original,
)
self.assertTrue(all(len(chunk) == chunk_size for chunk in chunks[:-1]))
@@ -794,7 +838,9 @@
input_sizes=strategies.data(),
flushes=strategies.data(),
)
- def test_flush_block(self, original, level, chunk_size, input_sizes, flushes):
+ def test_flush_block(
+ self, original, level, chunk_size, input_sizes, flushes
+ ):
cctx = zstd.ZstdCompressor(level=level)
chunker = cctx.chunker(chunk_size=chunk_size)
@@ -830,7 +876,9 @@
decompressed_chunks.append(dobj.decompress(b"".join(chunks)))
self.assertEqual(
- dctx.decompress(b"".join(compressed_chunks), max_output_size=len(original)),
+ dctx.decompress(
+ b"".join(compressed_chunks), max_output_size=len(original)
+ ),
original,
)
self.assertEqual(b"".join(decompressed_chunks), original)
--- a/contrib/python-zstandard/tests/test_data_structures.py Mon Apr 13 16:30:13 2020 +0300
+++ b/contrib/python-zstandard/tests/test_data_structures.py Thu Apr 16 22:51:09 2020 +0530
@@ -65,7 +65,9 @@
p = zstd.ZstdCompressionParameters(threads=4)
self.assertEqual(p.threads, 4)
- p = zstd.ZstdCompressionParameters(threads=2, job_size=1048576, overlap_log=6)
+ p = zstd.ZstdCompressionParameters(
+ threads=2, job_size=1048576, overlap_log=6
+ )
self.assertEqual(p.threads, 2)
self.assertEqual(p.job_size, 1048576)
self.assertEqual(p.overlap_log, 6)
@@ -128,7 +130,9 @@
with self.assertRaisesRegex(
ValueError, "cannot specify both ldm_hash_rate_log"
):
- zstd.ZstdCompressionParameters(ldm_hash_rate_log=8, ldm_hash_every_log=4)
+ zstd.ZstdCompressionParameters(
+ ldm_hash_rate_log=8, ldm_hash_every_log=4
+ )
p = zstd.ZstdCompressionParameters(ldm_hash_rate_log=8)
self.assertEqual(p.ldm_hash_every_log, 8)
@@ -137,7 +141,9 @@
self.assertEqual(p.ldm_hash_every_log, 16)
def test_overlap_log(self):
- with self.assertRaisesRegex(ValueError, "cannot specify both overlap_log"):
+ with self.assertRaisesRegex(
+ ValueError, "cannot specify both overlap_log"
+ ):
zstd.ZstdCompressionParameters(overlap_log=1, overlap_size_log=9)
p = zstd.ZstdCompressionParameters(overlap_log=2)
@@ -169,10 +175,14 @@
zstd.get_frame_parameters(u"foobarbaz")
def test_invalid_input_sizes(self):
- with self.assertRaisesRegex(zstd.ZstdError, "not enough data for frame"):
+ with self.assertRaisesRegex(
+ zstd.ZstdError, "not enough data for frame"
+ ):
zstd.get_frame_parameters(b"")
- with self.assertRaisesRegex(zstd.ZstdError, "not enough data for frame"):
+ with self.assertRaisesRegex(
+ zstd.ZstdError, "not enough data for frame"
+ ):
zstd.get_frame_parameters(zstd.FRAME_HEADER)
def test_invalid_frame(self):
@@ -201,7 +211,9 @@
self.assertTrue(params.has_checksum)
# Upper 2 bits indicate content size.
- params = zstd.get_frame_parameters(zstd.FRAME_HEADER + b"\x40\x00\xff\x00")
+ params = zstd.get_frame_parameters(
+ zstd.FRAME_HEADER + b"\x40\x00\xff\x00"
+ )
self.assertEqual(params.content_size, 511)
self.assertEqual(params.window_size, 1024)
self.assertEqual(params.dict_id, 0)
@@ -215,7 +227,9 @@
self.assertFalse(params.has_checksum)
# Set multiple things.
- params = zstd.get_frame_parameters(zstd.FRAME_HEADER + b"\x45\x40\x0f\x10\x00")
+ params = zstd.get_frame_parameters(
+ zstd.FRAME_HEADER + b"\x45\x40\x0f\x10\x00"
+ )
self.assertEqual(params.content_size, 272)
self.assertEqual(params.window_size, 262144)
self.assertEqual(params.dict_id, 15)
--- a/contrib/python-zstandard/tests/test_data_structures_fuzzing.py Mon Apr 13 16:30:13 2020 +0300
+++ b/contrib/python-zstandard/tests/test_data_structures_fuzzing.py Thu Apr 16 22:51:09 2020 +0530
@@ -23,7 +23,9 @@
s_chainlog = strategies.integers(
min_value=zstd.CHAINLOG_MIN, max_value=zstd.CHAINLOG_MAX
)
-s_hashlog = strategies.integers(min_value=zstd.HASHLOG_MIN, max_value=zstd.HASHLOG_MAX)
+s_hashlog = strategies.integers(
+ min_value=zstd.HASHLOG_MIN, max_value=zstd.HASHLOG_MAX
+)
s_searchlog = strategies.integers(
min_value=zstd.SEARCHLOG_MIN, max_value=zstd.SEARCHLOG_MAX
)
@@ -61,7 +63,14 @@
s_strategy,
)
def test_valid_init(
- self, windowlog, chainlog, hashlog, searchlog, minmatch, targetlength, strategy
+ self,
+ windowlog,
+ chainlog,
+ hashlog,
+ searchlog,
+ minmatch,
+ targetlength,
+ strategy,
):
zstd.ZstdCompressionParameters(
window_log=windowlog,
@@ -83,7 +92,14 @@
s_strategy,
)
def test_estimated_compression_context_size(
- self, windowlog, chainlog, hashlog, searchlog, minmatch, targetlength, strategy
+ self,
+ windowlog,
+ chainlog,
+ hashlog,
+ searchlog,
+ minmatch,
+ targetlength,
+ strategy,
):
if minmatch == zstd.MINMATCH_MIN and strategy in (
zstd.STRATEGY_FAST,
--- a/contrib/python-zstandard/tests/test_decompressor.py Mon Apr 13 16:30:13 2020 +0300
+++ b/contrib/python-zstandard/tests/test_decompressor.py Thu Apr 16 22:51:09 2020 +0530
@@ -170,11 +170,15 @@
dctx.decompress(compressed, max_output_size=len(source) - 1)
# Input size + 1 works
- decompressed = dctx.decompress(compressed, max_output_size=len(source) + 1)
+ decompressed = dctx.decompress(
+ compressed, max_output_size=len(source) + 1
+ )
self.assertEqual(decompressed, source)
# A much larger buffer works.
- decompressed = dctx.decompress(compressed, max_output_size=len(source) * 64)
+ decompressed = dctx.decompress(
+ compressed, max_output_size=len(source) * 64
+ )
self.assertEqual(decompressed, source)
def test_stupidly_large_output_buffer(self):
@@ -237,7 +241,8 @@
dctx = zstd.ZstdDecompressor(max_window_size=2 ** zstd.WINDOWLOG_MIN)
with self.assertRaisesRegex(
- zstd.ZstdError, "decompression error: Frame requires too much memory"
+ zstd.ZstdError,
+ "decompression error: Frame requires too much memory",
):
dctx.decompress(frame, max_output_size=len(source))
@@ -291,7 +296,9 @@
self.assertEqual(w, len(source.getvalue()))
def test_read_write_size(self):
- source = OpCountingBytesIO(zstd.ZstdCompressor().compress(b"foobarfoobar"))
+ source = OpCountingBytesIO(
+ zstd.ZstdCompressor().compress(b"foobarfoobar")
+ )
dest = OpCountingBytesIO()
dctx = zstd.ZstdDecompressor()
@@ -309,7 +316,9 @@
dctx = zstd.ZstdDecompressor()
with dctx.stream_reader(b"foo") as reader:
- with self.assertRaisesRegex(ValueError, "cannot __enter__ multiple times"):
+ with self.assertRaisesRegex(
+ ValueError, "cannot __enter__ multiple times"
+ ):
with reader as reader2:
pass
@@ -474,7 +483,9 @@
dctx = zstd.ZstdDecompressor()
with dctx.stream_reader(frame) as reader:
- with self.assertRaisesRegex(ValueError, "cannot seek to negative position"):
+ with self.assertRaisesRegex(
+ ValueError, "cannot seek to negative position"
+ ):
reader.seek(-1, os.SEEK_SET)
reader.read(1)
@@ -490,7 +501,8 @@
reader.seek(-1, os.SEEK_CUR)
with self.assertRaisesRegex(
- ValueError, "zstd decompression streams cannot be seeked with SEEK_END"
+ ValueError,
+ "zstd decompression streams cannot be seeked with SEEK_END",
):
reader.seek(0, os.SEEK_END)
@@ -743,7 +755,9 @@
def test_read_lines(self):
cctx = zstd.ZstdCompressor()
- source = b"\n".join(("line %d" % i).encode("ascii") for i in range(1024))
+ source = b"\n".join(
+ ("line %d" % i).encode("ascii") for i in range(1024)
+ )
frame = cctx.compress(source)
@@ -821,7 +835,9 @@
dobj = dctx.decompressobj()
dobj.decompress(data)
- with self.assertRaisesRegex(zstd.ZstdError, "cannot use a decompressobj"):
+ with self.assertRaisesRegex(
+ zstd.ZstdError, "cannot use a decompressobj"
+ ):
dobj.decompress(data)
self.assertIsNone(dobj.flush())
@@ -1124,7 +1140,9 @@
# Buffer protocol works.
dctx.read_to_iter(b"foobar")
- with self.assertRaisesRegex(ValueError, "must pass an object with a read"):
+ with self.assertRaisesRegex(
+ ValueError, "must pass an object with a read"
+ ):
b"".join(dctx.read_to_iter(True))
def test_empty_input(self):
@@ -1226,7 +1244,9 @@
decompressed = b"".join(chunks)
self.assertEqual(decompressed, source.getvalue())
- @unittest.skipUnless("ZSTD_SLOW_TESTS" in os.environ, "ZSTD_SLOW_TESTS not set")
+ @unittest.skipUnless(
+ "ZSTD_SLOW_TESTS" in os.environ, "ZSTD_SLOW_TESTS not set"
+ )
def test_large_input(self):
bytes = list(struct.Struct(">B").pack(i) for i in range(256))
compressed = NonClosingBytesIO()
@@ -1241,13 +1261,16 @@
len(compressed.getvalue())
> zstd.DECOMPRESSION_RECOMMENDED_INPUT_SIZE
)
- have_raw = input_size > zstd.DECOMPRESSION_RECOMMENDED_OUTPUT_SIZE * 2
+ have_raw = (
+ input_size > zstd.DECOMPRESSION_RECOMMENDED_OUTPUT_SIZE * 2
+ )
if have_compressed and have_raw:
break
compressed = io.BytesIO(compressed.getvalue())
self.assertGreater(
- len(compressed.getvalue()), zstd.DECOMPRESSION_RECOMMENDED_INPUT_SIZE
+ len(compressed.getvalue()),
+ zstd.DECOMPRESSION_RECOMMENDED_INPUT_SIZE,
)
dctx = zstd.ZstdDecompressor()
@@ -1303,7 +1326,9 @@
self.assertEqual(streamed, source.getvalue())
def test_read_write_size(self):
- source = OpCountingBytesIO(zstd.ZstdCompressor().compress(b"foobarfoobar"))
+ source = OpCountingBytesIO(
+ zstd.ZstdCompressor().compress(b"foobarfoobar")
+ )
dctx = zstd.ZstdDecompressor()
for chunk in dctx.read_to_iter(source, read_size=1, write_size=1):
self.assertEqual(len(chunk), 1)
@@ -1355,10 +1380,14 @@
):
dctx.decompress_content_dict_chain([zstd.FRAME_HEADER])
- with self.assertRaisesRegex(ValueError, "chunk 0 is not a valid zstd frame"):
+ with self.assertRaisesRegex(
+ ValueError, "chunk 0 is not a valid zstd frame"
+ ):
dctx.decompress_content_dict_chain([b"foo" * 8])
- no_size = zstd.ZstdCompressor(write_content_size=False).compress(b"foo" * 64)
+ no_size = zstd.ZstdCompressor(write_content_size=False).compress(
+ b"foo" * 64
+ )
with self.assertRaisesRegex(
ValueError, "chunk 0 missing content size in frame"
@@ -1389,10 +1418,14 @@
):
dctx.decompress_content_dict_chain([initial, zstd.FRAME_HEADER])
- with self.assertRaisesRegex(ValueError, "chunk 1 is not a valid zstd frame"):
+ with self.assertRaisesRegex(
+ ValueError, "chunk 1 is not a valid zstd frame"
+ ):
dctx.decompress_content_dict_chain([initial, b"foo" * 8])
- no_size = zstd.ZstdCompressor(write_content_size=False).compress(b"foo" * 64)
+ no_size = zstd.ZstdCompressor(write_content_size=False).compress(
+ b"foo" * 64
+ )
with self.assertRaisesRegex(
ValueError, "chunk 1 missing content size in frame"
@@ -1400,7 +1433,9 @@
dctx.decompress_content_dict_chain([initial, no_size])
# Corrupt second frame.
- cctx = zstd.ZstdCompressor(dict_data=zstd.ZstdCompressionDict(b"foo" * 64))
+ cctx = zstd.ZstdCompressor(
+ dict_data=zstd.ZstdCompressionDict(b"foo" * 64)
+ )
frame = cctx.compress(b"bar" * 64)
frame = frame[0:12] + frame[15:]
@@ -1447,7 +1482,9 @@
with self.assertRaises(TypeError):
dctx.multi_decompress_to_buffer((1, 2))
- with self.assertRaisesRegex(TypeError, "item 0 not a bytes like object"):
+ with self.assertRaisesRegex(
+ TypeError, "item 0 not a bytes like object"
+ ):
dctx.multi_decompress_to_buffer([u"foo"])
with self.assertRaisesRegex(
@@ -1491,7 +1528,9 @@
if not hasattr(dctx, "multi_decompress_to_buffer"):
self.skipTest("multi_decompress_to_buffer not available")
- result = dctx.multi_decompress_to_buffer(frames, decompressed_sizes=sizes)
+ result = dctx.multi_decompress_to_buffer(
+ frames, decompressed_sizes=sizes
+ )
self.assertEqual(len(result), len(frames))
self.assertEqual(result.size(), sum(map(len, original)))
@@ -1582,10 +1621,15 @@
# And a manual mode.
b = b"".join([frames[0].tobytes(), frames[1].tobytes()])
b1 = zstd.BufferWithSegments(
- b, struct.pack("=QQQQ", 0, len(frames[0]), len(frames[0]), len(frames[1]))
+ b,
+ struct.pack(
+ "=QQQQ", 0, len(frames[0]), len(frames[0]), len(frames[1])
+ ),
)
- b = b"".join([frames[2].tobytes(), frames[3].tobytes(), frames[4].tobytes()])
+ b = b"".join(
+ [frames[2].tobytes(), frames[3].tobytes(), frames[4].tobytes()]
+ )
b2 = zstd.BufferWithSegments(
b,
struct.pack(
--- a/contrib/python-zstandard/tests/test_decompressor_fuzzing.py Mon Apr 13 16:30:13 2020 +0300
+++ b/contrib/python-zstandard/tests/test_decompressor_fuzzing.py Thu Apr 16 22:51:09 2020 +0530
@@ -196,7 +196,9 @@
streaming=strategies.booleans(),
source_read_size=strategies.integers(1, 1048576),
)
- def test_stream_source_readall(self, original, level, streaming, source_read_size):
+ def test_stream_source_readall(
+ self, original, level, streaming, source_read_size
+ ):
cctx = zstd.ZstdCompressor(level=level)
if streaming:
@@ -398,7 +400,9 @@
write_size=strategies.integers(min_value=1, max_value=8192),
input_sizes=strategies.data(),
)
- def test_write_size_variance(self, original, level, write_size, input_sizes):
+ def test_write_size_variance(
+ self, original, level, write_size, input_sizes
+ ):
cctx = zstd.ZstdCompressor(level=level)
frame = cctx.compress(original)
@@ -433,7 +437,9 @@
read_size=strategies.integers(min_value=1, max_value=8192),
write_size=strategies.integers(min_value=1, max_value=8192),
)
- def test_read_write_size_variance(self, original, level, read_size, write_size):
+ def test_read_write_size_variance(
+ self, original, level, read_size, write_size
+ ):
cctx = zstd.ZstdCompressor(level=level)
frame = cctx.compress(original)
@@ -441,7 +447,9 @@
dest = io.BytesIO()
dctx = zstd.ZstdDecompressor()
- dctx.copy_stream(source, dest, read_size=read_size, write_size=write_size)
+ dctx.copy_stream(
+ source, dest, read_size=read_size, write_size=write_size
+ )
self.assertEqual(dest.getvalue(), original)
@@ -490,11 +498,14 @@
original=strategies.sampled_from(random_input_data()),
level=strategies.integers(min_value=1, max_value=5),
write_size=strategies.integers(
- min_value=1, max_value=4 * zstd.DECOMPRESSION_RECOMMENDED_OUTPUT_SIZE
+ min_value=1,
+ max_value=4 * zstd.DECOMPRESSION_RECOMMENDED_OUTPUT_SIZE,
),
chunk_sizes=strategies.data(),
)
- def test_random_output_sizes(self, original, level, write_size, chunk_sizes):
+ def test_random_output_sizes(
+ self, original, level, write_size, chunk_sizes
+ ):
cctx = zstd.ZstdCompressor(level=level)
frame = cctx.compress(original)
@@ -524,7 +535,9 @@
read_size=strategies.integers(min_value=1, max_value=4096),
write_size=strategies.integers(min_value=1, max_value=4096),
)
- def test_read_write_size_variance(self, original, level, read_size, write_size):
+ def test_read_write_size_variance(
+ self, original, level, read_size, write_size
+ ):
cctx = zstd.ZstdCompressor(level=level)
frame = cctx.compress(original)
@@ -532,7 +545,9 @@
dctx = zstd.ZstdDecompressor()
chunks = list(
- dctx.read_to_iter(source, read_size=read_size, write_size=write_size)
+ dctx.read_to_iter(
+ source, read_size=read_size, write_size=write_size
+ )
)
self.assertEqual(b"".join(chunks), original)
@@ -542,7 +557,9 @@
class TestDecompressor_multi_decompress_to_buffer_fuzzing(TestCase):
@hypothesis.given(
original=strategies.lists(
- strategies.sampled_from(random_input_data()), min_size=1, max_size=1024
+ strategies.sampled_from(random_input_data()),
+ min_size=1,
+ max_size=1024,
),
threads=strategies.integers(min_value=1, max_value=8),
use_dict=strategies.booleans(),
--- a/contrib/python-zstandard/tests/test_train_dictionary.py Mon Apr 13 16:30:13 2020 +0300
+++ b/contrib/python-zstandard/tests/test_train_dictionary.py Thu Apr 16 22:51:09 2020 +0530
@@ -51,11 +51,15 @@
self.assertEqual(d.d, 16)
def test_set_dict_id(self):
- d = zstd.train_dictionary(8192, generate_samples(), k=64, d=16, dict_id=42)
+ d = zstd.train_dictionary(
+ 8192, generate_samples(), k=64, d=16, dict_id=42
+ )
self.assertEqual(d.dict_id(), 42)
def test_optimize(self):
- d = zstd.train_dictionary(8192, generate_samples(), threads=-1, steps=1, d=16)
+ d = zstd.train_dictionary(
+ 8192, generate_samples(), threads=-1, steps=1, d=16
+ )
# This varies by platform.
self.assertIn(d.k, (50, 2000))
@@ -71,10 +75,14 @@
def test_bad_precompute_compress(self):
d = zstd.train_dictionary(8192, generate_samples(), k=64, d=16)
- with self.assertRaisesRegex(ValueError, "must specify one of level or "):
+ with self.assertRaisesRegex(
+ ValueError, "must specify one of level or "
+ ):
d.precompute_compress()
- with self.assertRaisesRegex(ValueError, "must only specify one of level or "):
+ with self.assertRaisesRegex(
+ ValueError, "must only specify one of level or "
+ ):
d.precompute_compress(
level=3, compression_params=zstd.CompressionParameters()
)
@@ -88,5 +96,7 @@
d = zstd.ZstdCompressionDict(
b"dictcontent" * 64, dict_type=zstd.DICT_TYPE_FULLDICT
)
- with self.assertRaisesRegex(zstd.ZstdError, "unable to precompute dictionary"):
+ with self.assertRaisesRegex(
+ zstd.ZstdError, "unable to precompute dictionary"
+ ):
d.precompute_compress(level=1)
--- a/contrib/python-zstandard/zstandard/cffi.py Mon Apr 13 16:30:13 2020 +0300
+++ b/contrib/python-zstandard/zstandard/cffi.py Thu Apr 16 22:51:09 2020 +0530
@@ -299,10 +299,14 @@
_set_compression_parameter(params, lib.ZSTD_c_chainLog, chain_log)
_set_compression_parameter(params, lib.ZSTD_c_searchLog, search_log)
_set_compression_parameter(params, lib.ZSTD_c_minMatch, min_match)
- _set_compression_parameter(params, lib.ZSTD_c_targetLength, target_length)
+ _set_compression_parameter(
+ params, lib.ZSTD_c_targetLength, target_length
+ )
if strategy != -1 and compression_strategy != -1:
- raise ValueError("cannot specify both compression_strategy and strategy")
+ raise ValueError(
+ "cannot specify both compression_strategy and strategy"
+ )
if compression_strategy != -1:
strategy = compression_strategy
@@ -313,12 +317,16 @@
_set_compression_parameter(
params, lib.ZSTD_c_contentSizeFlag, write_content_size
)
- _set_compression_parameter(params, lib.ZSTD_c_checksumFlag, write_checksum)
+ _set_compression_parameter(
+ params, lib.ZSTD_c_checksumFlag, write_checksum
+ )
_set_compression_parameter(params, lib.ZSTD_c_dictIDFlag, write_dict_id)
_set_compression_parameter(params, lib.ZSTD_c_jobSize, job_size)
if overlap_log != -1 and overlap_size_log != -1:
- raise ValueError("cannot specify both overlap_log and overlap_size_log")
+ raise ValueError(
+ "cannot specify both overlap_log and overlap_size_log"
+ )
if overlap_size_log != -1:
overlap_log = overlap_size_log
@@ -326,12 +334,16 @@
overlap_log = 0
_set_compression_parameter(params, lib.ZSTD_c_overlapLog, overlap_log)
- _set_compression_parameter(params, lib.ZSTD_c_forceMaxWindow, force_max_window)
+ _set_compression_parameter(
+ params, lib.ZSTD_c_forceMaxWindow, force_max_window
+ )
_set_compression_parameter(
params, lib.ZSTD_c_enableLongDistanceMatching, enable_ldm
)
_set_compression_parameter(params, lib.ZSTD_c_ldmHashLog, ldm_hash_log)
- _set_compression_parameter(params, lib.ZSTD_c_ldmMinMatch, ldm_min_match)
+ _set_compression_parameter(
+ params, lib.ZSTD_c_ldmMinMatch, ldm_min_match
+ )
_set_compression_parameter(
params, lib.ZSTD_c_ldmBucketSizeLog, ldm_bucket_size_log
)
@@ -346,7 +358,9 @@
elif ldm_hash_rate_log == -1:
ldm_hash_rate_log = 0
- _set_compression_parameter(params, lib.ZSTD_c_ldmHashRateLog, ldm_hash_rate_log)
+ _set_compression_parameter(
+ params, lib.ZSTD_c_ldmHashRateLog, ldm_hash_rate_log
+ )
@property
def format(self):
@@ -354,7 +368,9 @@
@property
def compression_level(self):
- return _get_compression_parameter(self._params, lib.ZSTD_c_compressionLevel)
+ return _get_compression_parameter(
+ self._params, lib.ZSTD_c_compressionLevel
+ )
@property
def window_log(self):
@@ -386,7 +402,9 @@
@property
def write_content_size(self):
- return _get_compression_parameter(self._params, lib.ZSTD_c_contentSizeFlag)
+ return _get_compression_parameter(
+ self._params, lib.ZSTD_c_contentSizeFlag
+ )
@property
def write_checksum(self):
@@ -410,7 +428,9 @@
@property
def force_max_window(self):
- return _get_compression_parameter(self._params, lib.ZSTD_c_forceMaxWindow)
+ return _get_compression_parameter(
+ self._params, lib.ZSTD_c_forceMaxWindow
+ )
@property
def enable_ldm(self):
@@ -428,11 +448,15 @@
@property
def ldm_bucket_size_log(self):
- return _get_compression_parameter(self._params, lib.ZSTD_c_ldmBucketSizeLog)
+ return _get_compression_parameter(
+ self._params, lib.ZSTD_c_ldmBucketSizeLog
+ )
@property
def ldm_hash_rate_log(self):
- return _get_compression_parameter(self._params, lib.ZSTD_c_ldmHashRateLog)
+ return _get_compression_parameter(
+ self._params, lib.ZSTD_c_ldmHashRateLog
+ )
@property
def ldm_hash_every_log(self):
@@ -457,7 +481,8 @@
zresult = lib.ZSTD_CCtxParams_setParameter(params, param, value)
if lib.ZSTD_isError(zresult):
raise ZstdError(
- "unable to set compression context parameter: %s" % _zstd_error(zresult)
+ "unable to set compression context parameter: %s"
+ % _zstd_error(zresult)
)
@@ -467,14 +492,17 @@
zresult = lib.ZSTD_CCtxParams_getParameter(params, param, result)
if lib.ZSTD_isError(zresult):
raise ZstdError(
- "unable to get compression context parameter: %s" % _zstd_error(zresult)
+ "unable to get compression context parameter: %s"
+ % _zstd_error(zresult)
)
return result[0]
class ZstdCompressionWriter(object):
- def __init__(self, compressor, writer, source_size, write_size, write_return_read):
+ def __init__(
+ self, compressor, writer, source_size, write_size, write_return_read
+ ):
self._compressor = compressor
self._writer = writer
self._write_size = write_size
@@ -491,7 +519,9 @@
zresult = lib.ZSTD_CCtx_setPledgedSrcSize(compressor._cctx, source_size)
if lib.ZSTD_isError(zresult):
- raise ZstdError("error setting source size: %s" % _zstd_error(zresult))
+ raise ZstdError(
+ "error setting source size: %s" % _zstd_error(zresult)
+ )
def __enter__(self):
if self._closed:
@@ -595,13 +625,20 @@
while in_buffer.pos < in_buffer.size:
zresult = lib.ZSTD_compressStream2(
- self._compressor._cctx, out_buffer, in_buffer, lib.ZSTD_e_continue
+ self._compressor._cctx,
+ out_buffer,
+ in_buffer,
+ lib.ZSTD_e_continue,
)
if lib.ZSTD_isError(zresult):
- raise ZstdError("zstd compress error: %s" % _zstd_error(zresult))
+ raise ZstdError(
+ "zstd compress error: %s" % _zstd_error(zresult)
+ )
if out_buffer.pos:
- self._writer.write(ffi.buffer(out_buffer.dst, out_buffer.pos)[:])
+ self._writer.write(
+ ffi.buffer(out_buffer.dst, out_buffer.pos)[:]
+ )
total_write += out_buffer.pos
self._bytes_compressed += out_buffer.pos
out_buffer.pos = 0
@@ -637,10 +674,14 @@
self._compressor._cctx, out_buffer, in_buffer, flush
)
if lib.ZSTD_isError(zresult):
- raise ZstdError("zstd compress error: %s" % _zstd_error(zresult))
+ raise ZstdError(
+ "zstd compress error: %s" % _zstd_error(zresult)
+ )
if out_buffer.pos:
- self._writer.write(ffi.buffer(out_buffer.dst, out_buffer.pos)[:])
+ self._writer.write(
+ ffi.buffer(out_buffer.dst, out_buffer.pos)[:]
+ )
total_write += out_buffer.pos
self._bytes_compressed += out_buffer.pos
out_buffer.pos = 0
@@ -672,7 +713,9 @@
self._compressor._cctx, self._out, source, lib.ZSTD_e_continue
)
if lib.ZSTD_isError(zresult):
- raise ZstdError("zstd compress error: %s" % _zstd_error(zresult))
+ raise ZstdError(
+ "zstd compress error: %s" % _zstd_error(zresult)
+ )
if self._out.pos:
chunks.append(ffi.buffer(self._out.dst, self._out.pos)[:])
@@ -681,7 +724,10 @@
return b"".join(chunks)
def flush(self, flush_mode=COMPRESSOBJ_FLUSH_FINISH):
- if flush_mode not in (COMPRESSOBJ_FLUSH_FINISH, COMPRESSOBJ_FLUSH_BLOCK):
+ if flush_mode not in (
+ COMPRESSOBJ_FLUSH_FINISH,
+ COMPRESSOBJ_FLUSH_BLOCK,
+ ):
raise ValueError("flush mode not recognized")
if self._finished:
@@ -768,7 +814,9 @@
self._in.pos = 0
if lib.ZSTD_isError(zresult):
- raise ZstdError("zstd compress error: %s" % _zstd_error(zresult))
+ raise ZstdError(
+ "zstd compress error: %s" % _zstd_error(zresult)
+ )
if self._out.pos == self._out.size:
yield ffi.buffer(self._out.dst, self._out.pos)[:]
@@ -780,7 +828,8 @@
if self._in.src != ffi.NULL:
raise ZstdError(
- "cannot call flush() before consuming output from " "previous operation"
+ "cannot call flush() before consuming output from "
+ "previous operation"
)
while True:
@@ -788,7 +837,9 @@
self._compressor._cctx, self._out, self._in, lib.ZSTD_e_flush
)
if lib.ZSTD_isError(zresult):
- raise ZstdError("zstd compress error: %s" % _zstd_error(zresult))
+ raise ZstdError(
+ "zstd compress error: %s" % _zstd_error(zresult)
+ )
if self._out.pos:
yield ffi.buffer(self._out.dst, self._out.pos)[:]
@@ -812,7 +863,9 @@
self._compressor._cctx, self._out, self._in, lib.ZSTD_e_end
)
if lib.ZSTD_isError(zresult):
- raise ZstdError("zstd compress error: %s" % _zstd_error(zresult))
+ raise ZstdError(
+ "zstd compress error: %s" % _zstd_error(zresult)
+ )
if self._out.pos:
yield ffi.buffer(self._out.dst, self._out.pos)[:]
@@ -939,7 +992,10 @@
old_pos = out_buffer.pos
zresult = lib.ZSTD_compressStream2(
- self._compressor._cctx, out_buffer, self._in_buffer, lib.ZSTD_e_continue
+ self._compressor._cctx,
+ out_buffer,
+ self._in_buffer,
+ lib.ZSTD_e_continue,
)
self._bytes_compressed += out_buffer.pos - old_pos
@@ -997,7 +1053,9 @@
self._bytes_compressed += out_buffer.pos - old_pos
if lib.ZSTD_isError(zresult):
- raise ZstdError("error ending compression stream: %s", _zstd_error(zresult))
+ raise ZstdError(
+ "error ending compression stream: %s", _zstd_error(zresult)
+ )
if zresult == 0:
self._finished_output = True
@@ -1102,7 +1160,9 @@
self._bytes_compressed += out_buffer.pos - old_pos
if lib.ZSTD_isError(zresult):
- raise ZstdError("error ending compression stream: %s", _zstd_error(zresult))
+ raise ZstdError(
+ "error ending compression stream: %s", _zstd_error(zresult)
+ )
if zresult == 0:
self._finished_output = True
@@ -1170,13 +1230,17 @@
threads=0,
):
if level > lib.ZSTD_maxCLevel():
- raise ValueError("level must be less than %d" % lib.ZSTD_maxCLevel())
+ raise ValueError(
+ "level must be less than %d" % lib.ZSTD_maxCLevel()
+ )
if threads < 0:
threads = _cpu_count()
if compression_params and write_checksum is not None:
- raise ValueError("cannot define compression_params and " "write_checksum")
+ raise ValueError(
+ "cannot define compression_params and " "write_checksum"
+ )
if compression_params and write_content_size is not None:
raise ValueError(
@@ -1184,7 +1248,9 @@
)
if compression_params and write_dict_id is not None:
- raise ValueError("cannot define compression_params and " "write_dict_id")
+ raise ValueError(
+ "cannot define compression_params and " "write_dict_id"
+ )
if compression_params and threads:
raise ValueError("cannot define compression_params and threads")
@@ -1201,7 +1267,9 @@
self._params = ffi.gc(params, lib.ZSTD_freeCCtxParams)
- _set_compression_parameter(self._params, lib.ZSTD_c_compressionLevel, level)
+ _set_compression_parameter(
+ self._params, lib.ZSTD_c_compressionLevel, level
+ )
_set_compression_parameter(
self._params,
@@ -1210,7 +1278,9 @@
)
_set_compression_parameter(
- self._params, lib.ZSTD_c_checksumFlag, 1 if write_checksum else 0
+ self._params,
+ lib.ZSTD_c_checksumFlag,
+ 1 if write_checksum else 0,
)
_set_compression_parameter(
@@ -1218,7 +1288,9 @@
)
if threads:
- _set_compression_parameter(self._params, lib.ZSTD_c_nbWorkers, threads)
+ _set_compression_parameter(
+ self._params, lib.ZSTD_c_nbWorkers, threads
+ )
cctx = lib.ZSTD_createCCtx()
if cctx == ffi.NULL:
@@ -1237,10 +1309,13 @@
)
def _setup_cctx(self):
- zresult = lib.ZSTD_CCtx_setParametersUsingCCtxParams(self._cctx, self._params)
+ zresult = lib.ZSTD_CCtx_setParametersUsingCCtxParams(
+ self._cctx, self._params
+ )
if lib.ZSTD_isError(zresult):
raise ZstdError(
- "could not set compression parameters: %s" % _zstd_error(zresult)
+ "could not set compression parameters: %s"
+ % _zstd_error(zresult)
)
dict_data = self._dict_data
@@ -1259,7 +1334,8 @@
if lib.ZSTD_isError(zresult):
raise ZstdError(
- "could not load compression dictionary: %s" % _zstd_error(zresult)
+ "could not load compression dictionary: %s"
+ % _zstd_error(zresult)
)
def memory_size(self):
@@ -1275,7 +1351,9 @@
zresult = lib.ZSTD_CCtx_setPledgedSrcSize(self._cctx, len(data_buffer))
if lib.ZSTD_isError(zresult):
- raise ZstdError("error setting source size: %s" % _zstd_error(zresult))
+ raise ZstdError(
+ "error setting source size: %s" % _zstd_error(zresult)
+ )
out_buffer = ffi.new("ZSTD_outBuffer *")
in_buffer = ffi.new("ZSTD_inBuffer *")
@@ -1307,11 +1385,15 @@
zresult = lib.ZSTD_CCtx_setPledgedSrcSize(self._cctx, size)
if lib.ZSTD_isError(zresult):
- raise ZstdError("error setting source size: %s" % _zstd_error(zresult))
+ raise ZstdError(
+ "error setting source size: %s" % _zstd_error(zresult)
+ )
cobj = ZstdCompressionObj()
cobj._out = ffi.new("ZSTD_outBuffer *")
- cobj._dst_buffer = ffi.new("char[]", COMPRESSION_RECOMMENDED_OUTPUT_SIZE)
+ cobj._dst_buffer = ffi.new(
+ "char[]", COMPRESSION_RECOMMENDED_OUTPUT_SIZE
+ )
cobj._out.dst = cobj._dst_buffer
cobj._out.size = COMPRESSION_RECOMMENDED_OUTPUT_SIZE
cobj._out.pos = 0
@@ -1328,7 +1410,9 @@
zresult = lib.ZSTD_CCtx_setPledgedSrcSize(self._cctx, size)
if lib.ZSTD_isError(zresult):
- raise ZstdError("error setting source size: %s" % _zstd_error(zresult))
+ raise ZstdError(
+ "error setting source size: %s" % _zstd_error(zresult)
+ )
return ZstdCompressionChunker(self, chunk_size=chunk_size)
@@ -1353,7 +1437,9 @@
zresult = lib.ZSTD_CCtx_setPledgedSrcSize(self._cctx, size)
if lib.ZSTD_isError(zresult):
- raise ZstdError("error setting source size: %s" % _zstd_error(zresult))
+ raise ZstdError(
+ "error setting source size: %s" % _zstd_error(zresult)
+ )
in_buffer = ffi.new("ZSTD_inBuffer *")
out_buffer = ffi.new("ZSTD_outBuffer *")
@@ -1381,7 +1467,9 @@
self._cctx, out_buffer, in_buffer, lib.ZSTD_e_continue
)
if lib.ZSTD_isError(zresult):
- raise ZstdError("zstd compress error: %s" % _zstd_error(zresult))
+ raise ZstdError(
+ "zstd compress error: %s" % _zstd_error(zresult)
+ )
if out_buffer.pos:
ofh.write(ffi.buffer(out_buffer.dst, out_buffer.pos))
@@ -1423,7 +1511,9 @@
zresult = lib.ZSTD_CCtx_setPledgedSrcSize(self._cctx, size)
if lib.ZSTD_isError(zresult):
- raise ZstdError("error setting source size: %s" % _zstd_error(zresult))
+ raise ZstdError(
+ "error setting source size: %s" % _zstd_error(zresult)
+ )
return ZstdCompressionReader(self, source, read_size)
@@ -1443,7 +1533,9 @@
if size < 0:
size = lib.ZSTD_CONTENTSIZE_UNKNOWN
- return ZstdCompressionWriter(self, writer, size, write_size, write_return_read)
+ return ZstdCompressionWriter(
+ self, writer, size, write_size, write_return_read
+ )
write_to = stream_writer
@@ -1473,7 +1565,9 @@
zresult = lib.ZSTD_CCtx_setPledgedSrcSize(self._cctx, size)
if lib.ZSTD_isError(zresult):
- raise ZstdError("error setting source size: %s" % _zstd_error(zresult))
+ raise ZstdError(
+ "error setting source size: %s" % _zstd_error(zresult)
+ )
in_buffer = ffi.new("ZSTD_inBuffer *")
out_buffer = ffi.new("ZSTD_outBuffer *")
@@ -1517,7 +1611,9 @@
self._cctx, out_buffer, in_buffer, lib.ZSTD_e_continue
)
if lib.ZSTD_isError(zresult):
- raise ZstdError("zstd compress error: %s" % _zstd_error(zresult))
+ raise ZstdError(
+ "zstd compress error: %s" % _zstd_error(zresult)
+ )
if out_buffer.pos:
data = ffi.buffer(out_buffer.dst, out_buffer.pos)[:]
@@ -1596,10 +1692,14 @@
data_buffer = ffi.from_buffer(data)
zresult = lib.ZSTD_getFrameHeader(params, data_buffer, len(data_buffer))
if lib.ZSTD_isError(zresult):
- raise ZstdError("cannot get frame parameters: %s" % _zstd_error(zresult))
+ raise ZstdError(
+ "cannot get frame parameters: %s" % _zstd_error(zresult)
+ )
if zresult:
- raise ZstdError("not enough data for frame parameters; need %d bytes" % zresult)
+ raise ZstdError(
+ "not enough data for frame parameters; need %d bytes" % zresult
+ )
return FrameParameters(params[0])
@@ -1611,9 +1711,14 @@
self.k = k
self.d = d
- if dict_type not in (DICT_TYPE_AUTO, DICT_TYPE_RAWCONTENT, DICT_TYPE_FULLDICT):
+ if dict_type not in (
+ DICT_TYPE_AUTO,
+ DICT_TYPE_RAWCONTENT,
+ DICT_TYPE_FULLDICT,
+ ):
raise ValueError(
- "invalid dictionary load mode: %d; must use " "DICT_TYPE_* constants"
+ "invalid dictionary load mode: %d; must use "
+ "DICT_TYPE_* constants"
)
self._dict_type = dict_type
@@ -1630,7 +1735,9 @@
def precompute_compress(self, level=0, compression_params=None):
if level and compression_params:
- raise ValueError("must only specify one of level or " "compression_params")
+ raise ValueError(
+ "must only specify one of level or " "compression_params"
+ )
if not level and not compression_params:
raise ValueError("must specify one of level or compression_params")
@@ -1675,7 +1782,9 @@
if ddict == ffi.NULL:
raise ZstdError("could not create decompression dict")
- ddict = ffi.gc(ddict, lib.ZSTD_freeDDict, size=lib.ZSTD_sizeof_DDict(ddict))
+ ddict = ffi.gc(
+ ddict, lib.ZSTD_freeDDict, size=lib.ZSTD_sizeof_DDict(ddict)
+ )
self.__dict__["_ddict"] = ddict
return ddict
@@ -1805,7 +1914,9 @@
self._decompressor._dctx, out_buffer, in_buffer
)
if lib.ZSTD_isError(zresult):
- raise ZstdError("zstd decompressor error: %s" % _zstd_error(zresult))
+ raise ZstdError(
+ "zstd decompressor error: %s" % _zstd_error(zresult)
+ )
if zresult == 0:
self._finished = True
@@ -2105,16 +2216,22 @@
if whence == os.SEEK_SET:
if pos < 0:
- raise ValueError("cannot seek to negative position with SEEK_SET")
+ raise ValueError(
+ "cannot seek to negative position with SEEK_SET"
+ )
if pos < self._bytes_decompressed:
- raise ValueError("cannot seek zstd decompression stream " "backwards")
+ raise ValueError(
+ "cannot seek zstd decompression stream " "backwards"
+ )
read_amount = pos - self._bytes_decompressed
elif whence == os.SEEK_CUR:
if pos < 0:
- raise ValueError("cannot seek zstd decompression stream " "backwards")
+ raise ValueError(
+ "cannot seek zstd decompression stream " "backwards"
+ )
read_amount = pos
elif whence == os.SEEK_END:
@@ -2123,7 +2240,9 @@
)
while read_amount:
- result = self.read(min(read_amount, DECOMPRESSION_RECOMMENDED_OUTPUT_SIZE))
+ result = self.read(
+ min(read_amount, DECOMPRESSION_RECOMMENDED_OUTPUT_SIZE)
+ )
if not result:
break
@@ -2257,10 +2376,14 @@
while in_buffer.pos < in_buffer.size:
zresult = lib.ZSTD_decompressStream(dctx, out_buffer, in_buffer)
if lib.ZSTD_isError(zresult):
- raise ZstdError("zstd decompress error: %s" % _zstd_error(zresult))
+ raise ZstdError(
+ "zstd decompress error: %s" % _zstd_error(zresult)
+ )
if out_buffer.pos:
- self._writer.write(ffi.buffer(out_buffer.dst, out_buffer.pos)[:])
+ self._writer.write(
+ ffi.buffer(out_buffer.dst, out_buffer.pos)[:]
+ )
total_write += out_buffer.pos
out_buffer.pos = 0
@@ -2299,7 +2422,9 @@
data_buffer = ffi.from_buffer(data)
- output_size = lib.ZSTD_getFrameContentSize(data_buffer, len(data_buffer))
+ output_size = lib.ZSTD_getFrameContentSize(
+ data_buffer, len(data_buffer)
+ )
if output_size == lib.ZSTD_CONTENTSIZE_ERROR:
raise ZstdError("error determining content size from frame header")
@@ -2307,7 +2432,9 @@
return b""
elif output_size == lib.ZSTD_CONTENTSIZE_UNKNOWN:
if not max_output_size:
- raise ZstdError("could not determine content size in frame header")
+ raise ZstdError(
+ "could not determine content size in frame header"
+ )
result_buffer = ffi.new("char[]", max_output_size)
result_size = max_output_size
@@ -2330,7 +2457,9 @@
if lib.ZSTD_isError(zresult):
raise ZstdError("decompression error: %s" % _zstd_error(zresult))
elif zresult:
- raise ZstdError("decompression error: did not decompress full frame")
+ raise ZstdError(
+ "decompression error: did not decompress full frame"
+ )
elif output_size and out_buffer.pos != output_size:
raise ZstdError(
"decompression error: decompressed %d bytes; expected %d"
@@ -2346,7 +2475,9 @@
read_across_frames=False,
):
self._ensure_dctx()
- return ZstdDecompressionReader(self, source, read_size, read_across_frames)
+ return ZstdDecompressionReader(
+ self, source, read_size, read_across_frames
+ )
def decompressobj(self, write_size=DECOMPRESSION_RECOMMENDED_OUTPUT_SIZE):
if write_size < 1:
@@ -2421,9 +2552,13 @@
while in_buffer.pos < in_buffer.size:
assert out_buffer.pos == 0
- zresult = lib.ZSTD_decompressStream(self._dctx, out_buffer, in_buffer)
+ zresult = lib.ZSTD_decompressStream(
+ self._dctx, out_buffer, in_buffer
+ )
if lib.ZSTD_isError(zresult):
- raise ZstdError("zstd decompress error: %s" % _zstd_error(zresult))
+ raise ZstdError(
+ "zstd decompress error: %s" % _zstd_error(zresult)
+ )
if out_buffer.pos:
data = ffi.buffer(out_buffer.dst, out_buffer.pos)[:]
@@ -2449,7 +2584,9 @@
if not hasattr(writer, "write"):
raise ValueError("must pass an object with a write() method")
- return ZstdDecompressionWriter(self, writer, write_size, write_return_read)
+ return ZstdDecompressionWriter(
+ self, writer, write_size, write_return_read
+ )
write_to = stream_writer
@@ -2491,7 +2628,9 @@
# Flush all read data to output.
while in_buffer.pos < in_buffer.size:
- zresult = lib.ZSTD_decompressStream(self._dctx, out_buffer, in_buffer)
+ zresult = lib.ZSTD_decompressStream(
+ self._dctx, out_buffer, in_buffer
+ )
if lib.ZSTD_isError(zresult):
raise ZstdError(
"zstd decompressor error: %s" % _zstd_error(zresult)
@@ -2521,7 +2660,9 @@
# All chunks should be zstd frames and should have content size set.
chunk_buffer = ffi.from_buffer(chunk)
params = ffi.new("ZSTD_frameHeader *")
- zresult = lib.ZSTD_getFrameHeader(params, chunk_buffer, len(chunk_buffer))
+ zresult = lib.ZSTD_getFrameHeader(
+ params, chunk_buffer, len(chunk_buffer)
+ )
if lib.ZSTD_isError(zresult):
raise ValueError("chunk 0 is not a valid zstd frame")
elif zresult:
@@ -2546,7 +2687,9 @@
zresult = lib.ZSTD_decompressStream(self._dctx, out_buffer, in_buffer)
if lib.ZSTD_isError(zresult):
- raise ZstdError("could not decompress chunk 0: %s" % _zstd_error(zresult))
+ raise ZstdError(
+ "could not decompress chunk 0: %s" % _zstd_error(zresult)
+ )
elif zresult:
raise ZstdError("chunk 0 did not decompress full frame")
@@ -2561,11 +2704,15 @@
raise ValueError("chunk %d must be bytes" % i)
chunk_buffer = ffi.from_buffer(chunk)
- zresult = lib.ZSTD_getFrameHeader(params, chunk_buffer, len(chunk_buffer))
+ zresult = lib.ZSTD_getFrameHeader(
+ params, chunk_buffer, len(chunk_buffer)
+ )
if lib.ZSTD_isError(zresult):
raise ValueError("chunk %d is not a valid zstd frame" % i)
elif zresult:
- raise ValueError("chunk %d is too small to contain a zstd frame" % i)
+ raise ValueError(
+ "chunk %d is too small to contain a zstd frame" % i
+ )
if params.frameContentSize == lib.ZSTD_CONTENTSIZE_UNKNOWN:
raise ValueError("chunk %d missing content size in frame" % i)
@@ -2580,7 +2727,9 @@
in_buffer.size = len(chunk_buffer)
in_buffer.pos = 0
- zresult = lib.ZSTD_decompressStream(self._dctx, out_buffer, in_buffer)
+ zresult = lib.ZSTD_decompressStream(
+ self._dctx, out_buffer, in_buffer
+ )
if lib.ZSTD_isError(zresult):
raise ZstdError(
"could not decompress chunk %d: %s" % _zstd_error(zresult)
@@ -2597,7 +2746,9 @@
lib.ZSTD_DCtx_reset(self._dctx, lib.ZSTD_reset_session_only)
if self._max_window_size:
- zresult = lib.ZSTD_DCtx_setMaxWindowSize(self._dctx, self._max_window_size)
+ zresult = lib.ZSTD_DCtx_setMaxWindowSize(
+ self._dctx, self._max_window_size
+ )
if lib.ZSTD_isError(zresult):
raise ZstdError(
"unable to set max window size: %s" % _zstd_error(zresult)
@@ -2605,11 +2756,14 @@
zresult = lib.ZSTD_DCtx_setFormat(self._dctx, self._format)
if lib.ZSTD_isError(zresult):
- raise ZstdError("unable to set decoding format: %s" % _zstd_error(zresult))
+ raise ZstdError(
+ "unable to set decoding format: %s" % _zstd_error(zresult)
+ )
if self._dict_data and load_dict:
zresult = lib.ZSTD_DCtx_refDDict(self._dctx, self._dict_data._ddict)
if lib.ZSTD_isError(zresult):
raise ZstdError(
- "unable to reference prepared dictionary: %s" % _zstd_error(zresult)
+ "unable to reference prepared dictionary: %s"
+ % _zstd_error(zresult)
)
--- a/doc/Makefile Mon Apr 13 16:30:13 2020 +0300
+++ b/doc/Makefile Thu Apr 16 22:51:09 2020 +0530
@@ -5,7 +5,7 @@
../mercurial/helptext/*.txt ../hgext/*.py ../hgext/*/__init__.py
PREFIX=/usr/local
MANDIR=$(PREFIX)/share/man
-INSTALL=install -c -m 644
+INSTALL=install -m 644
PYTHON?=python
RSTARGS=
--- a/hgext/absorb.py Mon Apr 13 16:30:13 2020 +0300
+++ b/hgext/absorb.py Thu Apr 16 22:51:09 2020 +0530
@@ -407,7 +407,7 @@
involved = [
annotated[i] for i in nearbylinenums if annotated[i][0] != 1
]
- involvedrevs = list(set(r for r, l in involved))
+ involvedrevs = list({r for r, l in involved})
newfixups = []
if len(involvedrevs) == 1 and self._iscontinuous(a1, a2 - 1, True):
# chunk belongs to a single revision
@@ -734,10 +734,10 @@
@property
def chunkstats(self):
"""-> {path: chunkstats}. collect chunkstats from filefixupstates"""
- return dict(
- (path, state.chunkstats)
+ return {
+ path: state.chunkstats
for path, state in pycompat.iteritems(self.fixupmap)
- )
+ }
def commit(self):
"""commit changes. update self.finalnode, self.replacemap"""
@@ -1077,7 +1077,7 @@
b'i',
b'interactive',
None,
- _(b'interactively select which chunks to apply (EXPERIMENTAL)'),
+ _(b'interactively select which chunks to apply'),
),
(
b'e',
--- a/hgext/beautifygraph.py Mon Apr 13 16:30:13 2020 +0300
+++ b/hgext/beautifygraph.py Thu Apr 16 22:51:09 2020 +0530
@@ -71,6 +71,8 @@
return b'\xE2\x97\x8B' # U+25CB ○
if node == b'@':
return b'\xE2\x97\x8D' # U+25CD ◍
+ if node == b'%':
+ return b'\xE2\x97\x8E' # U+25CE ◎
if node == b'*':
return b'\xE2\x88\x97' # U+2217 ∗
if node == b'x':
--- a/hgext/closehead.py Mon Apr 13 16:30:13 2020 +0300
+++ b/hgext/closehead.py Thu Apr 16 22:51:09 2020 +0530
@@ -76,7 +76,7 @@
heads = []
for branch in repo.branchmap():
heads.extend(repo.branchheads(branch))
- heads = set(repo[h].rev() for h in heads)
+ heads = {repo[h].rev() for h in heads}
for rev in revs:
if rev not in heads:
raise error.Abort(_(b'revision is not an open head: %d') % rev)
--- a/hgext/convert/hg.py Mon Apr 13 16:30:13 2020 +0300
+++ b/hgext/convert/hg.py Thu Apr 16 22:51:09 2020 +0530
@@ -677,13 +677,9 @@
for t in self.repo.tagslist()
if self.repo.tagtype(t[0]) == b'global'
]
- return dict(
- [
- (name, nodemod.hex(node))
- for name, node in tags
- if self.keep(node)
- ]
- )
+ return {
+ name: nodemod.hex(node) for name, node in tags if self.keep(node)
+ }
def getchangedfiles(self, rev, i):
ctx = self._changectx(rev)
--- a/hgext/convert/subversion.py Mon Apr 13 16:30:13 2020 +0300
+++ b/hgext/convert/subversion.py Thu Apr 16 22:51:09 2020 +0530
@@ -710,11 +710,11 @@
# Here/tags/tag.1 discarded as well as its children.
# It happens with tools like cvs2svn. Such tags cannot
# be represented in mercurial.
- addeds = dict(
- (p, e.copyfrom_path)
+ addeds = {
+ p: e.copyfrom_path
for p, e in pycompat.iteritems(origpaths)
if e.action == b'A' and e.copyfrom_path
- )
+ }
badroots = set()
for destroot in addeds:
for source, sourcerev, dest in pendings:
--- a/hgext/eol.py Mon Apr 13 16:30:13 2020 +0300
+++ b/hgext/eol.py Thu Apr 16 22:51:09 2020 +0530
@@ -221,7 +221,7 @@
self.match = match.match(root, b'', [], include, exclude)
def copytoui(self, ui):
- newpatterns = set(pattern for pattern, key, m in self.patterns)
+ newpatterns = {pattern for pattern, key, m in self.patterns}
for section in (b'decode', b'encode'):
for oldpattern, _filter in ui.configitems(section):
if oldpattern not in newpatterns:
--- a/hgext/fastannotate/commands.py Mon Apr 13 16:30:13 2020 +0300
+++ b/hgext/fastannotate/commands.py Thu Apr 16 22:51:09 2020 +0530
@@ -233,7 +233,7 @@
showlines=(showlines and not showdeleted),
)
if showdeleted:
- existinglines = set((l[0], l[1]) for l in result)
+ existinglines = {(l[0], l[1]) for l in result}
result = a.annotatealllines(
rev, showpath=showpath, showlines=showlines
)
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/hgext/fastexport.py Thu Apr 16 22:51:09 2020 +0530
@@ -0,0 +1,219 @@
+# Copyright 2020 Joerg Sonnenberger <joerg@bec.de>
+#
+# This software may be used and distributed according to the terms of the
+# GNU General Public License version 2 or any later version.
+"""export repositories as git fast-import stream"""
+
+# The format specification for fast-import streams can be found at
+# https://git-scm.com/docs/git-fast-import#_input_format
+
+from __future__ import absolute_import
+import re
+
+from mercurial.i18n import _
+from mercurial.node import hex, nullrev
+from mercurial.utils import stringutil
+from mercurial import (
+ error,
+ pycompat,
+ registrar,
+ scmutil,
+)
+from .convert import convcmd
+
+# Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
+# extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
+# be specifying the version(s) of Mercurial they are tested with, or
+# leave the attribute unspecified.
+testedwith = b"ships-with-hg-core"
+
+cmdtable = {}
+command = registrar.command(cmdtable)
+
+GIT_PERSON_PROHIBITED = re.compile(b'[<>\n"]')
+GIT_EMAIL_PROHIBITED = re.compile(b"[<> \n]")
+
+
+def convert_to_git_user(authormap, user, rev):
+ mapped_user = authormap.get(user, user)
+ user_person = stringutil.person(mapped_user)
+ user_email = stringutil.email(mapped_user)
+ if GIT_EMAIL_PROHIBITED.search(user_email) or GIT_PERSON_PROHIBITED.search(
+ user_person
+ ):
+ raise error.Abort(
+ _(b"Unable to parse user into person and email for revision %s")
+ % rev
+ )
+ if user_person:
+ return b'"' + user_person + b'" <' + user_email + b'>'
+ else:
+ return b"<" + user_email + b">"
+
+
+def convert_to_git_date(date):
+ timestamp, utcoff = date
+ tzsign = b"+" if utcoff <= 0 else b"-"
+ if utcoff % 60 != 0:
+ raise error.Abort(
+ _(b"UTC offset in %d is not an integer number of seconds") % (utcoff,)
+ )
+ utcoff = abs(utcoff) // 60
+ tzh = utcoff // 60
+ tzmin = utcoff % 60
+ return b"%d " % int(timestamp) + tzsign + b"%02d%02d" % (tzh, tzmin)
+
+
+def convert_to_git_ref(branch):
+ # XXX filter/map depending on git restrictions
+ return b"refs/heads/" + branch
+
+
+def write_data(buf, data, skip_newline):
+ buf.append(b"data %d\n" % len(data))
+ buf.append(data)
+ if not skip_newline or data[-1:] != b"\n":
+ buf.append(b"\n")
+
+
+def export_commit(ui, repo, rev, marks, authormap):
+ ctx = repo[rev]
+ revid = ctx.hex()
+ if revid in marks:
+ ui.warn(_(b"warning: revision %s already exported, skipped\n") % revid)
+ return
+ parents = [p for p in ctx.parents() if p.rev() != nullrev]
+ for p in parents:
+ if p.hex() not in marks:
+ ui.warn(
+ _(b"warning: parent %s of %s has not been exported, skipped\n")
+ % (p, revid)
+ )
+ return
+
+ # For all files modified by the commit, check if they have already
+ # been exported and otherwise dump the blob with the new mark.
+ for fname in ctx.files():
+ if fname not in ctx:
+ continue
+ filectx = ctx.filectx(fname)
+ filerev = hex(filectx.filenode())
+ if filerev not in marks:
+ mark = len(marks) + 1
+ marks[filerev] = mark
+ data = filectx.data()
+ buf = [b"blob\n", b"mark :%d\n" % mark]
+ write_data(buf, data, False)
+ ui.write(*buf, keepprogressbar=True)
+ del buf
+
+ # Assign a mark for the current revision for references by
+ # latter merge commits.
+ mark = len(marks) + 1
+ marks[revid] = mark
+
+ ref = convert_to_git_ref(ctx.branch())
+ buf = [
+ b"commit %s\n" % ref,
+ b"mark :%d\n" % mark,
+ b"committer %s %s\n"
+ % (
+ convert_to_git_user(authormap, ctx.user(), revid),
+ convert_to_git_date(ctx.date()),
+ ),
+ ]
+ write_data(buf, ctx.description(), True)
+ if parents:
+ buf.append(b"from :%d\n" % marks[parents[0].hex()])
+ if len(parents) == 2:
+ buf.append(b"merge :%d\n" % marks[parents[1].hex()])
+ p0ctx = repo[parents[0]]
+ files = ctx.manifest().diff(p0ctx.manifest())
+ else:
+ files = ctx.files()
+ filebuf = []
+ for fname in files:
+ if fname not in ctx:
+ filebuf.append((fname, b"D %s\n" % fname))
+ else:
+ filectx = ctx.filectx(fname)
+ filerev = filectx.filenode()
+ fileperm = b"755" if filectx.isexec() else b"644"
+ changed = b"M %s :%d %s\n" % (fileperm, marks[hex(filerev)], fname)
+ filebuf.append((fname, changed))
+ filebuf.sort()
+ buf.extend(changed for (fname, changed) in filebuf)
+ del filebuf
+ buf.append(b"\n")
+ ui.write(*buf, keepprogressbar=True)
+ del buf
+
+
+isrev = re.compile(b"^[0-9a-f]{40}$")
+
+
+@command(
+ b"fastexport",
+ [
+ (b"r", b"rev", [], _(b"revisions to export"), _(b"REV")),
+ (b"i", b"import-marks", b"", _(b"old marks file to read"), _(b"FILE")),
+ (b"e", b"export-marks", b"", _(b"new marks file to write"), _(b"FILE")),
+ (
+ b"A",
+ b"authormap",
+ b"",
+ _(b"remap usernames using this file"),
+ _(b"FILE"),
+ ),
+ ],
+ _(b"[OPTION]... [REV]..."),
+ helpcategory=command.CATEGORY_IMPORT_EXPORT,
+)
+def fastexport(ui, repo, *revs, **opts):
+ """export repository as git fast-import stream
+
+ This command lets you dump a repository as a human-readable text stream.
+ It can be piped into corresponding import routines like "git fast-import".
+ Incremental dumps can be created by using marks files.
+ """
+ opts = pycompat.byteskwargs(opts)
+
+ revs += tuple(opts.get(b"rev", []))
+ if not revs:
+ revs = scmutil.revrange(repo, [b":"])
+ else:
+ revs = scmutil.revrange(repo, revs)
+ if not revs:
+ raise error.Abort(_(b"no revisions matched"))
+ authorfile = opts.get(b"authormap")
+ if authorfile:
+ authormap = convcmd.readauthormap(ui, authorfile)
+ else:
+ authormap = {}
+
+ import_marks = opts.get(b"import_marks")
+ marks = {}
+ if import_marks:
+ with open(import_marks, "rb") as import_marks_file:
+ for line in import_marks_file:
+ line = line.strip()
+ if not isrev.match(line) or line in marks:
+ raise error.Abort(_(b"Corrupted marks file"))
+ marks[line] = len(marks) + 1
+
+ revs.sort()
+ with ui.makeprogress(
+ _(b"exporting"), unit=_(b"revisions"), total=len(revs)
+ ) as progress:
+ for rev in revs:
+ export_commit(ui, repo, rev, marks, authormap)
+ progress.increment()
+
+ export_marks = opts.get(b"export_marks")
+ if export_marks:
+ with open(export_marks, "wb") as export_marks_file:
+ output_marks = [None] * len(marks)
+ for k, v in marks.items():
+ output_marks[v - 1] = k
+ for k in output_marks:
+ export_marks_file.write(k + b"\n")
--- a/hgext/fetch.py Mon Apr 13 16:30:13 2020 +0300
+++ b/hgext/fetch.py Thu Apr 16 22:51:09 2020 +0530
@@ -171,11 +171,11 @@
% (repo.changelog.rev(firstparent), short(firstparent))
)
hg.clean(repo, firstparent)
+ p2ctx = repo[secondparent]
ui.status(
- _(b'merging with %d:%s\n')
- % (repo.changelog.rev(secondparent), short(secondparent))
+ _(b'merging with %d:%s\n') % (p2ctx.rev(), short(secondparent))
)
- err = hg.merge(repo, secondparent, remind=False)
+ err = hg.merge(p2ctx, remind=False)
if not err:
# we don't translate commit messages
--- a/hgext/fix.py Mon Apr 13 16:30:13 2020 +0300
+++ b/hgext/fix.py Thu Apr 16 22:51:09 2020 +0530
@@ -213,7 +213,14 @@
),
_(b'REV'),
)
-revopt = (b'r', b'rev', [], _(b'revisions to fix'), _(b'REV'))
+revopt = (b'r', b'rev', [], _(b'revisions to fix (ADVANCED)'), _(b'REV'))
+sourceopt = (
+ b's',
+ b'source',
+ [],
+ _(b'fix the specified revisions and their descendants'),
+ _(b'REV'),
+)
wdiropt = (b'w', b'working-dir', False, _(b'fix the working directory'))
wholeopt = (b'', b'whole', False, _(b'always fix every line of a file'))
usage = _(b'[OPTION]... [FILE]...')
@@ -221,7 +228,7 @@
@command(
b'fix',
- [allopt, baseopt, revopt, wdiropt, wholeopt],
+ [allopt, baseopt, revopt, sourceopt, wdiropt, wholeopt],
usage,
helpcategory=command.CATEGORY_FILE_CONTENTS,
)
@@ -249,10 +256,11 @@
override this default behavior, though it is not usually desirable to do so.
"""
opts = pycompat.byteskwargs(opts)
- cmdutil.check_at_most_one_arg(opts, b'all', b'rev')
- if opts[b'all']:
- opts[b'rev'] = [b'not public() and not obsolete()']
- opts[b'working_dir'] = True
+ cmdutil.check_at_most_one_arg(opts, b'all', b'source', b'rev')
+ cmdutil.check_incompatible_arguments(
+ opts, b'working_dir', [b'all', b'source']
+ )
+
with repo.wlock(), repo.lock(), repo.transaction(b'fix'):
revstofix = getrevstofix(ui, repo, opts)
basectxs = getbasectxs(repo, opts, revstofix)
@@ -398,16 +406,28 @@
def getrevstofix(ui, repo, opts):
"""Returns the set of revision numbers that should be fixed"""
- revs = set(scmutil.revrange(repo, opts[b'rev']))
+ if opts[b'all']:
+ revs = repo.revs(b'(not public() and not obsolete()) or wdir()')
+ elif opts[b'source']:
+ source_revs = scmutil.revrange(repo, opts[b'source'])
+ revs = set(repo.revs(b'%ld::', source_revs))
+ if wdirrev in source_revs:
+ # `wdir()::` is currently empty, so manually add wdir
+ revs.add(wdirrev)
+ if repo[b'.'].rev() in revs:
+ revs.add(wdirrev)
+ else:
+ revs = set(scmutil.revrange(repo, opts[b'rev']))
+ if opts.get(b'working_dir'):
+ revs.add(wdirrev)
for rev in revs:
checkfixablectx(ui, repo, repo[rev])
- if revs:
+ # Allow fixing only wdir() even if there's an unfinished operation
+ if not (len(revs) == 1 and wdirrev in revs):
cmdutil.checkunfinished(repo)
rewriteutil.precheck(repo, revs, b'fix')
- if opts.get(b'working_dir'):
- revs.add(wdirrev)
- if list(merge.mergestate.read(repo).unresolved()):
- raise error.Abort(b'unresolved conflicts', hint=b"use 'hg resolve'")
+ if wdirrev in revs and list(merge.mergestate.read(repo).unresolved()):
+ raise error.Abort(b'unresolved conflicts', hint=b"use 'hg resolve'")
if not revs:
raise error.Abort(
b'no changesets specified', hint=b'use --rev or --working-dir'
@@ -735,15 +755,7 @@
wctx = context.overlayworkingctx(repo)
wctx.setbase(repo[newp1node])
- merge.update(
- repo,
- ctx.rev(),
- branchmerge=False,
- force=True,
- ancestor=p1rev,
- mergeancestor=False,
- wc=wctx,
- )
+ merge.revert_to(ctx, wc=wctx)
copies.graftcopies(wctx, ctx, ctx.p1())
for path in filedata.keys():
--- a/hgext/fsmonitor/__init__.py Mon Apr 13 16:30:13 2020 +0300
+++ b/hgext/fsmonitor/__init__.py Thu Apr 16 22:51:09 2020 +0530
@@ -397,7 +397,7 @@
# for file paths which require normalization and we encounter a case
# collision, we store our own foldmap
if normalize:
- foldmap = dict((normcase(k), k) for k in results)
+ foldmap = {normcase(k): k for k in results}
switch_slashes = pycompat.ossep == b'\\'
# The order of the results is, strictly speaking, undefined.
@@ -459,22 +459,16 @@
if normalize:
# any notable files that have changed case will already be handled
# above, so just check membership in the foldmap
- notefiles = set(
- (
- normalize(f, True, True)
- for f in notefiles
- if normcase(f) not in foldmap
- )
- )
- visit = set(
- (
- f
+ notefiles = {
+ normalize(f, True, True)
for f in notefiles
- if (
- f not in results and matchfn(f) and (f in dmap or not ignore(f))
- )
- )
- )
+ if normcase(f) not in foldmap
+ }
+ visit = {
+ f
+ for f in notefiles
+ if (f not in results and matchfn(f) and (f in dmap or not ignore(f)))
+ }
if not fresh_instance:
if matchalways:
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/hgext/git/TODO.md Thu Apr 16 22:51:09 2020 +0530
@@ -0,0 +1,30 @@
+Octopus Merge Support
+=====================
+
+This will be moderately complicated, as we'll need to synthesize phony
+changeset entries to explode the octopus into "revisions" that only
+have two parents each. For today, we can probably just do something like
+
+ aaaaaaaaaaaaaaaaaaXX{20 bytes of exploded node's hex sha}
+
+where XX is a counter (so we could have as many as 255 parents in a
+git commit - more than I think we'd ever see.) That means that we can
+install some check in this extension to disallow checking out or
+otherwise interacting with the `aaaaaaaaaaaaaaaaaa` revisions.
+
+
+Interface Creation
+====================
+
+We at least need an interface definition for `changelog` in core that
+this extension can satisfy, and again for `basicstore`.
+
+
+Reason About Locking
+====================
+
+We should spend some time thinking hard about locking, especially on
+.git/index etc. We're probably adequately locking the _git_
+repository, but may not have enough locking correctness in places
+where hg does locking that git isn't aware of (notably the working
+copy, which I believe Git does not lock.)
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/hgext/git/__init__.py Thu Apr 16 22:51:09 2020 +0530
@@ -0,0 +1,318 @@
+"""grant Mercurial the ability to operate on Git repositories. (EXPERIMENTAL)
+
+This is currently super experimental. It probably will consume your
+firstborn a la Rumpelstiltskin, etc.
+"""
+
+from __future__ import absolute_import
+
+import os
+
+from mercurial.i18n import _
+
+from mercurial import (
+ commands,
+ error,
+ extensions,
+ localrepo,
+ pycompat,
+ scmutil,
+ store,
+ util,
+)
+
+from . import (
+ dirstate,
+ gitlog,
+ gitutil,
+ index,
+)
+
+# TODO: extract an interface for this in core
+class gitstore(object): # store.basicstore):
+ def __init__(self, path, vfstype):
+ self.vfs = vfstype(path)
+ self.path = self.vfs.base
+ self.createmode = store._calcmode(self.vfs)
+ # above lines should go away in favor of:
+ # super(gitstore, self).__init__(path, vfstype)
+
+ self.git = gitutil.get_pygit2().Repository(
+ os.path.normpath(os.path.join(path, b'..', b'.git'))
+ )
+ self._progress_factory = lambda *args, **kwargs: None
+
+ @util.propertycache
+ def _db(self):
+ # We lazy-create the database because we want to thread a
+ # progress callback down to the indexing process if it's
+ # required, and we don't have a ui handle in makestore().
+ return index.get_index(self.git, self._progress_factory)
+
+ def join(self, f):
+ """Fake store.join method for git repositories.
+
+ For the most part, store.join is used for @storecache
+ decorators to invalidate caches when various files
+ change. We'll map the ones we care about, and ignore the rest.
+ """
+ if f in (b'00changelog.i', b'00manifest.i'):
+ # This is close enough: in order for the changelog cache
+ # to be invalidated, HEAD will have to change.
+ return os.path.join(self.path, b'HEAD')
+ elif f == b'lock':
+ # TODO: we probably want to map this to a git lock, I
+ # suspect index.lock. We should figure out what the
+ # most-alike file is in git-land. For now we're risking
+ # bad concurrency errors if another git client is used.
+ return os.path.join(self.path, b'hgit-bogus-lock')
+ elif f in (b'obsstore', b'phaseroots', b'narrowspec', b'bookmarks'):
+ return os.path.join(self.path, b'..', b'.hg', f)
+ raise NotImplementedError(b'Need to pick file for %s.' % f)
+
+ def changelog(self, trypending):
+ # TODO we don't have a plan for trypending in hg's git support yet
+ return gitlog.changelog(self.git, self._db)
+
+ def manifestlog(self, repo, storenarrowmatch):
+ # TODO handle storenarrowmatch and figure out if we need the repo arg
+ return gitlog.manifestlog(self.git, self._db)
+
+ def invalidatecaches(self):
+ pass
+
+ def write(self, tr=None):
+ # normally this handles things like fncache writes, which we don't have
+ pass
+
+
+def _makestore(orig, requirements, storebasepath, vfstype):
+ if b'git' in requirements:
+ if not os.path.exists(os.path.join(storebasepath, b'..', b'.git')):
+ raise error.Abort(
+ _(
+ b'repository specified git format in '
+ b'.hg/requires but has no .git directory'
+ )
+ )
+ # Check for presence of pygit2 only here. The assumption is that we'll
+ # run this code iff we'll later need pygit2.
+ if gitutil.get_pygit2() is None:
+ raise error.Abort(
+ _(
+ b'the git extension requires the Python '
+ b'pygit2 library to be installed'
+ )
+ )
+
+ return gitstore(storebasepath, vfstype)
+ return orig(requirements, storebasepath, vfstype)
+
+
+class gitfilestorage(object):
+ def file(self, path):
+ if path[0:1] == b'/':
+ path = path[1:]
+ return gitlog.filelog(self.store.git, self.store._db, path)
+
+
+def _makefilestorage(orig, requirements, features, **kwargs):
+ store = kwargs['store']
+ if isinstance(store, gitstore):
+ return gitfilestorage
+ return orig(requirements, features, **kwargs)
+
+
+def _setupdothg(ui, path):
+ dothg = os.path.join(path, b'.hg')
+ if os.path.exists(dothg):
+ ui.warn(_(b'git repo already initialized for hg\n'))
+ else:
+ os.mkdir(os.path.join(path, b'.hg'))
+ # TODO is it ok to extend .git/info/exclude like this?
+ with open(
+ os.path.join(path, b'.git', b'info', b'exclude'), 'ab'
+ ) as exclude:
+ exclude.write(b'\n.hg\n')
+ with open(os.path.join(dothg, b'requires'), 'wb') as f:
+ f.write(b'git\n')
+
+
+_BMS_PREFIX = 'refs/heads/'
+
+
+class gitbmstore(object):
+ def __init__(self, gitrepo):
+ self.gitrepo = gitrepo
+ self._aclean = True
+ self._active = gitrepo.references['HEAD'] # git head, not mark
+
+ def __contains__(self, name):
+ return (
+ _BMS_PREFIX + pycompat.fsdecode(name)
+ ) in self.gitrepo.references
+
+ def __iter__(self):
+ for r in self.gitrepo.listall_references():
+ if r.startswith(_BMS_PREFIX):
+ yield pycompat.fsencode(r[len(_BMS_PREFIX) :])
+
+ def __getitem__(self, k):
+ return (
+ self.gitrepo.references[_BMS_PREFIX + pycompat.fsdecode(k)]
+ .peel()
+ .id.raw
+ )
+
+ def get(self, k, default=None):
+ try:
+ if k in self:
+ return self[k]
+ return default
+ except gitutil.get_pygit2().InvalidSpecError:
+ return default
+
+ @property
+ def active(self):
+ h = self.gitrepo.references['HEAD']
+ if not isinstance(h.target, str) or not h.target.startswith(
+ _BMS_PREFIX
+ ):
+ return None
+ return pycompat.fsencode(h.target[len(_BMS_PREFIX) :])
+
+ @active.setter
+ def active(self, mark):
+ githead = mark is not None and (_BMS_PREFIX + mark) or None
+ if githead is not None and githead not in self.gitrepo.references:
+ raise AssertionError(b'bookmark %s does not exist!' % mark)
+
+ self._active = githead
+ self._aclean = False
+
+ def _writeactive(self):
+ if self._aclean:
+ return
+ self.gitrepo.references.create('HEAD', self._active, True)
+ self._aclean = True
+
+ def names(self, node):
+ r = []
+ for ref in self.gitrepo.listall_references():
+ if not ref.startswith(_BMS_PREFIX):
+ continue
+ if self.gitrepo.references[ref].peel().id.raw != node:
+ continue
+ r.append(pycompat.fsencode(ref[len(_BMS_PREFIX) :]))
+ return r
+
+ # Cleanup opportunity: this is *identical* to core's bookmarks store.
+ def expandname(self, bname):
+ if bname == b'.':
+ if self.active:
+ return self.active
+ raise error.RepoLookupError(_(b"no active bookmark"))
+ return bname
+
+ def applychanges(self, repo, tr, changes):
+ """Apply a list of changes to bookmarks
+ """
+ # TODO: this should respect transactions, but that's going to
+ # require enlarging the gitbmstore to know how to do in-memory
+ # temporary writes and read those back prior to transaction
+ # finalization.
+ for name, node in changes:
+ if node is None:
+ self.gitrepo.references.delete(
+ _BMS_PREFIX + pycompat.fsdecode(name)
+ )
+ else:
+ self.gitrepo.references.create(
+ _BMS_PREFIX + pycompat.fsdecode(name),
+ gitutil.togitnode(node),
+ force=True,
+ )
+
+ def checkconflict(self, mark, force=False, target=None):
+ githead = _BMS_PREFIX + mark
+ cur = self.gitrepo.references['HEAD']
+ if githead in self.gitrepo.references and not force:
+ if target:
+ if self.gitrepo.references[githead] == target and target == cur:
+ # re-activating a bookmark
+ return []
+ # moving a bookmark - forward?
+ raise NotImplementedError
+ raise error.Abort(
+ _(b"bookmark '%s' already exists (use -f to force)") % mark
+ )
+ if len(mark) > 3 and not force:
+ try:
+ shadowhash = scmutil.isrevsymbol(self._repo, mark)
+ except error.LookupError: # ambiguous identifier
+ shadowhash = False
+ if shadowhash:
+ self._repo.ui.warn(
+ _(
+ b"bookmark %s matches a changeset hash\n"
+ b"(did you leave a -r out of an 'hg bookmark' "
+ b"command?)\n"
+ )
+ % mark
+ )
+ return []
+
+
+def init(orig, ui, dest=b'.', **opts):
+ if opts.get('git', False):
+ path = os.path.abspath(dest)
+ # TODO: walk up looking for the git repo
+ _setupdothg(ui, path)
+ return 0
+ return orig(ui, dest=dest, **opts)
+
+
+def reposetup(ui, repo):
+ if repo.local() and isinstance(repo.store, gitstore):
+ orig = repo.__class__
+ repo.store._progress_factory = repo.ui.makeprogress
+
+ class gitlocalrepo(orig):
+ def _makedirstate(self):
+ # TODO narrow support here
+ return dirstate.gitdirstate(
+ self.ui, self.vfs.base, self.store.git
+ )
+
+ def commit(self, *args, **kwargs):
+ ret = orig.commit(self, *args, **kwargs)
+ tid = self.store.git[gitutil.togitnode(ret)].tree.id
+ # DANGER! This will flush any writes staged to the
+ # index in Git, but we're sidestepping the index in a
+ # way that confuses git when we commit. Alas.
+ self.store.git.index.read_tree(tid)
+ self.store.git.index.write()
+ return ret
+
+ @property
+ def _bookmarks(self):
+ return gitbmstore(self.store.git)
+
+ repo.__class__ = gitlocalrepo
+ return repo
+
+
+def _featuresetup(ui, supported):
+ # don't die on seeing a repo with the git requirement
+ supported |= {b'git'}
+
+
+def extsetup(ui):
+ extensions.wrapfunction(localrepo, b'makestore', _makestore)
+ extensions.wrapfunction(localrepo, b'makefilestorage', _makefilestorage)
+ # Inject --git flag for `hg init`
+ entry = extensions.wrapcommand(commands.table, b'init', init)
+ entry[1].extend(
+ [(b'', b'git', None, b'set up a git repository instead of hg')]
+ )
+ localrepo.featuresetupfuncs.add(_featuresetup)
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/hgext/git/dirstate.py Thu Apr 16 22:51:09 2020 +0530
@@ -0,0 +1,307 @@
+from __future__ import absolute_import
+
+import contextlib
+import errno
+import os
+
+from mercurial import (
+ error,
+ extensions,
+ match as matchmod,
+ node as nodemod,
+ pycompat,
+ scmutil,
+ util,
+)
+from mercurial.interfaces import (
+ dirstate as intdirstate,
+ util as interfaceutil,
+)
+
+from . import gitutil
+
+pygit2 = gitutil.get_pygit2()
+
+
+def readpatternfile(orig, filepath, warn, sourceinfo=False):
+ if not (b'info/exclude' in filepath or filepath.endswith(b'.gitignore')):
+ return orig(filepath, warn, sourceinfo=False)
+ result = []
+ warnings = []
+ with open(filepath, b'rb') as fp:
+ for l in fp:
+ l = l.strip()
+ if not l or l.startswith(b'#'):
+ continue
+ if l.startswith(b'!'):
+ warnings.append(b'unsupported ignore pattern %s' % l)
+ continue
+ if l.startswith(b'/'):
+ result.append(b'rootglob:' + l[1:])
+ else:
+ result.append(b'relglob:' + l)
+ return result, warnings
+
+
+extensions.wrapfunction(matchmod, b'readpatternfile', readpatternfile)
+
+
+_STATUS_MAP = {}
+if pygit2:
+ _STATUS_MAP = {
+ pygit2.GIT_STATUS_CONFLICTED: b'm',
+ pygit2.GIT_STATUS_CURRENT: b'n',
+ pygit2.GIT_STATUS_IGNORED: b'?',
+ pygit2.GIT_STATUS_INDEX_DELETED: b'r',
+ pygit2.GIT_STATUS_INDEX_MODIFIED: b'n',
+ pygit2.GIT_STATUS_INDEX_NEW: b'a',
+ pygit2.GIT_STATUS_INDEX_RENAMED: b'a',
+ pygit2.GIT_STATUS_INDEX_TYPECHANGE: b'n',
+ pygit2.GIT_STATUS_WT_DELETED: b'r',
+ pygit2.GIT_STATUS_WT_MODIFIED: b'n',
+ pygit2.GIT_STATUS_WT_NEW: b'?',
+ pygit2.GIT_STATUS_WT_RENAMED: b'a',
+ pygit2.GIT_STATUS_WT_TYPECHANGE: b'n',
+ pygit2.GIT_STATUS_WT_UNREADABLE: b'?',
+ pygit2.GIT_STATUS_INDEX_MODIFIED | pygit2.GIT_STATUS_WT_MODIFIED: b'm',
+ }
+
+
+@interfaceutil.implementer(intdirstate.idirstate)
+class gitdirstate(object):
+ def __init__(self, ui, root, gitrepo):
+ self._ui = ui
+ self._root = os.path.dirname(root)
+ self.git = gitrepo
+ self._plchangecallbacks = {}
+
+ def p1(self):
+ try:
+ return self.git.head.peel().id.raw
+ except pygit2.GitError:
+ # Typically happens when peeling HEAD fails, as in an
+ # empty repository.
+ return nodemod.nullid
+
+ def p2(self):
+ # TODO: MERGE_HEAD? something like that, right?
+ return nodemod.nullid
+
+ def setparents(self, p1, p2=nodemod.nullid):
+ assert p2 == nodemod.nullid, b'TODO merging support'
+ self.git.head.set_target(gitutil.togitnode(p1))
+
+ @util.propertycache
+ def identity(self):
+ return util.filestat.frompath(
+ os.path.join(self._root, b'.git', b'index')
+ )
+
+ def branch(self):
+ return b'default'
+
+ def parents(self):
+ # TODO how on earth do we find p2 if a merge is in flight?
+ return self.p1(), nodemod.nullid
+
+ def __iter__(self):
+ return (pycompat.fsencode(f.path) for f in self.git.index)
+
+ def items(self):
+ for ie in self.git.index:
+ yield ie.path, None # value should be a dirstatetuple
+
+ # py2,3 compat forward
+ iteritems = items
+
+ def __getitem__(self, filename):
+ try:
+ gs = self.git.status_file(filename)
+ except KeyError:
+ return b'?'
+ return _STATUS_MAP[gs]
+
+ def __contains__(self, filename):
+ try:
+ gs = self.git.status_file(filename)
+ return _STATUS_MAP[gs] != b'?'
+ except KeyError:
+ return False
+
+ def status(self, match, subrepos, ignored, clean, unknown):
+ # TODO handling of clean files - can we get that from git.status()?
+ modified, added, removed, deleted, unknown, ignored, clean = (
+ [],
+ [],
+ [],
+ [],
+ [],
+ [],
+ [],
+ )
+ gstatus = self.git.status()
+ for path, status in gstatus.items():
+ path = pycompat.fsencode(path)
+ if status == pygit2.GIT_STATUS_IGNORED:
+ if path.endswith(b'/'):
+ continue
+ ignored.append(path)
+ elif status in (
+ pygit2.GIT_STATUS_WT_MODIFIED,
+ pygit2.GIT_STATUS_INDEX_MODIFIED,
+ pygit2.GIT_STATUS_WT_MODIFIED
+ | pygit2.GIT_STATUS_INDEX_MODIFIED,
+ ):
+ modified.append(path)
+ elif status == pygit2.GIT_STATUS_INDEX_NEW:
+ added.append(path)
+ elif status == pygit2.GIT_STATUS_WT_NEW:
+ unknown.append(path)
+ elif status == pygit2.GIT_STATUS_WT_DELETED:
+ deleted.append(path)
+ elif status == pygit2.GIT_STATUS_INDEX_DELETED:
+ removed.append(path)
+ else:
+ raise error.Abort(
+ b'unhandled case: status for %r is %r' % (path, status)
+ )
+
+ # TODO are we really always sure of status here?
+ return (
+ False,
+ scmutil.status(
+ modified, added, removed, deleted, unknown, ignored, clean
+ ),
+ )
+
+ def flagfunc(self, buildfallback):
+ # TODO we can do better
+ return buildfallback()
+
+ def getcwd(self):
+ # TODO is this a good way to do this?
+ return os.path.dirname(
+ os.path.dirname(pycompat.fsencode(self.git.path))
+ )
+
+ def normalize(self, path):
+ normed = util.normcase(path)
+ assert normed == path, b"TODO handling of case folding: %s != %s" % (
+ normed,
+ path,
+ )
+ return path
+
+ @property
+ def _checklink(self):
+ return util.checklink(os.path.dirname(pycompat.fsencode(self.git.path)))
+
+ def copies(self):
+ # TODO support copies?
+ return {}
+
+ # # TODO what the heck is this
+ _filecache = set()
+
+ def pendingparentchange(self):
+ # TODO: we need to implement the context manager bits and
+ # correctly stage/revert index edits.
+ return False
+
+ def write(self, tr):
+ # TODO: call parent change callbacks
+
+ if tr:
+
+ def writeinner(category):
+ self.git.index.write()
+
+ tr.addpending(b'gitdirstate', writeinner)
+ else:
+ self.git.index.write()
+
+ def pathto(self, f, cwd=None):
+ if cwd is None:
+ cwd = self.getcwd()
+ # TODO core dirstate does something about slashes here
+ assert isinstance(f, bytes)
+ r = util.pathto(self._root, cwd, f)
+ return r
+
+ def matches(self, match):
+ for x in self.git.index:
+ p = pycompat.fsencode(x.path)
+ if match(p):
+ yield p
+
+ def normal(self, f, parentfiledata=None):
+ """Mark a file normal and clean."""
+ # TODO: for now we just let libgit2 re-stat the file. We can
+ # clearly do better.
+
+ def normallookup(self, f):
+ """Mark a file normal, but possibly dirty."""
+ # TODO: for now we just let libgit2 re-stat the file. We can
+ # clearly do better.
+
+ def walk(self, match, subrepos, unknown, ignored, full=True):
+ # TODO: we need to use .status() and not iterate the index,
+ # because the index doesn't force a re-walk and so `hg add` of
+ # a new file without an intervening call to status will
+ # silently do nothing.
+ r = {}
+ cwd = self.getcwd()
+ for path, status in self.git.status().items():
+ if path.startswith('.hg/'):
+ continue
+ path = pycompat.fsencode(path)
+ if not match(path):
+ continue
+ # TODO construct the stat info from the status object?
+ try:
+ s = os.stat(os.path.join(cwd, path))
+ except OSError as e:
+ if e.errno != errno.ENOENT:
+ raise
+ continue
+ r[path] = s
+ return r
+
+ def savebackup(self, tr, backupname):
+ # TODO: figure out a strategy for saving index backups.
+ pass
+
+ def restorebackup(self, tr, backupname):
+ # TODO: figure out a strategy for saving index backups.
+ pass
+
+ def add(self, f):
+ self.git.index.add(pycompat.fsdecode(f))
+
+ def drop(self, f):
+ self.git.index.remove(pycompat.fsdecode(f))
+
+ def remove(self, f):
+ self.git.index.remove(pycompat.fsdecode(f))
+
+ def copied(self, path):
+ # TODO: track copies?
+ return None
+
+ @contextlib.contextmanager
+ def parentchange(self):
+ # TODO: track this maybe?
+ yield
+
+ def addparentchangecallback(self, category, callback):
+ # TODO: should this be added to the dirstate interface?
+ self._plchangecallbacks[category] = callback
+
+ def clearbackup(self, tr, backupname):
+ # TODO
+ pass
+
+ def setbranch(self, branch):
+ raise error.Abort(
+ b'git repos do not support branches. try using bookmarks'
+ )
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/hgext/git/gitlog.py Thu Apr 16 22:51:09 2020 +0530
@@ -0,0 +1,466 @@
+from __future__ import absolute_import
+
+from mercurial.i18n import _
+
+from mercurial import (
+ ancestor,
+ changelog as hgchangelog,
+ dagop,
+ encoding,
+ error,
+ manifest,
+ node as nodemod,
+ pycompat,
+)
+from mercurial.interfaces import (
+ repository,
+ util as interfaceutil,
+)
+from mercurial.utils import stringutil
+from . import (
+ gitutil,
+ index,
+ manifest as gitmanifest,
+)
+
+pygit2 = gitutil.get_pygit2()
+
+
+class baselog(object): # revlog.revlog):
+ """Common implementations between changelog and manifestlog."""
+
+ def __init__(self, gr, db):
+ self.gitrepo = gr
+ self._db = db
+
+ def __len__(self):
+ return int(
+ self._db.execute('SELECT COUNT(*) FROM changelog').fetchone()[0]
+ )
+
+ def rev(self, n):
+ if n == nodemod.nullid:
+ return -1
+ t = self._db.execute(
+ 'SELECT rev FROM changelog WHERE node = ?', (gitutil.togitnode(n),)
+ ).fetchone()
+ if t is None:
+ raise error.LookupError(n, b'00changelog.i', _(b'no node %d'))
+ return t[0]
+
+ def node(self, r):
+ if r == nodemod.nullrev:
+ return nodemod.nullid
+ t = self._db.execute(
+ 'SELECT node FROM changelog WHERE rev = ?', (r,)
+ ).fetchone()
+ if t is None:
+ raise error.LookupError(r, b'00changelog.i', _(b'no node'))
+ return nodemod.bin(t[0])
+
+ def hasnode(self, n):
+ t = self._db.execute(
+ 'SELECT node FROM changelog WHERE node = ?', (n,)
+ ).fetchone()
+ return t is not None
+
+
+class baselogindex(object):
+    """Minimal stand-in for a revlog index, backed by a baselog."""
+
+    def __init__(self, log):
+        self._log = log
+
+    def has_node(self, n):
+        # rev() returns -1 only for nullid. NOTE(review): for an
+        # unknown node rev() raises LookupError rather than returning
+        # -1, so this raises instead of returning False — confirm
+        # callers expect that.
+        return self._log.rev(n) != -1
+
+    def __len__(self):
+        return len(self._log)
+
+    def __getitem__(self, idx):
+        # Shape mimics a raw revlog index entry tuple.
+        p1rev, p2rev = self._log.parentrevs(idx)
+        # TODO: it's messy that the index leaks so far out of the
+        # storage layer that we have to implement things like reading
+        # this raw tuple, which exposes revlog internals.
+        return (
+            # Pretend offset is just the index, since we don't really care.
+            idx,
+            # Same with lengths
+            idx,  # length
+            idx,  # rawsize
+            -1,  # delta base
+            idx,  # linkrev TODO is this right?
+            p1rev,
+            p2rev,
+            self._log.node(idx),
+        )
+
+
+# TODO: an interface for the changelog type?
+class changelog(baselog):
+ def __contains__(self, rev):
+ try:
+ self.node(rev)
+ return True
+ except error.LookupError:
+ return False
+
+ def __iter__(self):
+ return iter(pycompat.xrange(len(self)))
+
+ @property
+ def filteredrevs(self):
+ # TODO: we should probably add a refs/hg/ namespace for hidden
+ # heads etc, but that's an idea for later.
+ return set()
+
+ @property
+ def index(self):
+ return baselogindex(self)
+
+ @property
+ def nodemap(self):
+ r = {
+ nodemod.bin(v[0]): v[1]
+ for v in self._db.execute('SELECT node, rev FROM changelog')
+ }
+ r[nodemod.nullid] = nodemod.nullrev
+ return r
+
+ def tip(self):
+ t = self._db.execute(
+ 'SELECT node FROM changelog ORDER BY rev DESC LIMIT 1'
+ ).fetchone()
+ if t:
+ return nodemod.bin(t[0])
+ return nodemod.nullid
+
+ def revs(self, start=0, stop=None):
+ if stop is None:
+ stop = self.tip()
+ t = self._db.execute(
+ 'SELECT rev FROM changelog '
+ 'WHERE rev >= ? AND rev <= ? '
+ 'ORDER BY REV ASC',
+ (start, stop),
+ )
+ return (int(r[0]) for r in t)
+
+ def _partialmatch(self, id):
+ if nodemod.wdirhex.startswith(id):
+ raise error.WdirUnsupported
+ candidates = [
+ nodemod.bin(x[0])
+ for x in self._db.execute(
+ 'SELECT node FROM changelog WHERE node LIKE ?', (id + b'%',)
+ )
+ ]
+ if nodemod.nullhex.startswith(id):
+ candidates.append(nodemod.nullid)
+ if len(candidates) > 1:
+ raise error.AmbiguousPrefixLookupError(
+ id, b'00changelog.i', _(b'ambiguous identifier')
+ )
+ if candidates:
+ return candidates[0]
+ return None
+
+ def flags(self, rev):
+ return 0
+
+ def shortest(self, node, minlength=1):
+ nodehex = nodemod.hex(node)
+ for attempt in pycompat.xrange(minlength, len(nodehex) + 1):
+ candidate = nodehex[:attempt]
+ matches = int(
+ self._db.execute(
+ 'SELECT COUNT(*) FROM changelog WHERE node LIKE ?',
+ (pycompat.sysstr(candidate + b'%'),),
+ ).fetchone()[0]
+ )
+ if matches == 1:
+ return candidate
+ return nodehex
+
+ def headrevs(self, revs=None):
+ realheads = [
+ int(x[0])
+ for x in self._db.execute(
+ 'SELECT rev FROM changelog '
+ 'INNER JOIN heads ON changelog.node = heads.node'
+ )
+ ]
+ if revs:
+ return sorted([r for r in revs if r in realheads])
+ return sorted(realheads)
+
+ def changelogrevision(self, nodeorrev):
+ # Ensure we have a node id
+ if isinstance(nodeorrev, int):
+ n = self.node(nodeorrev)
+ else:
+ n = nodeorrev
+ # handle looking up nullid
+ if n == nodemod.nullid:
+ return hgchangelog._changelogrevision(extra={})
+ hn = gitutil.togitnode(n)
+ # We've got a real commit!
+ files = [
+ r[0]
+ for r in self._db.execute(
+ 'SELECT filename FROM changedfiles '
+ 'WHERE node = ? and filenode != ?',
+ (hn, gitutil.nullgit),
+ )
+ ]
+ filesremoved = [
+ r[0]
+ for r in self._db.execute(
+ 'SELECT filename FROM changedfiles '
+ 'WHERE node = ? and filenode = ?',
+ (hn, nodemod.nullhex),
+ )
+ ]
+ c = self.gitrepo[hn]
+ return hgchangelog._changelogrevision(
+ manifest=n, # pretend manifest the same as the commit node
+ user=b'%s <%s>'
+ % (c.author.name.encode('utf8'), c.author.email.encode('utf8')),
+ date=(c.author.time, -c.author.offset * 60),
+ files=files,
+ # TODO filesadded in the index
+ filesremoved=filesremoved,
+ description=c.message.encode('utf8'),
+ # TODO do we want to handle extra? how?
+ extra={b'branch': b'default'},
+ )
+
+ def ancestors(self, revs, stoprev=0, inclusive=False):
+ revs = list(revs)
+ tip = self.rev(self.tip())
+ for r in revs:
+ if r > tip:
+ raise IndexError(b'Invalid rev %r' % r)
+ return ancestor.lazyancestors(
+ self.parentrevs, revs, stoprev=stoprev, inclusive=inclusive
+ )
+
+ # Cleanup opportunity: this is *identical* to the revlog.py version
+ def descendants(self, revs):
+ return dagop.descendantrevs(revs, self.revs, self.parentrevs)
+
+ def reachableroots(self, minroot, heads, roots, includepath=False):
+ return dagop._reachablerootspure(
+ self.parentrevs, minroot, roots, heads, includepath
+ )
+
+ # Cleanup opportunity: this is *identical* to the revlog.py version
+ def isancestor(self, a, b):
+ a, b = self.rev(a), self.rev(b)
+ return self.isancestorrev(a, b)
+
+ # Cleanup opportunity: this is *identical* to the revlog.py version
+ def isancestorrev(self, a, b):
+ if a == nodemod.nullrev:
+ return True
+ elif a == b:
+ return True
+ elif a > b:
+ return False
+ return bool(self.reachableroots(a, [b], [a], includepath=False))
+
+ def parentrevs(self, rev):
+ n = self.node(rev)
+ hn = gitutil.togitnode(n)
+ c = self.gitrepo[hn]
+ p1 = p2 = nodemod.nullrev
+ if c.parents:
+ p1 = self.rev(c.parents[0].id.raw)
+ if len(c.parents) > 2:
+ raise error.Abort(b'TODO octopus merge handling')
+ if len(c.parents) == 2:
+ p2 = self.rev(c.parents[1].id.raw)
+ return p1, p2
+
+ # Private method is used at least by the tags code.
+ _uncheckedparentrevs = parentrevs
+
+ def commonancestorsheads(self, a, b):
+ # TODO the revlog verson of this has a C path, so we probably
+ # need to optimize this...
+ a, b = self.rev(a), self.rev(b)
+ return [
+ self.node(n)
+ for n in ancestor.commonancestorsheads(self.parentrevs, a, b)
+ ]
+
+ def branchinfo(self, rev):
+ """Git doesn't do named branches, so just put everything on default."""
+ return b'default', False
+
+ def delayupdate(self, tr):
+ # TODO: I think we can elide this because we're just dropping
+ # an object in the git repo?
+ pass
+
+ def add(
+ self,
+ manifest,
+ files,
+ desc,
+ transaction,
+ p1,
+ p2,
+ user,
+ date=None,
+ extra=None,
+ p1copies=None,
+ p2copies=None,
+ filesadded=None,
+ filesremoved=None,
+ ):
+ parents = []
+ hp1, hp2 = gitutil.togitnode(p1), gitutil.togitnode(p2)
+ if p1 != nodemod.nullid:
+ parents.append(hp1)
+ if p2 and p2 != nodemod.nullid:
+ parents.append(hp2)
+ assert date is not None
+ timestamp, tz = date
+ sig = pygit2.Signature(
+ encoding.unifromlocal(stringutil.person(user)),
+ encoding.unifromlocal(stringutil.email(user)),
+ timestamp,
+ -(tz // 60),
+ )
+ oid = self.gitrepo.create_commit(
+ None, sig, sig, desc, gitutil.togitnode(manifest), parents
+ )
+ # Set up an internal reference to force the commit into the
+ # changelog. Hypothetically, we could even use this refs/hg/
+ # namespace to allow for anonymous heads on git repos, which
+ # would be neat.
+ self.gitrepo.references.create(
+ 'refs/hg/internal/latest-commit', oid, force=True
+ )
+ # Reindex now to pick up changes. We omit the progress
+ # callback because this will be very quick.
+ index._index_repo(self.gitrepo, self._db)
+ return oid.raw
+
+
+class manifestlog(baselog):
+    """Manifest access backed by git commit trees."""
+
+    def __getitem__(self, node):
+        return self.get(b'', node)
+
+    def get(self, relpath, node):
+        """Return a manifest ctx for the tree at `relpath` in `node`.
+
+        NOTE(review): `node` is a *commit* node — this extension
+        pretends manifest ids are commit ids (see
+        changelog.changelogrevision) — confirm.
+        """
+        if node == nodemod.nullid:
+            # TODO: this should almost certainly be a memgittreemanifestctx
+            return manifest.memtreemanifestctx(self, relpath)
+        commit = self.gitrepo[gitutil.togitnode(node)]
+        t = commit.tree
+        if relpath:
+            parts = relpath.split(b'/')
+            for p in parts:
+                # NOTE(review): pygit2 tree lookups elsewhere in this
+                # series use str keys; `p` is bytes here — verify
+                # subdirectory access actually works.
+                te = t[p]
+                t = self.gitrepo[te.id]
+        return gitmanifest.gittreemanifestctx(self.gitrepo, t)
+
+
+@interfaceutil.implementer(repository.ifilestorage)
+class filelog(baselog):
+    """File storage backed by the sqlite commit index plus git blobs."""
+
+    def __init__(self, gr, db, path):
+        super(filelog, self).__init__(gr, db)
+        assert isinstance(path, bytes)
+        self.path = path
+
+    def read(self, node):
+        # nullid has no backing blob; everything else is a git blob.
+        if node == nodemod.nullid:
+            return b''
+        return self.gitrepo[gitutil.togitnode(node)].data
+
+    def lookup(self, node):
+        # Accept binary (20) or hex (40) nodes; anything else is
+        # assumed to be a revnum, which isn't supported yet.
+        if len(node) not in (20, 40):
+            node = int(node)
+        if isinstance(node, int):
+            assert False, b'todo revnums for nodes'
+        if len(node) == 40:
+            node = nodemod.bin(node)
+        hnode = gitutil.togitnode(node)
+        if hnode in self.gitrepo:
+            return node
+        raise error.LookupError(self.path, node, _(b'no match found'))
+
+    def cmp(self, node, text):
+        """Returns True if text is different than content at `node`."""
+        return self.read(node) != text
+
+    def add(self, text, meta, transaction, link, p1=None, p2=None):
+        assert not meta  # Should we even try to handle this?
+        return self.gitrepo.create_blob(text).raw
+
+    def __iter__(self):
+        # Yield the changelog revs of commits that touched this file
+        # (excluding deletions, whose filenode is the null sentinel).
+        for clrev in self._db.execute(
+            '''
+SELECT rev FROM changelog
+INNER JOIN changedfiles ON changelog.node = changedfiles.node
+WHERE changedfiles.filename = ? AND changedfiles.filenode != ?
+        ''',
+            (pycompat.fsdecode(self.path), gitutil.nullgit),
+        ):
+            yield clrev[0]
+
+    def linkrev(self, fr):
+        # File revs are identified with the changelog rev that
+        # introduced them, so linkrev is the identity function.
+        return fr
+
+    def rev(self, node):
+        row = self._db.execute(
+            '''
+SELECT rev FROM changelog
+INNER JOIN changedfiles ON changelog.node = changedfiles.node
+WHERE changedfiles.filename = ? AND changedfiles.filenode = ?''',
+            (pycompat.fsdecode(self.path), gitutil.togitnode(node)),
+        ).fetchone()
+        if row is None:
+            raise error.LookupError(self.path, node, _(b'no such node'))
+        return int(row[0])
+
+    def node(self, rev):
+        maybe = self._db.execute(
+            '''SELECT filenode FROM changedfiles
+INNER JOIN changelog ON changelog.node = changedfiles.node
+WHERE changelog.rev = ? AND filename = ?
+''',
+            (rev, pycompat.fsdecode(self.path)),
+        ).fetchone()
+        if maybe is None:
+            # NOTE(review): %r on a bytes path renders as "b'...'" on
+            # py3 — cosmetic only, this is an error message.
+            raise IndexError('gitlog %r out of range %d' % (self.path, rev))
+        return nodemod.bin(maybe[0])
+
+    def parents(self, node):
+        gn = gitutil.togitnode(node)
+        gp = pycompat.fsdecode(self.path)
+        ps = []
+        # NOTE(review): fetchone() returns None for an unknown
+        # node/path pair, which makes this loop raise TypeError rather
+        # than LookupError — confirm callers never hit that.
+        for p in self._db.execute(
+            '''SELECT p1filenode, p2filenode FROM changedfiles
+WHERE filenode = ? AND filename = ?
+''',
+            (gn, gp),
+        ).fetchone():
+            if p is None:
+                commit = self._db.execute(
+                    "SELECT node FROM changedfiles "
+                    "WHERE filenode = ? AND filename = ?",
+                    (gn, gp),
+                ).fetchone()[0]
+                # This filelog is missing some data. Build the
+                # filelog, then recurse (which will always find data).
+                if pycompat.ispy3:
+                    commit = commit.decode('ascii')
+                index.fill_in_filelog(self.gitrepo, self._db, commit, gp, gn)
+                return self.parents(node)
+            else:
+                ps.append(nodemod.bin(p))
+        return ps
+
+    def renamed(self, node):
+        # TODO: renames/copies
+        return False
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/hgext/git/gitutil.py Thu Apr 16 22:51:09 2020 +0530
@@ -0,0 +1,40 @@
+"""utilities to assist in working with pygit2"""
+from __future__ import absolute_import
+
+from mercurial.node import bin, hex, nullid
+
+from mercurial import pycompat
+
+pygit2_module = None
+
+
+def get_pygit2():
+    """Import and cache the pygit2 module, or return None if unusable.
+
+    A pygit2 too old to expose InvalidSpecError is treated the same
+    as a missing pygit2.
+    """
+    global pygit2_module
+    if pygit2_module is None:
+        try:
+            import pygit2 as pygit2_module
+
+            # Probe for a symbol that only exists in versions new
+            # enough for this extension.
+            pygit2_module.InvalidSpecError
+        except (ImportError, AttributeError):
+            pass
+    return pygit2_module
+
+
+def togitnode(n):
+    """Wrapper to convert a Mercurial binary node to a unicode hexlified node.
+
+    pygit2 and sqlite both need nodes as strings, not bytes.
+    """
+    # Binary SHA-1 nodes are exactly 20 bytes.
+    assert len(n) == 20
+    return pycompat.sysstr(hex(n))
+
+
+def fromgitnode(n):
+    """Opposite of togitnode."""
+    # Hexlified SHA-1 nodes are exactly 40 characters.
+    assert len(n) == 40
+    if pycompat.ispy3:
+        # bin() wants bytes on Python 3.
+        return bin(n.encode('ascii'))
+    return bin(n)
+
+
+nullgit = togitnode(nullid)
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/hgext/git/index.py Thu Apr 16 22:51:09 2020 +0530
@@ -0,0 +1,350 @@
+from __future__ import absolute_import
+
+import collections
+import os
+import sqlite3
+
+from mercurial.i18n import _
+
+from mercurial import (
+ encoding,
+ error,
+ node as nodemod,
+ pycompat,
+)
+
+from . import gitutil
+
+
+pygit2 = gitutil.get_pygit2()
+
+_CURRENT_SCHEMA_VERSION = 1
+_SCHEMA = (
+ """
+CREATE TABLE refs (
+ -- node and name are unique together. There may be more than one name for
+ -- a given node, and there may be no name at all for a given node (in the
+ -- case of an anonymous hg head).
+ node TEXT NOT NULL,
+ name TEXT
+);
+
+-- The "possible heads" of the repository, which we use to figure out
+-- if we need to re-walk the changelog.
+CREATE TABLE possible_heads (
+ node TEXT NOT NULL
+);
+
+-- The topological heads of the changelog, which hg depends on.
+CREATE TABLE heads (
+ node TEXT NOT NULL
+);
+
+-- A total ordering of the changelog
+CREATE TABLE changelog (
+ rev INTEGER NOT NULL PRIMARY KEY,
+ node TEXT NOT NULL,
+ p1 TEXT,
+ p2 TEXT
+);
+
+CREATE UNIQUE INDEX changelog_node_idx ON changelog(node);
+CREATE UNIQUE INDEX changelog_node_rev_idx ON changelog(rev, node);
+
+-- Changed files for each commit, which lets us dynamically build
+-- filelogs.
+CREATE TABLE changedfiles (
+ node TEXT NOT NULL,
+ filename TEXT NOT NULL,
+ -- 40 zeroes for deletions
+ filenode TEXT NOT NULL,
+-- to handle filelog parentage:
+ p1node TEXT,
+ p1filenode TEXT,
+ p2node TEXT,
+ p2filenode TEXT
+);
+
+CREATE INDEX changedfiles_nodes_idx
+ ON changedfiles(node);
+
+PRAGMA user_version=%d
+"""
+ % _CURRENT_SCHEMA_VERSION
+)
+
+
+def _createdb(path):
+ # print('open db', path)
+ # import traceback
+ # traceback.print_stack()
+ db = sqlite3.connect(encoding.strfromlocal(path))
+ db.text_factory = bytes
+
+ res = db.execute('PRAGMA user_version').fetchone()[0]
+
+ # New database.
+ if res == 0:
+ for statement in _SCHEMA.split(';'):
+ db.execute(statement.strip())
+
+ db.commit()
+
+ elif res == _CURRENT_SCHEMA_VERSION:
+ pass
+
+ else:
+ raise error.Abort(_(b'sqlite database has unrecognized version'))
+
+ db.execute('PRAGMA journal_mode=WAL')
+
+ return db
+
+
+_OUR_ORDER = ()
+if pygit2:
+ _OUR_ORDER = (
+ pygit2.GIT_SORT_TOPOLOGICAL
+ | pygit2.GIT_SORT_TIME
+ | pygit2.GIT_SORT_REVERSE
+ )
+
+_DIFF_FLAGS = 1 << 21 # GIT_DIFF_FORCE_BINARY, which isn't exposed by pygit2
+
+
+def _find_nearest_ancestor_introducing_node(
+ db, gitrepo, file_path, walk_start, filenode
+):
+ """Find the nearest ancestor that introduces a file node.
+
+ Args:
+ db: a handle to our sqlite database.
+ gitrepo: A pygit2.Repository instance.
+ file_path: the path of a file in the repo
+ walk_start: a pygit2.Oid that is a commit where we should start walking
+ for our nearest ancestor.
+
+ Returns:
+ A hexlified SHA that is the commit ID of the next-nearest parent.
+ """
+ assert isinstance(file_path, str), 'file_path must be str, got %r' % type(
+ file_path
+ )
+ assert isinstance(filenode, str), 'filenode must be str, got %r' % type(
+ filenode
+ )
+ parent_options = {
+ row[0].decode('ascii')
+ for row in db.execute(
+ 'SELECT node FROM changedfiles '
+ 'WHERE filename = ? AND filenode = ?',
+ (file_path, filenode),
+ )
+ }
+ inner_walker = gitrepo.walk(walk_start, _OUR_ORDER)
+ for w in inner_walker:
+ if w.id.hex in parent_options:
+ return w.id.hex
+ raise error.ProgrammingError(
+ 'Unable to find introducing commit for %s node %s from %s',
+ (file_path, filenode, walk_start),
+ )
+
+
+def fill_in_filelog(gitrepo, db, startcommit, path, startfilenode):
+    """Given a starting commit and path, fill in a filelog's parent pointers.
+
+    Args:
+      gitrepo: a pygit2.Repository
+      db: a handle to our sqlite database
+      startcommit: a hexlified node id for the commit to start at
+      path: the path of the file whose parent pointers we should fill in.
+      startfilenode: the hexlified node id of the file at startcommit
+
+    TODO: make startfilenode optional
+    """
+    assert isinstance(
+        startcommit, str
+    ), 'startcommit must be str, got %r' % type(startcommit)
+    assert isinstance(
+        startfilenode, str
+    ), 'startfilenode must be str, got %r' % type(startfilenode)
+    # BFS over (commit node, file node) pairs whose parent pointers
+    # still need to be computed.
+    visit = collections.deque([(startcommit, startfilenode)])
+    while visit:
+        cnode, filenode = visit.popleft()
+        commit = gitrepo[cnode]
+        parents = []
+        for parent in commit.parents:
+            t = parent.tree
+            for comp in path.split('/'):
+                try:
+                    t = gitrepo[t[comp].id]
+                except KeyError:
+                    # Path doesn't exist in this parent.
+                    break
+            else:
+                introducer = _find_nearest_ancestor_introducing_node(
+                    db, gitrepo, path, parent.id, t.id.hex
+                )
+                parents.append((introducer, t.id.hex))
+        p1node = p1fnode = p2node = p2fnode = gitutil.nullgit
+        for par, parfnode in parents:
+            found = int(
+                db.execute(
+                    'SELECT COUNT(*) FROM changedfiles WHERE '
+                    'node = ? AND filename = ? AND filenode = ? AND '
+                    'p1node NOT NULL',
+                    (par, path, parfnode),
+                ).fetchone()[0]
+            )
+            if found == 0:
+                # Parent's own entry isn't filled in yet; queue it so
+                # its parent pointers get computed too.
+                assert par is not None
+                visit.append((par, parfnode))
+        if parents:
+            p1node, p1fnode = parents[0]
+        if len(parents) == 2:
+            p2node, p2fnode = parents[1]
+        if len(parents) > 2:
+            raise error.ProgrammingError(
+                b"git support can't handle octopus merges"
+            )
+        db.execute(
+            'UPDATE changedfiles SET '
+            'p1node = ?, p1filenode = ?, p2node = ?, p2filenode = ? '
+            'WHERE node = ? AND filename = ? AND filenode = ?',
+            (p1node, p1fnode, p2node, p2fnode, commit.id.hex, path, filenode),
+        )
+    db.commit()
+
+
+def _index_repo(gitrepo, db, progress_factory=lambda *args, **kwargs: None):
+ # Identify all references so we can tell the walker to visit all of them.
+ all_refs = gitrepo.listall_references()
+ possible_heads = set()
+ prog = progress_factory(b'refs')
+ for pos, ref in enumerate(all_refs):
+ if prog is not None:
+ prog.update(pos)
+ if not (
+ ref.startswith('refs/heads/') # local branch
+ or ref.startswith('refs/tags/') # tag
+ or ref.startswith('refs/remotes/') # remote branch
+ or ref.startswith('refs/hg/') # from this extension
+ ):
+ continue
+ try:
+ start = gitrepo.lookup_reference(ref).peel(pygit2.GIT_OBJ_COMMIT)
+ except ValueError:
+ # No commit to be found, so we don't care for hg's purposes.
+ continue
+ possible_heads.add(start.id)
+ # Optimization: if the list of heads hasn't changed, don't
+ # reindex, the changelog. This doesn't matter on small
+ # repositories, but on even moderately deep histories (eg cpython)
+ # this is a very important performance win.
+ #
+ # TODO: we should figure out how to incrementally index history
+ # (preferably by detecting rewinds!) so that we don't have to do a
+ # full changelog walk every time a new commit is created.
+ cache_heads = {x[0] for x in db.execute('SELECT node FROM possible_heads')}
+ walker = None
+ cur_cache_heads = {h.hex for h in possible_heads}
+ if cur_cache_heads == cache_heads:
+ return
+ for start in possible_heads:
+ if walker is None:
+ walker = gitrepo.walk(start, _OUR_ORDER)
+ else:
+ walker.push(start)
+
+ # Empty out the existing changelog. Even for large-ish histories
+ # we can do the top-level "walk all the commits" dance very
+ # quickly as long as we don't need to figure out the changed files
+ # list.
+ db.execute('DELETE FROM changelog')
+ if prog is not None:
+ prog.complete()
+ prog = progress_factory(b'commits')
+ # This walker is sure to visit all the revisions in history, but
+ # only once.
+ for pos, commit in enumerate(walker):
+ if prog is not None:
+ prog.update(pos)
+ p1 = p2 = nodemod.nullhex
+ if len(commit.parents) > 2:
+ raise error.ProgrammingError(
+ (
+ b"git support can't handle octopus merges, "
+ b"found a commit with %d parents :("
+ )
+ % len(commit.parents)
+ )
+ if commit.parents:
+ p1 = commit.parents[0].id.hex
+ if len(commit.parents) == 2:
+ p2 = commit.parents[1].id.hex
+ db.execute(
+ 'INSERT INTO changelog (rev, node, p1, p2) VALUES(?, ?, ?, ?)',
+ (pos, commit.id.hex, p1, p2),
+ )
+
+ num_changedfiles = db.execute(
+ "SELECT COUNT(*) from changedfiles WHERE node = ?",
+ (commit.id.hex,),
+ ).fetchone()[0]
+ if not num_changedfiles:
+ files = {}
+ # I *think* we only need to check p1 for changed files
+ # (and therefore linkrevs), because any node that would
+ # actually have this commit as a linkrev would be
+ # completely new in this rev.
+ p1 = commit.parents[0].id.hex if commit.parents else None
+ if p1 is not None:
+ patchgen = gitrepo.diff(p1, commit.id.hex, flags=_DIFF_FLAGS)
+ else:
+ patchgen = commit.tree.diff_to_tree(
+ swap=True, flags=_DIFF_FLAGS
+ )
+ new_files = (p.delta.new_file for p in patchgen)
+ files = {
+ nf.path: nf.id.hex
+ for nf in new_files
+ if nf.id.raw != nodemod.nullid
+ }
+ for p, n in files.items():
+ # We intentionally set NULLs for any file parentage
+ # information so it'll get demand-computed later. We
+ # used to do it right here, and it was _very_ slow.
+ db.execute(
+ 'INSERT INTO changedfiles ('
+ 'node, filename, filenode, p1node, p1filenode, p2node, '
+ 'p2filenode) VALUES(?, ?, ?, ?, ?, ?, ?)',
+ (commit.id.hex, p, n, None, None, None, None),
+ )
+ db.execute('DELETE FROM heads')
+ db.execute('DELETE FROM possible_heads')
+ for hid in possible_heads:
+ h = hid.hex
+ db.execute('INSERT INTO possible_heads (node) VALUES(?)', (h,))
+ haschild = db.execute(
+ 'SELECT COUNT(*) FROM changelog WHERE p1 = ? OR p2 = ?', (h, h)
+ ).fetchone()[0]
+ if not haschild:
+ db.execute('INSERT INTO heads (node) VALUES(?)', (h,))
+
+ db.commit()
+ if prog is not None:
+ prog.complete()
+
+
+def get_index(gitrepo, progress_factory=lambda *args, **kwargs: None):
+    """Open the commit index for `gitrepo`, creating/reindexing as needed.
+
+    Returns the sqlite connection; the database lives in .hg/cache
+    next to the .git directory.
+    """
+    cachepath = os.path.join(
+        pycompat.fsencode(gitrepo.path), b'..', b'.hg', b'cache'
+    )
+    if not os.path.exists(cachepath):
+        os.makedirs(cachepath)
+    dbpath = os.path.join(cachepath, b'git-commits.sqlite')
+    db = _createdb(dbpath)
+    # TODO check against gitrepo heads before doing a full index
+    # TODO thread a ui.progress call into this layer
+    _index_repo(gitrepo, db, progress_factory)
+    return db
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/hgext/git/manifest.py Thu Apr 16 22:51:09 2020 +0530
@@ -0,0 +1,297 @@
+from __future__ import absolute_import
+
+from mercurial import (
+ match as matchmod,
+ pathutil,
+ pycompat,
+ util,
+)
+from mercurial.interfaces import (
+ repository,
+ util as interfaceutil,
+)
+from . import gitutil
+
+
+pygit2 = gitutil.get_pygit2()
+
+
+@interfaceutil.implementer(repository.imanifestdict)
+class gittreemanifest(object):
+ """Expose git trees (and optionally a builder's overlay) as a manifestdict.
+
+ Very similar to mercurial.manifest.treemanifest.
+ """
+
+ def __init__(self, git_repo, root_tree, pending_changes):
+ """Initializer.
+
+ Args:
+ git_repo: The git_repo we're walking (required to look up child
+ trees).
+ root_tree: The root Git tree object for this manifest.
+ pending_changes: A dict in which pending changes will be
+ tracked. The enclosing memgittreemanifestctx will use this to
+ construct any required Tree objects in Git during it's
+ `write()` method.
+ """
+ self._git_repo = git_repo
+ self._tree = root_tree
+ if pending_changes is None:
+ pending_changes = {}
+ # dict of path: Optional[Tuple(node, flags)]
+ self._pending_changes = pending_changes
+
+ def _resolve_entry(self, path):
+ """Given a path, load its node and flags, or raise KeyError if missing.
+
+ This takes into account any pending writes in the builder.
+ """
+ upath = pycompat.fsdecode(path)
+ ent = None
+ if path in self._pending_changes:
+ val = self._pending_changes[path]
+ if val is None:
+ raise KeyError
+ return val
+ t = self._tree
+ comps = upath.split('/')
+ for comp in comps[:-1]:
+ te = self._tree[comp]
+ t = self._git_repo[te.id]
+ ent = t[comps[-1]]
+ if ent.filemode == pygit2.GIT_FILEMODE_BLOB:
+ flags = b''
+ elif ent.filemode == pygit2.GIT_FILEMODE_BLOB_EXECUTABLE:
+ flags = b'x'
+ elif ent.filemode == pygit2.GIT_FILEMODE_LINK:
+ flags = b'l'
+ else:
+ raise ValueError('unsupported mode %s' % oct(ent.filemode))
+ return ent.id.raw, flags
+
+ def __getitem__(self, path):
+ return self._resolve_entry(path)[0]
+
+ def find(self, path):
+ return self._resolve_entry(path)
+
+ def __len__(self):
+ return len(list(self.walk(matchmod.always())))
+
+ def __nonzero__(self):
+ try:
+ next(iter(self))
+ return True
+ except StopIteration:
+ return False
+
+ __bool__ = __nonzero__
+
+ def __contains__(self, path):
+ try:
+ self._resolve_entry(path)
+ return True
+ except KeyError:
+ return False
+
+ def iterkeys(self):
+ return self.walk(matchmod.always())
+
+ def keys(self):
+ return list(self.iterkeys())
+
+ def __iter__(self):
+ return self.iterkeys()
+
+ def __setitem__(self, path, node):
+ self._pending_changes[path] = node, self.flags(path)
+
+ def __delitem__(self, path):
+ # TODO: should probably KeyError for already-deleted files?
+ self._pending_changes[path] = None
+
+ def filesnotin(self, other, match=None):
+ if match is not None:
+ match = matchmod.badmatch(match, lambda path, msg: None)
+ sm2 = set(other.walk(match))
+ return {f for f in self.walk(match) if f not in sm2}
+ return {f for f in self if f not in other}
+
+ @util.propertycache
+ def _dirs(self):
+ return pathutil.dirs(self)
+
+ def hasdir(self, dir):
+ return dir in self._dirs
+
+ def diff(self, other, match=None, clean=False):
+ # TODO
+ assert False
+
+ def setflag(self, path, flag):
+ node, unused_flag = self._resolve_entry(path)
+ self._pending_changes[path] = node, flag
+
+ def get(self, path, default=None):
+ try:
+ return self._resolve_entry(path)[0]
+ except KeyError:
+ return default
+
+ def flags(self, path):
+ try:
+ return self._resolve_entry(path)[1]
+ except KeyError:
+ return b''
+
+ def copy(self):
+ pass
+
+ def items(self):
+ for f in self:
+ # TODO: build a proper iterator version of this
+ yield self[f]
+
+ def iteritems(self):
+ return self.items()
+
+ def iterentries(self):
+ for f in self:
+ # TODO: build a proper iterator version of this
+ yield self._resolve_entry(f)
+
+ def text(self):
+ assert False # TODO can this method move out of the manifest iface?
+
+ def _walkonetree(self, tree, match, subdir):
+ for te in tree:
+ # TODO: can we prune dir walks with the matcher?
+ realname = subdir + pycompat.fsencode(te.name)
+ if te.type == r'tree':
+ for inner in self._walkonetree(
+ self._git_repo[te.id], match, realname + b'/'
+ ):
+ yield inner
+ if not match(realname):
+ continue
+ yield pycompat.fsencode(realname)
+
+ def walk(self, match):
+ # TODO: this is a very lazy way to merge in the pending
+ # changes. There is absolutely room for optimization here by
+ # being clever about walking over the sets...
+ baseline = set(self._walkonetree(self._tree, match, b''))
+ deleted = {p for p, v in self._pending_changes.items() if v is None}
+ pend = {p for p in self._pending_changes if match(p)}
+ return iter(sorted((baseline | pend) - deleted))
+
+
+@interfaceutil.implementer(repository.imanifestrevisionstored)
+class gittreemanifestctx(object):
+ def __init__(self, repo, gittree):
+ self._repo = repo
+ self._tree = gittree
+
+ def read(self):
+ return gittreemanifest(self._repo, self._tree, None)
+
+ def readfast(self, shallow=False):
+ return self.read()
+
+ def copy(self):
+ # NB: it's important that we return a memgittreemanifestctx
+ # because the caller expects a mutable manifest.
+ return memgittreemanifestctx(self._repo, self._tree)
+
+ def find(self, path):
+ self.read()[path]
+
+
+@interfaceutil.implementer(repository.imanifestrevisionwritable)
+class memgittreemanifestctx(object):
+    """Mutable manifest ctx that buffers edits and writes git trees."""
+
+    def __init__(self, repo, tree):
+        self._repo = repo
+        self._tree = tree
+        # dict of path: Optional[Tuple(node, flags)]
+        self._pending_changes = {}
+
+    def read(self):
+        return gittreemanifest(self._repo, self._tree, self._pending_changes)
+
+    def copy(self):
+        # TODO: if we have a builder in play, what should happen here?
+        # Maybe we can shuffle copy() into the immutable interface.
+        return memgittreemanifestctx(self._repo, self._tree)
+
+    def write(self, transaction, link, p1, p2, added, removed, match=None):
+        """Flush pending changes into new git tree objects.
+
+        Returns the raw binary id of the newly written root tree.
+        """
+        # We're not (for now, anyway) going to audit filenames, so we
+        # can ignore added and removed.
+
+        # TODO what does this match argument get used for? hopefully
+        # just narrow?
+        assert not match or isinstance(match, matchmod.alwaysmatcher)
+
+        touched_dirs = pathutil.dirs(list(self._pending_changes))
+        trees = {
+            b'': self._tree,
+        }
+        # path: treebuilder
+        builders = {
+            b'': self._repo.TreeBuilder(self._tree),
+        }
+        # get a TreeBuilder for every tree in the touched_dirs set
+        for d in sorted(touched_dirs, key=lambda x: (len(x), x)):
+            if d == b'':
+                # loaded root tree above
+                continue
+            comps = d.split(b'/')
+            full = b''
+            for part in comps:
+                parent = trees[full]
+                try:
+                    new = self._repo[parent[pycompat.fsdecode(part)]]
+                except KeyError:
+                    # new directory
+                    new = None
+                full += b'/' + part
+                if new is not None:
+                    # existing directory
+                    trees[full] = new
+                    builders[full] = self._repo.TreeBuilder(new)
+                else:
+                    # new directory, use an empty dict to easily
+                    # generate KeyError as any nested new dirs get
+                    # created.
+                    trees[full] = {}
+                    builders[full] = self._repo.TreeBuilder()
+        # Apply each pending change to the builder of its parent dir.
+        # Note: non-root keys in `builders` carry a leading b'/'.
+        for f, info in self._pending_changes.items():
+            if b'/' not in f:
+                dirname = b''
+                basename = f
+            else:
+                dirname, basename = f.rsplit(b'/', 1)
+                dirname = b'/' + dirname
+            if info is None:
+                builders[dirname].remove(pycompat.fsdecode(basename))
+            else:
+                n, fl = info
+                mode = {
+                    b'': pygit2.GIT_FILEMODE_BLOB,
+                    b'x': pygit2.GIT_FILEMODE_BLOB_EXECUTABLE,
+                    b'l': pygit2.GIT_FILEMODE_LINK,
+                }[fl]
+                builders[dirname].insert(
+                    pycompat.fsdecode(basename), gitutil.togitnode(n), mode
+                )
+        # This visits the buffered TreeBuilders in deepest-first
+        # order, bubbling up the edits.
+        for b in sorted(builders, key=len, reverse=True):
+            if b == b'':
+                # the root builder is written last, below
+                break
+            cb = builders[b]
+            dn, bn = b.rsplit(b'/', 1)
+            builders[dn].insert(
+                pycompat.fsdecode(bn), cb.write(), pygit2.GIT_FILEMODE_TREE
+            )
+        return builders[b''].write().raw
--- a/hgext/hgk.py Mon Apr 13 16:30:13 2020 +0300
+++ b/hgext/hgk.py Thu Apr 16 22:51:09 2020 +0530
@@ -358,7 +358,7 @@
)
def revlist(ui, repo, *revs, **opts):
"""print revisions"""
- if opts[b'header']:
+ if opts['header']:
full = b"commit"
else:
full = None
--- a/hgext/histedit.py Mon Apr 13 16:30:13 2020 +0300
+++ b/hgext/histedit.py Thu Apr 16 22:51:09 2020 +0530
@@ -649,7 +649,7 @@
repo.ui.setconfig(
b'ui', b'forcemerge', opts.get(b'tool', b''), b'histedit'
)
- stats = mergemod.graft(repo, ctx, ctx.p1(), [b'local', b'histedit'])
+ stats = mergemod.graft(repo, ctx, labels=[b'local', b'histedit'])
finally:
repo.ui.setconfig(b'ui', b'forcemerge', b'', b'histedit')
return stats
@@ -835,10 +835,10 @@
return ctx, [(self.node, (parentctxnode,))]
parentctx = repo[parentctxnode]
- newcommits = set(
+ newcommits = {
c.node()
for c in repo.set(b'(%d::. - %d)', parentctx.rev(), parentctx.rev())
- )
+ }
if not newcommits:
repo.ui.warn(
_(
@@ -945,7 +945,7 @@
class base(histeditaction):
def run(self):
if self.repo[b'.'].node() != self.node:
- mergemod.update(self.repo, self.node, branchmerge=False, force=True)
+ mergemod.clean_update(self.repo[self.node])
return self.continueclean()
def continuedirty(self):
@@ -1113,7 +1113,8 @@
class histeditrule(object):
- def __init__(self, ctx, pos, action=b'pick'):
+ def __init__(self, ui, ctx, pos, action=b'pick'):
+ self.ui = ui
self.ctx = ctx
self.action = action
self.origpos = pos
@@ -1153,6 +1154,14 @@
@property
def desc(self):
+ summary = (
+ cmdutil.rendertemplate(
+ self.ctx, self.ui.config(b'histedit', b'summary-template')
+ )
+ or b''
+ )
+ if summary:
+ return summary
# This is split off from the prefix property so that we can
# separately make the description for 'roll' red (since it
# will get discarded).
@@ -1258,7 +1267,7 @@
num_lines = len(mode_state[b'patchcontents'])
page_height = state[b'page_height']
unit = page_height if unit == b'page' else 1
- num_pages = 1 + (num_lines - 1) / page_height
+ num_pages = 1 + (num_lines - 1) // page_height
max_offset = (num_pages - 1) * page_height
newline = mode_state[b'line_offset'] + delta * unit
mode_state[b'line_offset'] = max(0, min(max_offset, newline))
@@ -1700,7 +1709,7 @@
ctxs = []
for i, r in enumerate(revs):
- ctxs.append(histeditrule(repo[r], i))
+ ctxs.append(histeditrule(ui, repo[r], i))
# Curses requires setting the locale or it will default to the C
# locale. This sets the locale to the user's default system
# locale.
@@ -2412,7 +2421,7 @@
Will abort if there are to many or too few rules, a malformed rule,
or a rule on a changeset outside of the user-given range.
"""
- expected = set(c.node() for c in ctxs)
+ expected = {c.node() for c in ctxs}
seen = set()
prev = None
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/hgext/hooklib/__init__.py Thu Apr 16 22:51:09 2020 +0530
@@ -0,0 +1,26 @@
+"""collection of simple hooks for common tasks (EXPERIMENTAL)
+
+This extension provides a number of simple hooks to handle issues
+commonly found in repositories with many contributors:
+- email notification when changesets move from draft to public phase
+- email notification when changesets are obsoleted
+- enforcement of draft phase for all incoming changesets
+- enforcement of a no-branch-merge policy
+- enforcement of a no-multiple-heads policy
+
+The implementation of the hooks is subject to change, e.g. whether to
+implement them as individual hooks or merge them into the notify
+extension as an option. The functionality itself is planned to be supported
+long-term.
+"""
+from __future__ import absolute_import
+from . import (
+ changeset_obsoleted,
+ changeset_published,
+)
+
+# configtable is only picked up from the "top-level" module of the extension,
+# so expand it here to ensure all items are properly loaded
+configtable = {}
+configtable.update(changeset_published.configtable)
+configtable.update(changeset_obsoleted.configtable)
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/hgext/hooklib/changeset_obsoleted.py Thu Apr 16 22:51:09 2020 +0530
@@ -0,0 +1,131 @@
+# Copyright 2020 Joerg Sonnenberger <joerg@bec.de>
+#
+# This software may be used and distributed according to the terms of the
+# GNU General Public License version 2 or any later version.
+"""changeset_obsoleted is a hook to send a mail when an
+existing draft changeset is obsoleted by an obsmarker without successor.
+
+Correct message threading requires the same messageidseed to be used for both
+the original notification and the new mail.
+
+Usage:
+ [notify]
+ messageidseed = myseed
+
+ [hooks]
+ pretxnclose.changeset_obsoleted = \
+ python:hgext.hooklib.changeset_obsoleted.hook
+"""
+
+from __future__ import absolute_import
+
+import email.errors as emailerrors
+import email.utils as emailutils
+
+from mercurial.i18n import _
+from mercurial import (
+ encoding,
+ error,
+ logcmdutil,
+ mail,
+ obsutil,
+ pycompat,
+ registrar,
+)
+from mercurial.utils import dateutil
+from .. import notify
+
+configtable = {}
+configitem = registrar.configitem(configtable)
+
+configitem(
+ b'notify_obsoleted', b'domain', default=None,
+)
+configitem(
+ b'notify_obsoleted', b'messageidseed', default=None,
+)
+configitem(
+ b'notify_obsoleted',
+ b'template',
+ default=b'''Subject: changeset abandoned
+
+This changeset has been abandoned.
+''',
+)
+
+
+def _report_commit(ui, repo, ctx):
+ domain = ui.config(b'notify_obsoleted', b'domain') or ui.config(
+ b'notify', b'domain'
+ )
+ messageidseed = ui.config(
+ b'notify_obsoleted', b'messageidseed'
+ ) or ui.config(b'notify', b'messageidseed')
+ template = ui.config(b'notify_obsoleted', b'template')
+ spec = logcmdutil.templatespec(template, None)
+ templater = logcmdutil.changesettemplater(ui, repo, spec)
+ ui.pushbuffer()
+ n = notify.notifier(ui, repo, b'incoming')
+
+ subs = set()
+ for sub, spec in n.subs:
+ if spec is None:
+ subs.add(sub)
+ continue
+ revs = repo.revs(b'%r and %d:', spec, ctx.rev())
+ if len(revs):
+ subs.add(sub)
+ continue
+ if len(subs) == 0:
+ ui.debug(
+ b'notify_obsoleted: no subscribers to selected repo and revset\n'
+ )
+ return
+
+ templater.show(
+ ctx,
+ changes=ctx.changeset(),
+ baseurl=ui.config(b'web', b'baseurl'),
+ root=repo.root,
+ webroot=n.root,
+ )
+ data = ui.popbuffer()
+
+ try:
+ msg = mail.parsebytes(data)
+ except emailerrors.MessageParseError as inst:
+ raise error.Abort(inst)
+
+ msg['In-reply-to'] = notify.messageid(ctx, domain, messageidseed)
+ msg['Message-Id'] = notify.messageid(
+ ctx, domain, messageidseed + b'-obsoleted'
+ )
+ msg['Date'] = encoding.strfromlocal(
+ dateutil.datestr(format=b"%a, %d %b %Y %H:%M:%S %1%2")
+ )
+ if not msg['From']:
+ sender = ui.config(b'email', b'from') or ui.username()
+ if b'@' not in sender or b'@localhost' in sender:
+ sender = n.fixmail(sender)
+ msg['From'] = mail.addressencode(ui, sender, n.charsets, n.test)
+ msg['To'] = ', '.join(sorted(subs))
+
+ msgtext = msg.as_bytes() if pycompat.ispy3 else msg.as_string()
+ if ui.configbool(b'notify', b'test'):
+ ui.write(msgtext)
+ if not msgtext.endswith(b'\n'):
+ ui.write(b'\n')
+ else:
+ ui.status(_(b'notify_obsoleted: sending mail for %d\n') % ctx.rev())
+ mail.sendmail(
+ ui, emailutils.parseaddr(msg['From'])[1], subs, msgtext, mbox=n.mbox
+ )
+
+
+def hook(ui, repo, hooktype, node=None, **kwargs):
+ if hooktype != b"pretxnclose":
+ raise error.Abort(
+ _(b'Unsupported hook type %r') % pycompat.bytestr(hooktype)
+ )
+ for rev in obsutil.getobsoleted(repo, repo.currenttransaction()):
+ _report_commit(ui, repo, repo.unfiltered()[rev])
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/hgext/hooklib/changeset_published.py Thu Apr 16 22:51:09 2020 +0530
@@ -0,0 +1,131 @@
+# Copyright 2020 Joerg Sonnenberger <joerg@bec.de>
+#
+# This software may be used and distributed according to the terms of the
+# GNU General Public License version 2 or any later version.
+"""changeset_published is a hook to send a mail when an
+existing draft changeset is moved to the public phase.
+
+Correct message threading requires the same messageidseed to be used for both
+the original notification and the new mail.
+
+Usage:
+ [notify]
+ messageidseed = myseed
+
+ [hooks]
+ txnclose-phase.changeset_published = \
+ python:hgext.hooklib.changeset_published.hook
+"""
+
+from __future__ import absolute_import
+
+import email.errors as emailerrors
+import email.utils as emailutils
+
+from mercurial.i18n import _
+from mercurial import (
+ encoding,
+ error,
+ logcmdutil,
+ mail,
+ pycompat,
+ registrar,
+)
+from mercurial.utils import dateutil
+from .. import notify
+
+configtable = {}
+configitem = registrar.configitem(configtable)
+
+configitem(
+ b'notify_published', b'domain', default=None,
+)
+configitem(
+ b'notify_published', b'messageidseed', default=None,
+)
+configitem(
+ b'notify_published',
+ b'template',
+ default=b'''Subject: changeset published
+
+This changeset has been published.
+''',
+)
+
+
+def _report_commit(ui, repo, ctx):
+ domain = ui.config(b'notify_published', b'domain') or ui.config(
+ b'notify', b'domain'
+ )
+ messageidseed = ui.config(
+ b'notify_published', b'messageidseed'
+ ) or ui.config(b'notify', b'messageidseed')
+ template = ui.config(b'notify_published', b'template')
+ spec = logcmdutil.templatespec(template, None)
+ templater = logcmdutil.changesettemplater(ui, repo, spec)
+ ui.pushbuffer()
+ n = notify.notifier(ui, repo, b'incoming')
+
+ subs = set()
+ for sub, spec in n.subs:
+ if spec is None:
+ subs.add(sub)
+ continue
+ revs = repo.revs(b'%r and %d:', spec, ctx.rev())
+ if len(revs):
+ subs.add(sub)
+ continue
+ if len(subs) == 0:
+ ui.debug(
+ b'notify_published: no subscribers to selected repo and revset\n'
+ )
+ return
+
+ templater.show(
+ ctx,
+ changes=ctx.changeset(),
+ baseurl=ui.config(b'web', b'baseurl'),
+ root=repo.root,
+ webroot=n.root,
+ )
+ data = ui.popbuffer()
+
+ try:
+ msg = mail.parsebytes(data)
+ except emailerrors.MessageParseError as inst:
+ raise error.Abort(inst)
+
+ msg['In-reply-to'] = notify.messageid(ctx, domain, messageidseed)
+ msg['Message-Id'] = notify.messageid(
+ ctx, domain, messageidseed + b'-published'
+ )
+ msg['Date'] = encoding.strfromlocal(
+ dateutil.datestr(format=b"%a, %d %b %Y %H:%M:%S %1%2")
+ )
+ if not msg['From']:
+ sender = ui.config(b'email', b'from') or ui.username()
+ if b'@' not in sender or b'@localhost' in sender:
+ sender = n.fixmail(sender)
+ msg['From'] = mail.addressencode(ui, sender, n.charsets, n.test)
+ msg['To'] = ', '.join(sorted(subs))
+
+ msgtext = msg.as_bytes() if pycompat.ispy3 else msg.as_string()
+ if ui.configbool(b'notify', b'test'):
+ ui.write(msgtext)
+ if not msgtext.endswith(b'\n'):
+ ui.write(b'\n')
+ else:
+ ui.status(_(b'notify_published: sending mail for %d\n') % ctx.rev())
+ mail.sendmail(
+ ui, emailutils.parseaddr(msg['From'])[1], subs, msgtext, mbox=n.mbox
+ )
+
+
+def hook(ui, repo, hooktype, node=None, **kwargs):
+ if hooktype != b"txnclose-phase":
+ raise error.Abort(
+ _(b'Unsupported hook type %r') % pycompat.bytestr(hooktype)
+ )
+ ctx = repo.unfiltered()[node]
+ if kwargs['oldphase'] == b'draft' and kwargs['phase'] == b'public':
+ _report_commit(ui, repo, ctx)
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/hgext/hooklib/enforce_draft_commits.py Thu Apr 16 22:51:09 2020 +0530
@@ -0,0 +1,45 @@
+# Copyright 2020 Joerg Sonnenberger <joerg@bec.de>
+#
+# This software may be used and distributed according to the terms of the
+# GNU General Public License version 2 or any later version.
+
+"""enforce_draft_commits is a hook to ensure that all new changesets are
+in the draft phase. This allows enforcing policies for work-in-progress
+changes in overlay repositories, i.e. shared hidden repositories with
+different views for work-in-progress code and public history.
+
+Usage:
+ [hooks]
+ pretxnclose-phase.enforce_draft_commits = \
+ python:hgext.hooklib.enforce_draft_commits.hook
+"""
+
+from __future__ import absolute_import
+
+from mercurial.i18n import _
+from mercurial import (
+ error,
+ pycompat,
+)
+
+
+def hook(ui, repo, hooktype, node=None, **kwargs):
+ if hooktype != b"pretxnclose-phase":
+ raise error.Abort(
+ _(b'Unsupported hook type %r') % pycompat.bytestr(hooktype)
+ )
+ ctx = repo.unfiltered()[node]
+ if kwargs['oldphase']:
+ raise error.Abort(
+ _(b'Phase change from %r to %r for %s rejected')
+ % (
+ pycompat.bytestr(kwargs['oldphase']),
+ pycompat.bytestr(kwargs['phase']),
+ ctx,
+ )
+ )
+ elif kwargs['phase'] != b'draft':
+ raise error.Abort(
+ _(b'New changeset %s in phase %r rejected')
+ % (ctx, pycompat.bytestr(kwargs['phase']))
+ )
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/hgext/hooklib/reject_merge_commits.py Thu Apr 16 22:51:09 2020 +0530
@@ -0,0 +1,45 @@
+# Copyright 2020 Joerg Sonnenberger <joerg@bec.de>
+#
+# This software may be used and distributed according to the terms of the
+# GNU General Public License version 2 or any later version.
+
+"""reject_merge_commits is a hook to check new changesets for merge commits.
+Merge commits are allowed only between different branches, i.e. merging
+a feature branch into the main development branch. This can be used to
+enforce policies for linear commit histories.
+
+Usage:
+ [hooks]
+ pretxnchangegroup.reject_merge_commits = \
+ python:hgext.hooklib.reject_merge_commits.hook
+"""
+
+from __future__ import absolute_import
+
+from mercurial.i18n import _
+from mercurial import (
+ error,
+ pycompat,
+)
+
+
+def hook(ui, repo, hooktype, node=None, **kwargs):
+ if hooktype != b"pretxnchangegroup":
+ raise error.Abort(
+ _(b'Unsupported hook type %r') % pycompat.bytestr(hooktype)
+ )
+
+ ctx = repo.unfiltered()[node]
+ for rev in repo.changelog.revs(start=ctx.rev()):
+ rev = repo[rev]
+ parents = rev.parents()
+ if len(parents) < 2:
+ continue
+ if all(repo[p].branch() == rev.branch() for p in parents):
+ raise error.Abort(
+ _(
+ b'%s rejected as merge on the same branch. '
+ b'Please consider rebase.'
+ )
+ % rev
+ )
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/hgext/hooklib/reject_new_heads.py Thu Apr 16 22:51:09 2020 +0530
@@ -0,0 +1,41 @@
+# Copyright 2020 Joerg Sonnenberger <joerg@bec.de>
+#
+# This software may be used and distributed according to the terms of the
+# GNU General Public License version 2 or any later version.
+
+"""reject_new_heads is a hook to check that branches touched by new changesets
+have at most one open head. It can be used to enforce policies for
+merge-before-push or rebase-before-push. It does not handle pre-existing
+hydras.
+
+Usage:
+ [hooks]
+ pretxnclose.reject_new_heads = \
+ python:hgext.hooklib.reject_new_heads.hook
+"""
+
+from __future__ import absolute_import
+
+from mercurial.i18n import _
+from mercurial import (
+ error,
+ pycompat,
+)
+
+
+def hook(ui, repo, hooktype, node=None, **kwargs):
+ if hooktype != b"pretxnclose":
+ raise error.Abort(
+ _(b'Unsupported hook type %r') % pycompat.bytestr(hooktype)
+ )
+ ctx = repo.unfiltered()[node]
+ branches = set()
+ for rev in repo.changelog.revs(start=ctx.rev()):
+ rev = repo[rev]
+ branches.add(rev.branch())
+ for branch in branches:
+ if len(repo.revs("head() and not closed() and branch(%s)", branch)) > 1:
+ raise error.Abort(
+ _(b'Changes on branch %r resulted in multiple heads')
+ % pycompat.bytestr(branch)
+ )
--- a/hgext/largefiles/basestore.py Mon Apr 13 16:30:13 2020 +0300
+++ b/hgext/largefiles/basestore.py Thu Apr 16 22:51:09 2020 +0530
@@ -67,7 +67,7 @@
ui = self.ui
at = 0
- available = self.exists(set(hash for (_filename, hash) in files))
+ available = self.exists({hash for (_filename, hash) in files})
with ui.makeprogress(
_(b'getting largefiles'), unit=_(b'files'), total=len(files)
) as progress:
--- a/hgext/largefiles/lfutil.py Mon Apr 13 16:30:13 2020 +0300
+++ b/hgext/largefiles/lfutil.py Thu Apr 16 22:51:09 2020 +0530
@@ -92,16 +92,30 @@
path = ui.configpath(name, b'usercache')
if path:
return path
+
+ hint = None
+
if pycompat.iswindows:
appdata = encoding.environ.get(
b'LOCALAPPDATA', encoding.environ.get(b'APPDATA')
)
if appdata:
return os.path.join(appdata, name)
+
+ hint = _(b"define %s or %s in the environment, or set %s.usercache") % (
+ b"LOCALAPPDATA",
+ b"APPDATA",
+ name,
+ )
elif pycompat.isdarwin:
home = encoding.environ.get(b'HOME')
if home:
return os.path.join(home, b'Library', b'Caches', name)
+
+ hint = _(b"define %s in the environment, or set %s.usercache") % (
+ b"HOME",
+ name,
+ )
elif pycompat.isposix:
path = encoding.environ.get(b'XDG_CACHE_HOME')
if path:
@@ -109,11 +123,18 @@
home = encoding.environ.get(b'HOME')
if home:
return os.path.join(home, b'.cache', name)
+
+ hint = _(b"define %s or %s in the environment, or set %s.usercache") % (
+ b"XDG_CACHE_HOME",
+ b"HOME",
+ name,
+ )
else:
raise error.Abort(
_(b'unknown operating system: %s\n') % pycompat.osname
)
- raise error.Abort(_(b'unknown %s usercache location') % name)
+
+ raise error.Abort(_(b'unknown %s usercache location') % name, hint=hint)
def inusercache(ui, hash):
--- a/hgext/largefiles/overrides.py Mon Apr 13 16:30:13 2020 +0300
+++ b/hgext/largefiles/overrides.py Thu Apr 16 22:51:09 2020 +0530
@@ -1564,11 +1564,11 @@
def overriderollback(orig, ui, repo, **opts):
with repo.wlock():
before = repo.dirstate.parents()
- orphans = set(
+ orphans = {
f
for f in repo.dirstate
if lfutil.isstandin(f) and repo.dirstate[f] != b'r'
- )
+ }
result = orig(ui, repo, **opts)
after = repo.dirstate.parents()
if before == after:
--- a/hgext/largefiles/remotestore.py Mon Apr 13 16:30:13 2020 +0300
+++ b/hgext/largefiles/remotestore.py Thu Apr 16 22:51:09 2020 +0530
@@ -48,12 +48,12 @@
)
def exists(self, hashes):
- return dict(
- (h, s == 0)
+ return {
+ h: s == 0
for (h, s) in pycompat.iteritems(
self._stat(hashes)
) # dict-from-generator
- )
+ }
def sendfile(self, filename, hash):
self.ui.debug(b'remotestore: sendfile(%s, %s)\n' % (filename, hash))
--- a/hgext/lfs/TODO.rst Mon Apr 13 16:30:13 2020 +0300
+++ b/hgext/lfs/TODO.rst Thu Apr 16 22:51:09 2020 +0530
@@ -38,9 +38,6 @@
* `hg diff` is similar, and probably shouldn't see the pointer file
-#. `Fix https multiplexing, and re-enable workers
- <https://www.mercurial-scm.org/pipermail/mercurial-devel/2018-January/109916.html>`_.
-
#. Show to-be-applied rules with `hg files -r 'wdir()' 'set:lfs()'`
* `debugignore` can show file + line number, so a dedicated command could be
--- a/hgext/lfs/__init__.py Mon Apr 13 16:30:13 2020 +0300
+++ b/hgext/lfs/__init__.py Thu Apr 16 22:51:09 2020 +0530
@@ -181,7 +181,7 @@
b'experimental', b'lfs.disableusercache', default=False,
)
eh.configitem(
- b'experimental', b'lfs.worker-enable', default=False,
+ b'experimental', b'lfs.worker-enable', default=True,
)
eh.configitem(
--- a/hgext/lfs/blobstore.py Mon Apr 13 16:30:13 2020 +0300
+++ b/hgext/lfs/blobstore.py Thu Apr 16 22:51:09 2020 +0530
@@ -21,6 +21,7 @@
from mercurial import (
encoding,
error,
+ httpconnection as httpconnectionmod,
node,
pathutil,
pycompat,
@@ -94,33 +95,16 @@
pass
-class filewithprogress(object):
- """a file-like object that supports __len__ and read.
-
- Useful to provide progress information for how many bytes are read.
+class lfsuploadfile(httpconnectionmod.httpsendfile):
+ """a file-like object that supports keepalive.
"""
- def __init__(self, fp, callback):
- self._fp = fp
- self._callback = callback # func(readsize)
- fp.seek(0, os.SEEK_END)
- self._len = fp.tell()
- fp.seek(0)
-
- def __len__(self):
- return self._len
+ def __init__(self, ui, filename):
+ super(lfsuploadfile, self).__init__(ui, filename, b'rb')
+ self.read = self._data.read
- def read(self, size):
- if self._fp is None:
- return b''
- data = self._fp.read(size)
- if data:
- if self._callback:
- self._callback(len(data))
- else:
- self._fp.close()
- self._fp = None
- return data
+ def _makeprogress(self):
+ return None # progress is handled by the worker client
class local(object):
@@ -144,6 +128,17 @@
def open(self, oid):
"""Open a read-only file descriptor to the named blob, in either the
usercache or the local store."""
+ return open(self.path(oid), 'rb')
+
+ def path(self, oid):
+ """Build the path for the given blob ``oid``.
+
+ If the blob exists locally, the path may point to either the usercache
+ or the local store. If it doesn't, it will point to the local store.
+ This is meant for situations where existing code that isn't LFS aware
+ needs to open a blob. Generally, prefer the ``open`` method on this
+ class.
+ """
# The usercache is the most likely place to hold the file. Commit will
# write to both it and the local store, as will anything that downloads
# the blobs. However, things like clone without an update won't
@@ -151,9 +146,9 @@
# the usercache is the only place it _could_ be. If not present, the
# missing file msg here will indicate the local repo, not the usercache.
if self.cachevfs.exists(oid):
- return self.cachevfs(oid, b'rb')
+ return self.cachevfs.join(oid)
- return self.vfs(oid, b'rb')
+ return self.vfs.join(oid)
def download(self, oid, src, content_length):
"""Read the blob from the remote source in chunks, verify the content,
@@ -495,15 +490,17 @@
_(b'detected corrupt lfs object: %s') % oid,
hint=_(b'run hg verify'),
)
- request.data = filewithprogress(localstore.open(oid), None)
- request.get_method = lambda: r'PUT'
- request.add_header('Content-Type', 'application/octet-stream')
- request.add_header('Content-Length', len(request.data))
for k, v in headers:
request.add_header(pycompat.strurl(k), pycompat.strurl(v))
try:
+ if action == b'upload':
+ request.data = lfsuploadfile(self.ui, localstore.path(oid))
+ request.get_method = lambda: 'PUT'
+ request.add_header('Content-Type', 'application/octet-stream')
+ request.add_header('Content-Length', request.data.length)
+
with contextlib.closing(self.urlopener.open(request)) as res:
contentlength = res.info().get(b"content-length")
ui = self.ui # Shorten debug lines
@@ -545,6 +542,9 @@
raise LfsRemoteError(
_(b'LFS error: %s') % _urlerrorreason(ex), hint=hint
)
+ finally:
+ if request.data:
+ request.data.close()
def _batch(self, pointers, localstore, action):
if action not in [b'upload', b'download']:
--- a/hgext/logtoprocess.py Mon Apr 13 16:30:13 2020 +0300
+++ b/hgext/logtoprocess.py Thu Apr 16 22:51:09 2020 +0530
@@ -59,6 +59,13 @@
def log(self, ui, event, msg, opts):
script = self._scripts[event]
+ maxmsg = 100000
+ if len(msg) > maxmsg:
+ # Each env var has a 128KiB limit on linux. msg can be long, in
+ # particular for command event, where it's the full command line.
+ # Prefer truncating the message than raising "Argument list too
+ # long" error.
+ msg = msg[:maxmsg] + b' (truncated)'
env = {
b'EVENT': event,
b'HGPID': os.getpid(),
--- a/hgext/mq.py Mon Apr 13 16:30:13 2020 +0300
+++ b/hgext/mq.py Thu Apr 16 22:51:09 2020 +0530
@@ -858,7 +858,7 @@
strip(self.ui, repo, [n], update=False, backup=False)
ctx = repo[rev]
- ret = hg.merge(repo, rev)
+ ret = hg.merge(ctx, remind=False)
if ret:
raise error.Abort(_(b"update returned %d") % ret)
n = newcommit(repo, None, ctx.description(), ctx.user(), force=True)
@@ -1162,7 +1162,7 @@
if unknown:
if numrevs:
- rev = dict((entry.name, entry.node) for entry in qfinished)
+ rev = {entry.name: entry.node for entry in qfinished}
for p in unknown:
msg = _(b'revision %s refers to unknown patches: %s\n')
self.ui.warn(msg % (short(rev[p]), p))
@@ -3361,7 +3361,7 @@
ui.write(b'\n')
q = repo.mq
- applied = set(p.name for p in q.applied)
+ applied = {p.name for p in q.applied}
patch = None
args = list(args)
if opts.get('list'):
--- a/hgext/notify.py Mon Apr 13 16:30:13 2020 +0300
+++ b/hgext/notify.py Thu Apr 16 22:51:09 2020 +0530
@@ -133,6 +133,15 @@
the "From" field of the notification mail. If not set, take the user
from the pushing repo. Default: False.
+notify.reply-to-predecessor (EXPERIMENTAL)
+ If set and the changeset has a predecessor in the repository, try to thread
+ the notification mail with the predecessor. This adds the "In-Reply-To" header
+ to the notification mail with a reference to the predecessor with the smallest
+ revision number. Mail threads can still be torn, especially when changesets
+ are folded.
+
+ This option must be used in combination with ``notify.messageidseed``.
+
If set, the following entries will also be used to customize the
notifications:
@@ -160,6 +169,7 @@
error,
logcmdutil,
mail,
+ obsutil,
patch,
pycompat,
registrar,
@@ -219,6 +229,9 @@
b'notify', b'outgoing', default=None,
)
configitem(
+ b'notify', b'reply-to-predecessor', default=False,
+)
+configitem(
b'notify', b'sources', default=b'serve',
)
configitem(
@@ -281,6 +294,16 @@
self.merge = self.ui.configbool(b'notify', b'merge')
self.showfunc = self.ui.configbool(b'notify', b'showfunc')
self.messageidseed = self.ui.config(b'notify', b'messageidseed')
+ self.reply = self.ui.configbool(b'notify', b'reply-to-predecessor')
+
+ if self.reply and not self.messageidseed:
+ raise error.Abort(
+ _(
+ b'notify.reply-to-predecessor used without '
+ b'notify.messageidseed'
+ )
+ )
+
if self.showfunc is None:
self.showfunc = self.ui.configbool(b'diff', b'showfunc')
@@ -437,6 +460,26 @@
msg['X-Hg-Notification'] = 'changeset %s' % ctx
if not msg['Message-Id']:
msg['Message-Id'] = messageid(ctx, self.domain, self.messageidseed)
+ if self.reply:
+ unfi = self.repo.unfiltered()
+ has_node = unfi.changelog.index.has_node
+ predecessors = [
+ unfi[ctx2]
+ for ctx2 in obsutil.allpredecessors(unfi.obsstore, [ctx.node()])
+ if ctx2 != ctx.node() and has_node(ctx2)
+ ]
+ if predecessors:
+ # There is at least one predecessor, so which to pick?
+ # Ideally, there is a unique root because changesets have
+ # been evolved/rebased one step at a time. In this case,
+ # just picking the oldest known changeset provides a stable
+ # base. It doesn't help when changesets are folded. Any
+ # better solution would require storing more information
+ # in the repository.
+ pred = min(predecessors, key=lambda ctx: ctx.rev())
+ msg['In-Reply-To'] = messageid(
+ pred, self.domain, self.messageidseed
+ )
msg['To'] = ', '.join(sorted(subs))
msgtext = msg.as_bytes() if pycompat.ispy3 else msg.as_string()
--- a/hgext/phabricator.py Mon Apr 13 16:30:13 2020 +0300
+++ b/hgext/phabricator.py Thu Apr 16 22:51:09 2020 +0530
@@ -54,13 +54,14 @@
import operator
import re
-from mercurial.node import bin, nullid
+from mercurial.node import bin, nullid, short
from mercurial.i18n import _
from mercurial.pycompat import getattr
from mercurial.thirdparty import attr
from mercurial import (
cmdutil,
context,
+ copies,
encoding,
error,
exthelper,
@@ -114,6 +115,10 @@
eh.configitem(
b'phabricator', b'curlcmd', default=None,
)
+# developer config: phabricator.debug
+eh.configitem(
+ b'phabricator', b'debug', default=False,
+)
# developer config: phabricator.repophid
eh.configitem(
b'phabricator', b'repophid', default=None,
@@ -124,6 +129,12 @@
eh.configitem(
b'phabsend', b'confirm', default=False,
)
+eh.configitem(
+ b'phabimport', b'secret', default=False,
+)
+eh.configitem(
+ b'phabimport', b'obsolete', default=False,
+)
colortable = {
b'phabricator.action.created': b'green',
@@ -257,19 +268,36 @@
return fn(*args, **kwargs)
return fn(*args, **kwargs)
- inner.__name__ = fn.__name__
- inner.__doc__ = fn.__doc__
+ cmd = util.checksignature(inner, depth=2)
+ cmd.__name__ = fn.__name__
+ cmd.__doc__ = fn.__doc__
+
return command(
name,
fullflags,
spec,
helpcategory=helpcategory,
optionalrepo=optionalrepo,
- )(inner)
+ )(cmd)
return decorate
+def _debug(ui, *msg, **opts):
+ """write debug output for Phabricator if ``phabricator.debug`` is set
+
+ Specifically, this avoids dumping Conduit and HTTP auth chatter that is
+ printed with the --debug argument.
+ """
+ if ui.configbool(b"phabricator", b"debug"):
+ flag = ui.debugflag
+ try:
+ ui.debugflag = True
+ ui.write(*msg, **opts)
+ finally:
+ ui.debugflag = flag
+
+
def urlencodenested(params):
"""like urlencode, but works with nested parameters.
@@ -446,7 +474,8 @@
has_node = unfi.changelog.index.has_node
result = {} # {node: (oldnode?, lastdiff?, drev)}
- toconfirm = {} # {node: (force, {precnode}, drev)}
+ # ordered for test stability when printing new -> old mapping below
+ toconfirm = util.sortdict() # {node: (force, {precnode}, drev)}
for node in nodelist:
ctx = unfi[node]
# For tags like "D123", put them into "toconfirm" to verify later
@@ -474,18 +503,23 @@
alldiffs = callconduit(
unfi.ui, b'differential.querydiffs', {b'revisionIDs': drevs}
)
- getnode = lambda d: bin(getdiffmeta(d).get(b'node', b'')) or None
+
+ def getnodes(d, precset):
+ # Ignore other nodes that were combined into the Differential
+ # that aren't predecessors of the current local node.
+ return [n for n in getlocalcommits(d) if n in precset]
+
for newnode, (force, precset, drev) in toconfirm.items():
diffs = [
d for d in alldiffs.values() if int(d[b'revisionID']) == drev
]
- # "precursors" as known by Phabricator
- phprecset = set(getnode(d) for d in diffs)
+ # local predecessors known by Phabricator
+ phprecset = {n for d in diffs for n in getnodes(d, precset)}
# Ignore if precursors (Phabricator and local repo) do not overlap,
# and force is not set (when commit message says nothing)
- if not force and not bool(phprecset & precset):
+ if not force and not phprecset:
tagname = b'D%d' % drev
tags.tag(
repo,
@@ -510,7 +544,33 @@
oldnode = lastdiff = None
if diffs:
lastdiff = max(diffs, key=lambda d: int(d[b'id']))
- oldnode = getnode(lastdiff)
+ oldnodes = getnodes(lastdiff, precset)
+
+ _debug(
+ unfi.ui,
+ b"%s mapped to old nodes %s\n"
+ % (
+ short(newnode),
+ stringutil.pprint([short(n) for n in sorted(oldnodes)]),
+ ),
+ )
+
+ # If this commit was the result of `hg fold` after submission,
+ # and now resubmitted with --fold, the easiest thing to do is
+ # to leave the node clear. This only results in creating a new
+ # diff for the _same_ Differential Revision if this commit is
+ # the first or last in the selected range. If we picked a node
+ # from the list instead, it would have to be the lowest if at
+ # the beginning of the --fold range, or the highest at the end.
+ # Otherwise, one or more of the nodes wouldn't be considered in
+ # the diff, and the Differential wouldn't be properly updated.
+ # If this commit is the result of `hg split` in the same
+ # scenario, there is a single oldnode here (and multiple
+ # newnodes mapped to it). That makes it the same as the normal
+ # case, as the edges of the newnode range cleanly maps to one
+ # oldnode each.
+ if len(oldnodes) == 1:
+ oldnode = oldnodes[0]
if oldnode and not has_node(oldnode):
oldnode = None
@@ -542,11 +602,11 @@
return result
-def getdiff(ctx, diffopts):
+def getdiff(basectx, ctx, diffopts):
"""plain-text diff without header (user, commit message, etc)"""
output = util.stringio()
for chunk, _label in patch.diffui(
- ctx.repo(), ctx.p1().node(), ctx.node(), None, opts=diffopts
+ ctx.repo(), basectx.p1().node(), ctx.node(), None, opts=diffopts
):
output.write(chunk)
return output.getvalue()
@@ -653,13 +713,13 @@
)
-def maketext(pchange, ctx, fname):
+def maketext(pchange, basectx, ctx, fname):
"""populate the phabchange for a text file"""
repo = ctx.repo()
fmatcher = match.exact([fname])
diffopts = mdiff.diffopts(git=True, context=32767)
_pfctx, _fctx, header, fhunks = next(
- patch.diffhunks(repo, ctx.p1(), ctx, fmatcher, opts=diffopts)
+ patch.diffhunks(repo, basectx.p1(), ctx, fmatcher, opts=diffopts)
)
for fhunk in fhunks:
@@ -747,12 +807,14 @@
return fphid
-def addoldbinary(pchange, fctx):
+def addoldbinary(pchange, oldfctx, fctx):
"""add the metadata for the previous version of a binary file to the
phabchange for the new version
+
+ ``oldfctx`` is the previous version of the file; ``fctx`` is the new
+ version of the file, or None if the file is being removed.
"""
- oldfctx = fctx.p1()
- if fctx.cmp(oldfctx):
+ if not fctx or fctx.cmp(oldfctx):
# Files differ, add the old one
pchange.metadata[b'old:file:size'] = oldfctx.size()
mimeguess, _enc = mimetypes.guess_type(
@@ -794,8 +856,6 @@
"""
try:
fctx.data().decode('utf-8')
- if fctx.parents():
- fctx.p1().data().decode('utf-8')
return False
except UnicodeDecodeError:
fctx.repo().ui.write(
@@ -805,56 +865,76 @@
return True
-def addremoved(pdiff, ctx, removed):
+def addremoved(pdiff, basectx, ctx, removed):
"""add removed files to the phabdiff. Shouldn't include moves"""
for fname in removed:
pchange = phabchange(
currentPath=fname, oldPath=fname, type=DiffChangeType.DELETE
)
- pchange.addoldmode(gitmode[ctx.p1()[fname].flags()])
- fctx = ctx.p1()[fname]
- if not (fctx.isbinary() or notutf8(fctx)):
- maketext(pchange, ctx, fname)
+ oldfctx = basectx.p1()[fname]
+ pchange.addoldmode(gitmode[oldfctx.flags()])
+ if not (oldfctx.isbinary() or notutf8(oldfctx)):
+ maketext(pchange, basectx, ctx, fname)
pdiff.addchange(pchange)
-def addmodified(pdiff, ctx, modified):
+def addmodified(pdiff, basectx, ctx, modified):
"""add modified files to the phabdiff"""
for fname in modified:
fctx = ctx[fname]
+ oldfctx = basectx.p1()[fname]
pchange = phabchange(currentPath=fname, oldPath=fname)
- filemode = gitmode[ctx[fname].flags()]
- originalmode = gitmode[ctx.p1()[fname].flags()]
+ filemode = gitmode[fctx.flags()]
+ originalmode = gitmode[oldfctx.flags()]
if filemode != originalmode:
pchange.addoldmode(originalmode)
pchange.addnewmode(filemode)
- if fctx.isbinary() or notutf8(fctx):
+ if (
+ fctx.isbinary()
+ or notutf8(fctx)
+ or oldfctx.isbinary()
+ or notutf8(oldfctx)
+ ):
makebinary(pchange, fctx)
- addoldbinary(pchange, fctx)
+ addoldbinary(pchange, oldfctx, fctx)
else:
- maketext(pchange, ctx, fname)
+ maketext(pchange, basectx, ctx, fname)
pdiff.addchange(pchange)
-def addadded(pdiff, ctx, added, removed):
+def addadded(pdiff, basectx, ctx, added, removed):
"""add file adds to the phabdiff, both new files and copies/moves"""
# Keep track of files that've been recorded as moved/copied, so if there are
# additional copies we can mark them (moves get removed from removed)
copiedchanges = {}
movedchanges = {}
+
+ copy = {}
+ if basectx != ctx:
+ copy = copies.pathcopies(basectx.p1(), ctx)
+
for fname in added:
fctx = ctx[fname]
+ oldfctx = None
pchange = phabchange(currentPath=fname)
- filemode = gitmode[ctx[fname].flags()]
- renamed = fctx.renamed()
+ filemode = gitmode[fctx.flags()]
+
+ if copy:
+ originalfname = copy.get(fname, fname)
+ else:
+ originalfname = fname
+ if fctx.renamed():
+ originalfname = fctx.renamed()[0]
+
+ renamed = fname != originalfname
if renamed:
- originalfname = renamed[0]
- originalmode = gitmode[ctx.p1()[originalfname].flags()]
+ oldfctx = basectx.p1()[originalfname]
+ originalmode = gitmode[oldfctx.flags()]
pchange.oldPath = originalfname
if originalfname in removed:
@@ -889,12 +969,16 @@
pchange.addnewmode(gitmode[fctx.flags()])
pchange.type = DiffChangeType.ADD
- if fctx.isbinary() or notutf8(fctx):
+ if (
+ fctx.isbinary()
+ or notutf8(fctx)
+ or (oldfctx and (oldfctx.isbinary() or notutf8(oldfctx)))
+ ):
makebinary(pchange, fctx)
if renamed:
- addoldbinary(pchange, fctx)
+ addoldbinary(pchange, oldfctx, fctx)
else:
- maketext(pchange, ctx, fname)
+ maketext(pchange, basectx, ctx, fname)
pdiff.addchange(pchange)
@@ -904,21 +988,21 @@
pdiff.addchange(movedchange)
-def creatediff(ctx):
+def creatediff(basectx, ctx):
"""create a Differential Diff"""
repo = ctx.repo()
repophid = getrepophid(repo)
# Create a "Differential Diff" via "differential.creatediff" API
pdiff = phabdiff(
- sourceControlBaseRevision=b'%s' % ctx.p1().hex(),
+ sourceControlBaseRevision=b'%s' % basectx.p1().hex(),
branch=b'%s' % ctx.branch(),
)
- modified, added, removed, _d, _u, _i, _c = ctx.p1().status(ctx)
+ modified, added, removed, _d, _u, _i, _c = basectx.p1().status(ctx)
# addadded will remove moved files from removed, so addremoved won't get
# them
- addadded(pdiff, ctx, added, removed)
- addmodified(pdiff, ctx, modified)
- addremoved(pdiff, ctx, removed)
+ addadded(pdiff, basectx, ctx, added, removed)
+ addmodified(pdiff, basectx, ctx, modified)
+ addremoved(pdiff, basectx, ctx, removed)
if repophid:
pdiff.repositoryPHID = repophid
diff = callconduit(
@@ -927,52 +1011,64 @@
pycompat.byteskwargs(attr.asdict(pdiff)),
)
if not diff:
- raise error.Abort(_(b'cannot create diff for %s') % ctx)
+ if basectx != ctx:
+ msg = _(b'cannot create diff for %s::%s') % (basectx, ctx)
+ else:
+ msg = _(b'cannot create diff for %s') % ctx
+ raise error.Abort(msg)
return diff
-def writediffproperties(ctx, diff):
- """write metadata to diff so patches could be applied losslessly"""
+def writediffproperties(ctxs, diff):
+ """write metadata to diff so patches could be applied losslessly
+
+ ``ctxs`` is the list of commits that created the diff, in ascending order.
+ The list is generally a single commit, but may be several when using
+ ``phabsend --fold``.
+ """
# creatediff returns with a diffid but query returns with an id
diffid = diff.get(b'diffid', diff.get(b'id'))
+ basectx = ctxs[0]
+ tipctx = ctxs[-1]
+
params = {
b'diff_id': diffid,
b'name': b'hg:meta',
b'data': templatefilters.json(
{
- b'user': ctx.user(),
- b'date': b'%d %d' % ctx.date(),
- b'branch': ctx.branch(),
- b'node': ctx.hex(),
- b'parent': ctx.p1().hex(),
+ b'user': tipctx.user(),
+ b'date': b'%d %d' % tipctx.date(),
+ b'branch': tipctx.branch(),
+ b'node': tipctx.hex(),
+ b'parent': basectx.p1().hex(),
}
),
}
- callconduit(ctx.repo().ui, b'differential.setdiffproperty', params)
+ callconduit(basectx.repo().ui, b'differential.setdiffproperty', params)
+ commits = {}
+ for ctx in ctxs:
+ commits[ctx.hex()] = {
+ b'author': stringutil.person(ctx.user()),
+ b'authorEmail': stringutil.email(ctx.user()),
+ b'time': int(ctx.date()[0]),
+ b'commit': ctx.hex(),
+ b'parents': [ctx.p1().hex()],
+ b'branch': ctx.branch(),
+ }
params = {
b'diff_id': diffid,
b'name': b'local:commits',
- b'data': templatefilters.json(
- {
- ctx.hex(): {
- b'author': stringutil.person(ctx.user()),
- b'authorEmail': stringutil.email(ctx.user()),
- b'time': int(ctx.date()[0]),
- b'commit': ctx.hex(),
- b'parents': [ctx.p1().hex()],
- b'branch': ctx.branch(),
- },
- }
- ),
+ b'data': templatefilters.json(commits),
}
- callconduit(ctx.repo().ui, b'differential.setdiffproperty', params)
+ callconduit(basectx.repo().ui, b'differential.setdiffproperty', params)
def createdifferentialrevision(
- ctx,
+ ctxs,
revid=None,
parentrevphid=None,
+ oldbasenode=None,
oldnode=None,
olddiff=None,
actions=None,
@@ -983,22 +1079,38 @@
If revid is None, create a new Differential Revision, otherwise update
revid. If parentrevphid is not None, set it as a dependency.
+ If there is a single commit for the new Differential Revision, ``ctxs`` will
+ be a list of that single context. Otherwise, it is a list that covers the
+ range of changes for the differential, where ``ctxs[0]`` is the first change
+ to include and ``ctxs[-1]`` is the last.
+
If oldnode is not None, check if the patch content (without commit message
- and metadata) has changed before creating another diff.
+ and metadata) has changed before creating another diff. For a Revision with
+ a single commit, ``oldbasenode`` and ``oldnode`` have the same value. For a
+ Revision covering multiple commits, ``oldbasenode`` corresponds to
+ ``ctxs[0]`` the previous time this Revision was posted, and ``oldnode``
+ corresponds to ``ctxs[-1]``.
If actions is not None, they will be appended to the transaction.
"""
+ ctx = ctxs[-1]
+ basectx = ctxs[0]
+
repo = ctx.repo()
if oldnode:
diffopts = mdiff.diffopts(git=True, context=32767)
- oldctx = repo.unfiltered()[oldnode]
- neednewdiff = getdiff(ctx, diffopts) != getdiff(oldctx, diffopts)
+ unfi = repo.unfiltered()
+ oldctx = unfi[oldnode]
+ oldbasectx = unfi[oldbasenode]
+ neednewdiff = getdiff(basectx, ctx, diffopts) != getdiff(
+ oldbasectx, oldctx, diffopts
+ )
else:
neednewdiff = True
transactions = []
if neednewdiff:
- diff = creatediff(ctx)
+ diff = creatediff(basectx, ctx)
transactions.append({b'type': b'update', b'value': diff[b'phid']})
if comment:
transactions.append({b'type': b'comment', b'value': comment})
@@ -1008,7 +1120,7 @@
# pushers could know the correct node metadata.
assert olddiff
diff = olddiff
- writediffproperties(ctx, diff)
+ writediffproperties(ctxs, diff)
# Set the parent Revision every time, so commit re-ordering is picked-up
if parentrevphid:
@@ -1019,14 +1131,42 @@
if actions:
transactions += actions
- # Parse commit message and update related fields.
- desc = ctx.description()
- info = callconduit(
- repo.ui, b'differential.parsecommitmessage', {b'corpus': desc}
- )
- for k, v in info[b'fields'].items():
- if k in [b'title', b'summary', b'testPlan']:
- transactions.append({b'type': k, b'value': v})
+ # When folding multiple local commits into a single review, arcanist will
+ # take the summary line of the first commit as the title, and then
+ # concatenate the rest of the remaining messages (including each of their
+ # first lines) to the rest of the first commit message (each separated by
+ # an empty line), and use that as the summary field. Do the same here.
+ # For commits with only a one line message, there is no summary field, as
+ # this gets assigned to the title.
+ fields = util.sortdict() # sorted for stable wire protocol in tests
+
+ for i, _ctx in enumerate(ctxs):
+ # Parse commit message and update related fields.
+ desc = _ctx.description()
+ info = callconduit(
+ repo.ui, b'differential.parsecommitmessage', {b'corpus': desc}
+ )
+
+ for k in [b'title', b'summary', b'testPlan']:
+ v = info[b'fields'].get(k)
+ if not v:
+ continue
+
+ if i == 0:
+ # Title, summary and test plan (if present) are taken verbatim
+ # for the first commit.
+ fields[k] = v.rstrip()
+ continue
+ elif k == b'title':
+ # Add subsequent titles (i.e. the first line of the commit
+ # message) back to the summary.
+ k = b'summary'
+
+ # Append any current field to the existing composite field
+ fields[k] = b'\n\n'.join(filter(None, [fields.get(k), v.rstrip()]))
+
+ for k, v in fields.items():
+ transactions.append({b'type': k, b'value': v})
params = {b'transactions': transactions}
if revid is not None:
@@ -1035,20 +1175,24 @@
revision = callconduit(repo.ui, b'differential.revision.edit', params)
if not revision:
- raise error.Abort(_(b'cannot create revision for %s') % ctx)
+ if len(ctxs) == 1:
+ msg = _(b'cannot create revision for %s') % ctx
+ else:
+ msg = _(b'cannot create revision for %s::%s') % (basectx, ctx)
+ raise error.Abort(msg)
return revision, diff
-def userphids(repo, names):
+def userphids(ui, names):
"""convert user names to PHIDs"""
names = [name.lower() for name in names]
query = {b'constraints': {b'usernames': names}}
- result = callconduit(repo.ui, b'user.search', query)
+ result = callconduit(ui, b'user.search', query)
# username not found is not an error of the API. So check if we have missed
# some names here.
data = result[b'data']
- resolved = set(entry[b'fields'][b'username'].lower() for entry in data)
+ resolved = {entry[b'fields'][b'username'].lower() for entry in data}
unresolved = set(names) - resolved
if unresolved:
raise error.Abort(
@@ -1057,6 +1201,45 @@
return [entry[b'phid'] for entry in data]
+def _print_phabsend_action(ui, ctx, newrevid, action):
+ """print the ``action`` that occurred when posting ``ctx`` for review
+
+ This is a utility function for the sending phase of ``phabsend``, which
+ makes it easier to show a status for all local commits with `--fold``.
+ """
+ actiondesc = ui.label(
+ {
+ b'created': _(b'created'),
+ b'skipped': _(b'skipped'),
+ b'updated': _(b'updated'),
+ }[action],
+ b'phabricator.action.%s' % action,
+ )
+ drevdesc = ui.label(b'D%d' % newrevid, b'phabricator.drev')
+ nodedesc = ui.label(bytes(ctx), b'phabricator.node')
+ desc = ui.label(ctx.description().split(b'\n')[0], b'phabricator.desc')
+ ui.write(_(b'%s - %s - %s: %s\n') % (drevdesc, actiondesc, nodedesc, desc))
+
+
+def _amend_diff_properties(unfi, drevid, newnodes, diff):
+ """update the local commit list for the ``diff`` associated with ``drevid``
+
+ This is a utility function for the amend phase of ``phabsend``, which
+ converts failures to warning messages.
+ """
+ _debug(
+ unfi.ui,
+ b"new commits: %s\n" % stringutil.pprint([short(n) for n in newnodes]),
+ )
+
+ try:
+ writediffproperties([unfi[newnode] for newnode in newnodes], diff)
+ except util.urlerr.urlerror:
+ # If it fails just warn and keep going, otherwise the DREV
+ # associations will be lost
+ unfi.ui.warnnoi18n(b'Failed to update metadata for D%d\n' % drevid)
+
+
@vcrcommand(
b'phabsend',
[
@@ -1071,6 +1254,7 @@
_(b'add a comment to Revisions with new/updated Diffs'),
),
(b'', b'confirm', None, _(b'ask for confirmation before sending')),
+ (b'', b'fold', False, _(b'combine the revisions into one review')),
],
_(b'REV [OPTIONS]'),
helpcategory=command.CATEGORY_IMPORT_EXPORT,
@@ -1099,6 +1283,12 @@
[phabsend]
confirm = true
+ By default, a separate review will be created for each commit that is
+ selected, and will have the same parent/child relationship in Phabricator.
+ If ``--fold`` is set, multiple commits are rolled up into a single review
+ as if diffed from the parent of the first revision to the last. The commit
+ messages are concatenated in the summary field on Phabricator.
+
phabsend will check obsstore and the above association to decide whether to
update an existing Differential Revision, or create a new one.
"""
@@ -1112,6 +1302,47 @@
if opts.get(b'amend'):
cmdutil.checkunfinished(repo)
+ ctxs = [repo[rev] for rev in revs]
+
+ if any(c for c in ctxs if c.obsolete()):
+ raise error.Abort(_(b"obsolete commits cannot be posted for review"))
+
+ fold = opts.get(b'fold')
+ if fold:
+ if len(revs) == 1:
+ # TODO: just switch to --no-fold instead?
+ raise error.Abort(_(b"cannot fold a single revision"))
+
+ # There's no clear way to manage multiple commits with a Dxxx tag, so
+ # require the amend option. (We could append "_nnn", but then it
+ # becomes jumbled if earlier commits are added to an update.) It should
+ # lock the repo and ensure that the range is editable, but that would
+ # make the code pretty convoluted. The default behavior of `arc` is to
+ # create a new review anyway.
+ if not opts.get(b"amend"):
+ raise error.Abort(_(b"cannot fold with --no-amend"))
+
+ # Ensure the local commits are an unbroken range
+ revrange = repo.revs(b'(first(%ld)::last(%ld))', revs, revs)
+ if any(r for r in revs if r not in revrange) or any(
+ r for r in revrange if r not in revs
+ ):
+ raise error.Abort(_(b"cannot fold non-linear revisions"))
+
+ # It might be possible to bucketize the revisions by the DREV value, and
+ # iterate over those groups when posting, and then again when amending.
+ # But for simplicity, require all selected revisions to be for the same
+ # DREV (if present). Adding local revisions to an existing DREV is
+ # acceptable.
+ drevmatchers = [
+ _differentialrevisiondescre.search(ctx.description())
+ for ctx in ctxs
+ ]
+ if len({m.group('url') for m in drevmatchers if m}) > 1:
+ raise error.Abort(
+ _(b"cannot fold revisions with different DREV values")
+ )
+
# {newnode: (oldnode, olddiff, olddrev}
oldmap = getoldnodedrevmap(repo, [repo[r].node() for r in revs])
@@ -1127,10 +1358,13 @@
blockers = opts.get(b'blocker', [])
phids = []
if reviewers:
- phids.extend(userphids(repo, reviewers))
+ phids.extend(userphids(repo.ui, reviewers))
if blockers:
phids.extend(
- map(lambda phid: b'blocking(%s)' % phid, userphids(repo, blockers))
+ map(
+ lambda phid: b'blocking(%s)' % phid,
+ userphids(repo.ui, blockers),
+ )
)
if phids:
actions.append({b'type': b'reviewers.add', b'value': phids})
@@ -1141,24 +1375,40 @@
# Send patches one by one so we know their Differential Revision PHIDs and
# can provide dependency relationship
lastrevphid = None
- for rev in revs:
- ui.debug(b'sending rev %d\n' % rev)
- ctx = repo[rev]
+ for ctx in ctxs:
+ if fold:
+ ui.debug(b'sending rev %d::%d\n' % (ctx.rev(), ctxs[-1].rev()))
+ else:
+ ui.debug(b'sending rev %d\n' % ctx.rev())
# Get Differential Revision ID
oldnode, olddiff, revid = oldmap.get(ctx.node(), (None, None, None))
+ oldbasenode, oldbasediff, oldbaserevid = oldnode, olddiff, revid
+
+ if fold:
+ oldbasenode, oldbasediff, oldbaserevid = oldmap.get(
+ ctxs[-1].node(), (None, None, None)
+ )
+
if oldnode != ctx.node() or opts.get(b'amend'):
# Create or update Differential Revision
revision, diff = createdifferentialrevision(
- ctx,
+ ctxs if fold else [ctx],
revid,
lastrevphid,
+ oldbasenode,
oldnode,
olddiff,
actions,
opts.get(b'comment'),
)
- diffmap[ctx.node()] = diff
+
+ if fold:
+ for ctx in ctxs:
+ diffmap[ctx.node()] = diff
+ else:
+ diffmap[ctx.node()] = diff
+
newrevid = int(revision[b'object'][b'id'])
newrevphid = revision[b'object'][b'phid']
if revid:
@@ -1168,56 +1418,75 @@
# Create a local tag to note the association, if commit message
# does not have it already
- m = _differentialrevisiondescre.search(ctx.description())
- if not m or int(m.group('id')) != newrevid:
- tagname = b'D%d' % newrevid
- tags.tag(
- repo,
- tagname,
- ctx.node(),
- message=None,
- user=None,
- date=None,
- local=True,
- )
+ if not fold:
+ m = _differentialrevisiondescre.search(ctx.description())
+ if not m or int(m.group('id')) != newrevid:
+ tagname = b'D%d' % newrevid
+ tags.tag(
+ repo,
+ tagname,
+ ctx.node(),
+ message=None,
+ user=None,
+ date=None,
+ local=True,
+ )
else:
# Nothing changed. But still set "newrevphid" so the next revision
# could depend on this one and "newrevid" for the summary line.
- newrevphid = querydrev(repo, b'%d' % revid)[0][b'phid']
+ newrevphid = querydrev(repo.ui, b'%d' % revid)[0][b'phid']
newrevid = revid
action = b'skipped'
- actiondesc = ui.label(
- {
- b'created': _(b'created'),
- b'skipped': _(b'skipped'),
- b'updated': _(b'updated'),
- }[action],
- b'phabricator.action.%s' % action,
- )
- drevdesc = ui.label(b'D%d' % newrevid, b'phabricator.drev')
- nodedesc = ui.label(bytes(ctx), b'phabricator.node')
- desc = ui.label(ctx.description().split(b'\n')[0], b'phabricator.desc')
- ui.write(
- _(b'%s - %s - %s: %s\n') % (drevdesc, actiondesc, nodedesc, desc)
- )
drevids.append(newrevid)
lastrevphid = newrevphid
+ if fold:
+ for c in ctxs:
+ if oldmap.get(c.node(), (None, None, None))[2]:
+ action = b'updated'
+ else:
+ action = b'created'
+ _print_phabsend_action(ui, c, newrevid, action)
+ break
+
+ _print_phabsend_action(ui, ctx, newrevid, action)
+
# Update commit messages and remove tags
if opts.get(b'amend'):
unfi = repo.unfiltered()
drevs = callconduit(ui, b'differential.query', {b'ids': drevids})
with repo.wlock(), repo.lock(), repo.transaction(b'phabsend'):
+ # Eagerly evaluate commits to restabilize before creating new
+ # commits. The selected revisions are excluded because they are
+ # automatically restacked as part of the submission process.
+ restack = [
+ c
+ for c in repo.set(
+ b"(%ld::) - (%ld) - unstable() - obsolete() - public()",
+ revs,
+ revs,
+ )
+ ]
wnode = unfi[b'.'].node()
mapping = {} # {oldnode: [newnode]}
+ newnodes = []
+
+ drevid = drevids[0]
+
for i, rev in enumerate(revs):
old = unfi[rev]
- drevid = drevids[i]
+ if not fold:
+ drevid = drevids[i]
drev = [d for d in drevs if int(d[b'id']) == drevid][0]
- newdesc = getdescfromdrev(drev)
+
+ newdesc = get_amended_desc(drev, old, fold)
# Make sure commit message contain "Differential Revision"
- if old.description() != newdesc:
+ if (
+ old.description() != newdesc
+ or old.p1().node() in mapping
+ or old.p2().node() in mapping
+ ):
if old.phase() == phases.public:
ui.warn(
_(b"warning: not updating public commit %s\n")
@@ -1241,27 +1510,93 @@
newnode = new.commit()
mapping[old.node()] = [newnode]
- # Update diff property
- # If it fails just warn and keep going, otherwise the DREV
- # associations will be lost
- try:
- writediffproperties(unfi[newnode], diffmap[old.node()])
- except util.urlerr.urlerror:
- ui.warnnoi18n(
- b'Failed to update metadata for D%d\n' % drevid
+
+ if fold:
+ # Defer updating the (single) Diff until all nodes are
+ # collected. No tags were created, so none need to be
+ # removed.
+ newnodes.append(newnode)
+ continue
+
+ _amend_diff_properties(
+ unfi, drevid, [newnode], diffmap[old.node()]
+ )
+
+ # Remove local tags since it's no longer necessary
+ tagname = b'D%d' % drevid
+ if tagname in repo.tags():
+ tags.tag(
+ repo,
+ tagname,
+ nullid,
+ message=None,
+ user=None,
+ date=None,
+ local=True,
)
- # Remove local tags since it's no longer necessary
- tagname = b'D%d' % drevid
- if tagname in repo.tags():
- tags.tag(
- repo,
- tagname,
- nullid,
- message=None,
- user=None,
- date=None,
- local=True,
+ elif fold:
+ # When folding multiple commits into one review with
+ # --fold, track even the commits that weren't amended, so
+ # that their association isn't lost if the properties are
+ # rewritten below.
+ newnodes.append(old.node())
+
+ # If the submitted commits are public, no amend takes place so
+ # there are no newnodes and therefore no diff update to do.
+ if fold and newnodes:
+ diff = diffmap[old.node()]
+
+ # The diff object in diffmap doesn't have the local commits
+ # because that could be returned from differential.creatediff,
+ # not differential.querydiffs. So use the queried diff (if
+ # present), or force the amend (a new revision is being posted.)
+ if not olddiff or set(newnodes) != getlocalcommits(olddiff):
+ _debug(ui, b"updating local commit list for D%d\n" % drevid)
+ _amend_diff_properties(unfi, drevid, newnodes, diff)
+ else:
+ _debug(
+ ui,
+ b"local commit list for D%d is already up-to-date\n"
+ % drevid,
)
+ elif fold:
+ _debug(ui, b"no newnodes to update\n")
+
+ # Restack any children of first-time submissions that were orphaned
+ # in the process. The ctx won't report that it is an orphan until
+ # the cleanup takes place below.
+ for old in restack:
+ parents = [
+ mapping.get(old.p1().node(), (old.p1(),))[0],
+ mapping.get(old.p2().node(), (old.p2(),))[0],
+ ]
+ new = context.metadataonlyctx(
+ repo,
+ old,
+ parents=parents,
+ text=old.description(),
+ user=old.user(),
+ date=old.date(),
+ extra=old.extra(),
+ )
+
+ newnode = new.commit()
+
+ # Don't obsolete unselected descendants of nodes that have not
+ # been changed in this transaction- that results in an error.
+ if newnode != old.node():
+ mapping[old.node()] = [newnode]
+ _debug(
+ ui,
+ b"restabilizing %s as %s\n"
+ % (short(old.node()), short(newnode)),
+ )
+ else:
+ _debug(
+ ui,
+ b"not restabilizing unchanged %s\n" % short(old.node()),
+ )
+
scmutil.cleanupnodes(repo, mapping, b'phabsend', fixphase=True)
if wnode in mapping:
unfi.setparents(mapping[wnode][0])
@@ -1398,7 +1733,7 @@
return drevs, ancestordrevs
-def querydrev(repo, spec):
+def querydrev(ui, spec):
"""return a list of "Differential Revision" dicts
spec is a string using a simple query language, see docstring in phabread
@@ -1407,46 +1742,49 @@
A "Differential Revision dict" looks like:
{
- "id": "2",
- "phid": "PHID-DREV-672qvysjcczopag46qty",
- "title": "example",
- "uri": "https://phab.example.com/D2",
+ "activeDiffPHID": "PHID-DIFF-xoqnjkobbm6k4dk6hi72",
+ "authorPHID": "PHID-USER-tv3ohwc4v4jeu34otlye",
+ "auxiliary": {
+ "phabricator:depends-on": [
+ "PHID-DREV-gbapp366kutjebt7agcd"
+ ]
+ "phabricator:projects": [],
+ },
+ "branch": "default",
+ "ccs": [],
+ "commits": [],
"dateCreated": "1499181406",
"dateModified": "1499182103",
- "authorPHID": "PHID-USER-tv3ohwc4v4jeu34otlye",
- "status": "0",
- "statusName": "Needs Review",
- "properties": [],
- "branch": null,
- "summary": "",
- "testPlan": "",
- "lineCount": "2",
- "activeDiffPHID": "PHID-DIFF-xoqnjkobbm6k4dk6hi72",
"diffs": [
"3",
"4",
],
- "commits": [],
+ "hashes": [],
+ "id": "2",
+ "lineCount": "2",
+ "phid": "PHID-DREV-672qvysjcczopag46qty",
+ "properties": {},
+ "repositoryPHID": "PHID-REPO-hub2hx62ieuqeheznasv",
"reviewers": [],
- "ccs": [],
- "hashes": [],
- "auxiliary": {
- "phabricator:projects": [],
- "phabricator:depends-on": [
- "PHID-DREV-gbapp366kutjebt7agcd"
- ]
- },
- "repositoryPHID": "PHID-REPO-hub2hx62ieuqeheznasv",
"sourcePath": null
+ "status": "0",
+ "statusName": "Needs Review",
+ "summary": "",
+ "testPlan": "",
+ "title": "example",
+ "uri": "https://phab.example.com/D2",
}
"""
+ # TODO: replace differential.query and differential.querydiffs with
+ # differential.diff.search because the former (and their output) are
+ # frozen, and planned to be deprecated and removed.
def fetch(params):
"""params -> single drev or None"""
key = (params.get(b'ids') or params.get(b'phids') or [None])[0]
if key in prefetched:
return prefetched[key]
- drevs = callconduit(repo.ui, b'differential.query', params)
+ drevs = callconduit(ui, b'differential.query', params)
# Fill prefetched with the result
for drev in drevs:
prefetched[drev[b'phid']] = drev
@@ -1483,7 +1821,7 @@
drevs, ancestordrevs = _prefetchdrevs(tree)
# developer config: phabricator.batchsize
- batchsize = repo.ui.configint(b'phabricator', b'batchsize')
+ batchsize = ui.configint(b'phabricator', b'batchsize')
# Prefetch Differential Revisions in batch
tofetch = set(drevs)
@@ -1537,6 +1875,48 @@
return b'\n\n'.join(filter(None, [title, summary, testplan, uri]))
+def get_amended_desc(drev, ctx, folded):
+ """similar to ``getdescfromdrev``, but supports a folded series of commits
+
+ This is used when determining if an individual commit needs to have its
+ message amended after posting it for review. The determination is made for
+ each individual commit, even when they were folded into one review.
+ """
+ if not folded:
+ return getdescfromdrev(drev)
+
+ uri = b'Differential Revision: %s' % drev[b'uri']
+
+ # Since the commit messages were combined when posting multiple commits
+ # with --fold, the fields can't be read from Phabricator here, or *all*
+ # affected local revisions will end up with the same commit message after
+ # the URI is amended in. Append in the DREV line, or update it if it
+ # exists. At worst, this means commit message or test plan updates on
+ # Phabricator aren't propagated back to the repository, but that seems
+ # reasonable for the case where local commits are effectively combined
+ # in Phabricator.
+ m = _differentialrevisiondescre.search(ctx.description())
+ if not m:
+ return b'\n\n'.join([ctx.description(), uri])
+
+ return _differentialrevisiondescre.sub(uri, ctx.description())
+
+
+def getlocalcommits(diff):
+ """get the set of local commits from a diff object
+
+ See ``getdiffmeta()`` for an example diff object.
+ """
+ props = diff.get(b'properties') or {}
+ commits = props.get(b'local:commits') or {}
+ if len(commits) > 1:
+ return {bin(c) for c in commits.keys()}
+
+ # Storing the diff metadata predates storing `local:commits`, so continue
+ # to use that in the --no-fold case.
+ return {bin(getdiffmeta(diff).get(b'node', b'')) or None}
+
+
def getdiffmeta(diff):
"""get commit metadata (date, node, user, p1) from a diff object
@@ -1544,6 +1924,7 @@
"properties": {
"hg:meta": {
+ "branch": "default",
"date": "1499571514 25200",
"node": "98c08acae292b2faf60a279b4189beb6cff1414d",
"user": "Foo Bar <foo@example.com>",
@@ -1557,16 +1938,16 @@
"local:commits": {
"98c08acae292b2faf60a279b4189beb6cff1414d": {
"author": "Foo Bar",
- "time": 1499546314,
+ "authorEmail": "foo@example.com"
"branch": "default",
- "tag": "",
"commit": "98c08acae292b2faf60a279b4189beb6cff1414d",
+ "local": "1000",
+ "message": "...",
+ "parents": ["6d0abad76b30e4724a37ab8721d630394070fe16"],
"rev": "98c08acae292b2faf60a279b4189beb6cff1414d",
- "local": "1000",
- "parents": ["6d0abad76b30e4724a37ab8721d630394070fe16"],
"summary": "...",
- "message": "...",
- "authorEmail": "foo@example.com"
+ "tag": "",
+ "time": 1499546314,
}
}
}
@@ -1605,24 +1986,47 @@
return meta
-def readpatch(repo, drevs, write):
+def _getdrevs(ui, stack, specs):
+ """convert user supplied DREVSPECs into "Differential Revision" dicts
+
+ See ``hg help phabread`` for how to specify each DREVSPEC.
+ """
+ if len(specs) > 0:
+
+ def _formatspec(s):
+ if stack:
+ s = b':(%s)' % s
+ return b'(%s)' % s
+
+ spec = b'+'.join(pycompat.maplist(_formatspec, specs))
+
+ drevs = querydrev(ui, spec)
+ if drevs:
+ return drevs
+
+ raise error.Abort(_(b"empty DREVSPEC set"))
+
+
+def readpatch(ui, drevs, write):
"""generate plain-text patch readable by 'hg import'
- write is usually ui.write. drevs is what "querydrev" returns, results of
+ write takes a list of (DREV, bytes), where DREV is the differential number
+ (as bytes, without the "D" prefix) and the bytes are the text of a patch
+ to be imported. drevs is what "querydrev" returns, results of
"differential.query".
"""
# Prefetch hg:meta property for all diffs
- diffids = sorted(set(max(int(v) for v in drev[b'diffs']) for drev in drevs))
- diffs = callconduit(repo.ui, b'differential.querydiffs', {b'ids': diffids})
+ diffids = sorted({max(int(v) for v in drev[b'diffs']) for drev in drevs})
+ diffs = callconduit(ui, b'differential.querydiffs', {b'ids': diffids})
+
+ patches = []
# Generate patch for each drev
for drev in drevs:
- repo.ui.note(_(b'reading D%s\n') % drev[b'id'])
+ ui.note(_(b'reading D%s\n') % drev[b'id'])
diffid = max(int(v) for v in drev[b'diffs'])
- body = callconduit(
- repo.ui, b'differential.getrawdiff', {b'diffID': diffid}
- )
+ body = callconduit(ui, b'differential.getrawdiff', {b'diffID': diffid})
desc = getdescfromdrev(drev)
header = b'# HG changeset patch\n'
@@ -1635,22 +2039,28 @@
header += b'# %s %s\n' % (_metanamemap[k], meta[k])
content = b'%s%s\n%s' % (header, desc, body)
- write(content)
+ patches.append((drev[b'id'], content))
+
+ # Write patches to the supplied callback
+ write(patches)
@vcrcommand(
b'phabread',
[(b'', b'stack', False, _(b'read dependencies'))],
- _(b'DREVSPEC [OPTIONS]'),
+ _(b'DREVSPEC... [OPTIONS]'),
helpcategory=command.CATEGORY_IMPORT_EXPORT,
+ optionalrepo=True,
)
-def phabread(ui, repo, spec, **opts):
+def phabread(ui, repo, *specs, **opts):
"""print patches from Phabricator suitable for importing
DREVSPEC could be a Differential Revision identity, like ``D123``, or just
the number ``123``. It could also have common operators like ``+``, ``-``,
``&``, ``(``, ``)`` for complex queries. Prefix ``:`` could be used to
- select a stack.
+ select a stack. If multiple DREVSPEC values are given, the result is the
+ union of each individually evaluated value. No attempt is currently made
+ to reorder the values to run from parent to child.
``abandoned``, ``accepted``, ``closed``, ``needsreview``, ``needsrevision``
could be used to filter patches by status. For performance reason, they
@@ -1664,10 +2074,74 @@
It is equivalent to the ``:`` operator.
"""
opts = pycompat.byteskwargs(opts)
- if opts.get(b'stack'):
- spec = b':(%s)' % spec
- drevs = querydrev(repo, spec)
- readpatch(repo, drevs, ui.write)
+ drevs = _getdrevs(ui, opts.get(b'stack'), specs)
+
+ def _write(patches):
+ for drev, content in patches:
+ ui.write(content)
+
+ readpatch(ui, drevs, _write)
+
+
+@vcrcommand(
+ b'phabimport',
+ [(b'', b'stack', False, _(b'import dependencies as well'))],
+ _(b'DREVSPEC... [OPTIONS]'),
+ helpcategory=command.CATEGORY_IMPORT_EXPORT,
+)
+def phabimport(ui, repo, *specs, **opts):
+ """import patches from Phabricator for the specified Differential Revisions
+
+ The patches are read and applied starting at the parent of the working
+ directory.
+
+ See ``hg help phabread`` for how to specify DREVSPEC.
+ """
+ opts = pycompat.byteskwargs(opts)
+
+ # --bypass avoids losing exec and symlink bits when importing on Windows,
+ # and allows importing with a dirty wdir. It also aborts instead of leaving
+ # rejects.
+ opts[b'bypass'] = True
+
+ # Mandatory default values, synced with commands.import
+ opts[b'strip'] = 1
+ opts[b'prefix'] = b''
+ # Evolve 9.3.0 assumes this key is present in cmdutil.tryimportone()
+ opts[b'obsolete'] = False
+
+ if ui.configbool(b'phabimport', b'secret'):
+ opts[b'secret'] = True
+ if ui.configbool(b'phabimport', b'obsolete'):
+ opts[b'obsolete'] = True # Handled by evolve wrapping tryimportone()
+
+ def _write(patches):
+ parents = repo[None].parents()
+
+ with repo.wlock(), repo.lock(), repo.transaction(b'phabimport'):
+ for drev, contents in patches:
+ ui.status(_(b'applying patch from D%s\n') % drev)
+
+ with patch.extract(ui, pycompat.bytesio(contents)) as patchdata:
+ msg, node, rej = cmdutil.tryimportone(
+ ui,
+ repo,
+ patchdata,
+ parents,
+ opts,
+ [],
+ None, # Never update wdir to another revision
+ )
+
+ if not node:
+ raise error.Abort(_(b'D%s: no diffs found') % drev)
+
+ ui.note(msg + b'\n')
+ parents = [repo[node]]
+
+ drevs = _getdrevs(ui, opts.get(b'stack'), specs)
+
+ readpatch(repo.ui, drevs, _write)
@vcrcommand(
@@ -1679,10 +2153,11 @@
(b'', b'reclaim', False, _(b'reclaim revisions')),
(b'm', b'comment', b'', _(b'comment on the last revision')),
],
- _(b'DREVSPEC [OPTIONS]'),
+ _(b'DREVSPEC... [OPTIONS]'),
helpcategory=command.CATEGORY_IMPORT_EXPORT,
+ optionalrepo=True,
)
-def phabupdate(ui, repo, spec, **opts):
+def phabupdate(ui, repo, *specs, **opts):
"""update Differential Revision in batch
DREVSPEC selects revisions. See :hg:`help phabread` for its usage.
@@ -1696,7 +2171,7 @@
for f in flags:
actions.append({b'type': f, b'value': True})
- drevs = querydrev(repo, spec)
+ drevs = _getdrevs(ui, opts.get(b'stack'), specs)
for i, drev in enumerate(drevs):
if i + 1 == len(drevs) and opts.get(b'comment'):
actions.append({b'type': b'comment', b'value': opts[b'comment']})
@@ -1759,11 +2234,11 @@
"""Phabricator differiential status"""
revs = repo.revs('sort(_underway(), topo)')
drevmap = getdrevmap(repo, revs)
- unknownrevs, drevids, revsbydrevid = [], set([]), {}
+ unknownrevs, drevids, revsbydrevid = [], set(), {}
for rev, drevid in pycompat.iteritems(drevmap):
if drevid is not None:
drevids.add(drevid)
- revsbydrevid.setdefault(drevid, set([])).add(rev)
+ revsbydrevid.setdefault(drevid, set()).add(rev)
else:
unknownrevs.append(rev)
--- a/hgext/purge.py Mon Apr 13 16:30:13 2020 +0300
+++ b/hgext/purge.py Thu Apr 16 22:51:09 2020 +0530
@@ -48,6 +48,7 @@
[
(b'a', b'abort-on-err', None, _(b'abort if an error occurs')),
(b'', b'all', None, _(b'purge ignored files too')),
+ (b'i', b'ignored', None, _(b'purge only ignored files')),
(b'', b'dirs', None, _(b'purge empty directories')),
(b'', b'files', None, _(b'purge files')),
(b'p', b'print', None, _(b'print filenames instead of deleting them')),
@@ -80,7 +81,7 @@
But it will leave untouched:
- Modified and unmodified tracked files
- - Ignored files (unless --all is specified)
+ - Ignored files (unless -i or --all is specified)
- New files added to the repository (with :hg:`add`)
The --files and --dirs options can be used to direct purge to delete
@@ -96,12 +97,19 @@
option.
'''
opts = pycompat.byteskwargs(opts)
+ cmdutil.check_at_most_one_arg(opts, b'all', b'ignored')
act = not opts.get(b'print')
eol = b'\n'
if opts.get(b'print0'):
eol = b'\0'
act = False # --print0 implies --print
+ if opts.get(b'all', False):
+ ignored = True
+ unknown = True
+ else:
+ ignored = opts.get(b'ignored', False)
+ unknown = not ignored
removefiles = opts.get(b'files')
removedirs = opts.get(b'dirs')
@@ -115,7 +123,8 @@
paths = mergemod.purge(
repo,
match,
- ignored=opts.get(b'all', False),
+ unknown=unknown,
+ ignored=ignored,
removeemptydirs=removedirs,
removefiles=removefiles,
abortonerror=opts.get(b'abort_on_err'),
--- a/hgext/rebase.py Mon Apr 13 16:30:13 2020 +0300
+++ b/hgext/rebase.py Thu Apr 16 22:51:09 2020 +0530
@@ -37,6 +37,7 @@
hg,
merge as mergemod,
mergeutil,
+ node as nodemod,
obsolete,
obsutil,
patch,
@@ -177,6 +178,7 @@
# --continue or --abort)), the original repo should be used so
# visibility-dependent revsets are correct.
self.prepared = False
+ self.resume = False
self._repo = repo
self.ui = ui
@@ -366,6 +368,7 @@
_checkobsrebase(self.repo, self.ui, obsoleteset, skippedset)
def _prepareabortorcontinue(self, isabort, backup=True, suppwarns=False):
+ self.resume = True
try:
self.restorestatus()
self.collapsemsg = restorecollapsemsg(self.repo, isabort)
@@ -503,7 +506,7 @@
p.complete()
ui.note(_(b'rebase merging completed\n'))
- def _concludenode(self, rev, p1, p2, editor, commitmsg=None):
+ def _concludenode(self, rev, p1, editor, commitmsg=None):
'''Commit the wd changes with parents p1 and p2.
Reuse commit info from rev but also store useful information in extra.
@@ -527,8 +530,6 @@
if self.inmemory:
newnode = commitmemorynode(
repo,
- p1,
- p2,
wctx=self.wctx,
extra=extra,
commitmsg=commitmsg,
@@ -540,8 +541,6 @@
else:
newnode = commitnode(
repo,
- p1,
- p2,
extra=extra,
commitmsg=commitmsg,
editor=editor,
@@ -549,11 +548,6 @@
date=date,
)
- if newnode is None:
- # If it ended up being a no-op commit, then the normal
- # merge state clean-up path doesn't happen, so do it
- # here. Fix issue5494
- mergemod.mergestate.clean(repo)
return newnode
def _rebasenode(self, tr, rev, allowdivergence, progressfn):
@@ -605,8 +599,9 @@
self.skipped,
self.obsoletenotrebased,
)
- if not self.inmemory and len(repo[None].parents()) == 2:
+ if self.resume and self.wctx.p1().rev() == p1:
repo.ui.debug(b'resuming interrupted rebase\n')
+ self.resume = False
else:
overrides = {(b'ui', b'forcemerge'): opts.get(b'tool', b'')}
with ui.configoverride(overrides, b'rebase'):
@@ -614,6 +609,7 @@
repo,
rev,
p1,
+ p2,
base,
self.collapsef,
dest,
@@ -635,13 +631,15 @@
editor = cmdutil.getcommiteditor(
editform=editform, **pycompat.strkwargs(opts)
)
- newnode = self._concludenode(rev, p1, p2, editor)
+ # We need to set parents again here just in case we're continuing
+ # a rebase started with an old hg version (before 9c9cfecd4600),
+ # because those old versions would have left us with two dirstate
+ # parents, and we don't want to create a merge commit here (unless
+ # we're rebasing a merge commit).
+ self.wctx.setparents(repo[p1].node(), repo[p2].node())
+ newnode = self._concludenode(rev, p1, editor)
else:
# Skip commit if we are collapsing
- if self.inmemory:
- self.wctx.setbase(repo[p1])
- else:
- repo.setparents(repo[p1].node())
newnode = None
# Update the state
if newnode is not None:
@@ -696,8 +694,9 @@
editor = cmdutil.getcommiteditor(edit=editopt, editform=editform)
revtoreuse = max(self.state)
+ self.wctx.setparents(repo[p1].node(), repo[self.external].node())
newnode = self._concludenode(
- revtoreuse, p1, self.external, editor, commitmsg=commitmsg
+ revtoreuse, p1, editor, commitmsg=commitmsg
)
if newnode is not None:
@@ -799,9 +798,7 @@
# Update away from the rebase if necessary
if shouldupdate:
- mergemod.update(
- repo, self.originalwd, branchmerge=False, force=True
- )
+ mergemod.clean_update(repo[self.originalwd])
# Strip from the first rebased revision
if rebased:
@@ -824,14 +821,14 @@
(
b's',
b'source',
- b'',
- _(b'rebase the specified changeset and descendants'),
+ [],
+ _(b'rebase the specified changesets and their descendants'),
_(b'REV'),
),
(
b'b',
b'base',
- b'',
+ [],
_(b'rebase everything from branching point of specified changeset'),
_(b'REV'),
),
@@ -880,7 +877,7 @@
+ cmdutil.dryrunopts
+ cmdutil.formatteropts
+ cmdutil.confirmopts,
- _(b'[-s REV | -b REV] [-d REV] [OPTION]'),
+ _(b'[[-s REV]... | [-b REV]... | [-r REV]...] [-d REV] [OPTION]...'),
helpcategory=command.CATEGORY_CHANGE_MANAGEMENT,
)
def rebase(ui, repo, **opts):
@@ -1011,10 +1008,10 @@
action = cmdutil.check_at_most_one_arg(opts, b'abort', b'stop', b'continue')
if action:
cmdutil.check_incompatible_arguments(
- opts, action, b'confirm', b'dry_run'
+ opts, action, [b'confirm', b'dry_run']
)
cmdutil.check_incompatible_arguments(
- opts, action, b'rev', b'source', b'base', b'dest'
+ opts, action, [b'rev', b'source', b'base', b'dest']
)
cmdutil.check_at_most_one_arg(opts, b'confirm', b'dry_run')
cmdutil.check_at_most_one_arg(opts, b'rev', b'source', b'base')
@@ -1028,7 +1025,7 @@
if opts.get(b'auto_orphans'):
disallowed_opts = set(opts) - {b'auto_orphans'}
cmdutil.check_incompatible_arguments(
- opts, b'auto_orphans', *disallowed_opts
+ opts, b'auto_orphans', disallowed_opts
)
userrevs = list(repo.revs(opts.get(b'auto_orphans')))
@@ -1195,8 +1192,8 @@
repo,
inmemory,
opts.get(b'dest', None),
- opts.get(b'source', None),
- opts.get(b'base', None),
+ opts.get(b'source', []),
+ opts.get(b'base', []),
opts.get(b'rev', []),
destspace=destspace,
)
@@ -1226,16 +1223,7 @@
rbsrt._finishrebase()
-def _definedestmap(
- ui,
- repo,
- inmemory,
- destf=None,
- srcf=None,
- basef=None,
- revf=None,
- destspace=None,
-):
+def _definedestmap(ui, repo, inmemory, destf, srcf, basef, revf, destspace):
"""use revisions argument to define destmap {srcrev: destrev}"""
if revf is None:
revf = []
@@ -1261,14 +1249,14 @@
ui.status(_(b'empty "rev" revision set - nothing to rebase\n'))
return None
elif srcf:
- src = scmutil.revrange(repo, [srcf])
+ src = scmutil.revrange(repo, srcf)
if not src:
ui.status(_(b'empty "source" revision set - nothing to rebase\n'))
return None
- rebaseset = repo.revs(b'(%ld)::', src)
- assert rebaseset
+ # `+ (%ld)` to work around `wdir()::` being empty
+ rebaseset = repo.revs(b'(%ld):: + (%ld)', src, src)
else:
- base = scmutil.revrange(repo, [basef or b'.'])
+ base = scmutil.revrange(repo, basef or [b'.'])
if not base:
ui.status(
_(b'empty "base" revision set - ' b"can't compute rebase set\n")
@@ -1341,6 +1329,8 @@
)
return None
+ if nodemod.wdirrev in rebaseset:
+ raise error.Abort(_(b'cannot rebase the working copy'))
rebasingwcp = repo[b'.'].rev() in rebaseset
ui.log(
b"rebase",
@@ -1420,7 +1410,7 @@
)
-def commitmemorynode(repo, p1, p2, wctx, editor, extra, user, date, commitmsg):
+def commitmemorynode(repo, wctx, editor, extra, user, date, commitmsg):
'''Commit the memory changes with parents p1 and p2.
Return node of committed revision.'''
# Replicates the empty check in ``repo.commit``.
@@ -1433,7 +1423,6 @@
if b'branch' in extra:
branch = extra[b'branch']
- wctx.setparents(repo[p1].node(), repo[p2].node())
memctx = wctx.tomemctx(
commitmsg,
date=date,
@@ -1447,15 +1436,13 @@
return commitres
-def commitnode(repo, p1, p2, editor, extra, user, date, commitmsg):
+def commitnode(repo, editor, extra, user, date, commitmsg):
'''Commit the wd changes with parents p1 and p2.
Return node of committed revision.'''
dsguard = util.nullcontextmanager()
if not repo.ui.configbool(b'rebase', b'singletransaction'):
dsguard = dirstateguard.dirstateguard(repo, b'rebase')
with dsguard:
- repo.setparents(repo[p1].node(), repo[p2].node())
-
# Commit might fail if unresolved files exist
newnode = repo.commit(
text=commitmsg, user=user, date=date, extra=extra, editor=editor
@@ -1465,7 +1452,7 @@
return newnode
-def rebasenode(repo, rev, p1, base, collapse, dest, wctx):
+def rebasenode(repo, rev, p1, p2, base, collapse, dest, wctx):
"""Rebase a single revision rev on top of p1 using base as merge ancestor"""
# Merge phase
# Update to destination and merge it with local
@@ -1475,7 +1462,7 @@
else:
if repo[b'.'].rev() != p1:
repo.ui.debug(b" update to %d:%s\n" % (p1, p1ctx))
- mergemod.update(repo, p1, branchmerge=False, force=True)
+ mergemod.clean_update(p1ctx)
else:
repo.ui.debug(b" already in destination\n")
# This is, alas, necessary to invalidate workingctx's manifest cache,
@@ -1499,6 +1486,7 @@
labels=[b'dest', b'source'],
wc=wctx,
)
+ wctx.setparents(p1ctx.node(), repo[p2].node())
if collapse:
copies.graftcopies(wctx, ctx, repo[dest])
else:
@@ -1678,22 +1666,6 @@
elif p in state and state[p] > 0:
np = state[p]
- # "bases" only record "special" merge bases that cannot be
- # calculated from changelog DAG (i.e. isancestor(p, np) is False).
- # For example:
- #
- # B' # rebase -s B -d D, when B was rebased to B'. dest for C
- # | C # is B', but merge base for C is B, instead of
- # D | # changelog.ancestor(C, B') == A. If changelog DAG and
- # | B # "state" edges are merged (so there will be an edge from
- # |/ # B to B'), the merge base is still ancestor(C, B') in
- # A # the merged graph.
- #
- # Also see https://bz.mercurial-scm.org/show_bug.cgi?id=1950#c8
- # which uses "virtual null merge" to explain this situation.
- if isancestor(p, np):
- bases[i] = nullrev
-
# If one parent becomes an ancestor of the other, drop the ancestor
for j, x in enumerate(newps[:i]):
if x == nullrev:
@@ -1739,12 +1711,6 @@
if any(p != nullrev and isancestor(rev, p) for p in newps):
raise error.Abort(_(b'source is ancestor of destination'))
- # "rebasenode" updates to new p1, use the corresponding merge base.
- if bases[0] != nullrev:
- base = bases[0]
- else:
- base = None
-
# Check if the merge will contain unwanted changes. That may happen if
# there are multiple special (non-changelog ancestor) merge bases, which
# cannot be handled well by the 3-way merge algorithm. For example:
@@ -1760,15 +1726,16 @@
# But our merge base candidates (D and E in above case) could still be
# better than the default (ancestor(F, Z) == null). Therefore still
# pick one (so choose p1 above).
- if sum(1 for b in set(bases) if b != nullrev) > 1:
+ if sum(1 for b in set(bases) if b != nullrev and b not in newps) > 1:
unwanted = [None, None] # unwanted[i]: unwanted revs if choose bases[i]
for i, base in enumerate(bases):
- if base == nullrev:
+ if base == nullrev or base in newps:
continue
# Revisions in the side (not chosen as merge base) branch that
# might contain "surprising" contents
+ other_bases = set(bases) - {base}
siderevs = list(
- repo.revs(b'((%ld-%d) %% (%d+%d))', bases, base, base, dest)
+ repo.revs(b'(%ld %% (%d+%d))', other_bases, base, dest)
)
# If those revisions are covered by rebaseset, the result is good.
@@ -1786,35 +1753,40 @@
)
)
- # Choose a merge base that has a minimal number of unwanted revs.
- l, i = min(
- (len(revs), i)
- for i, revs in enumerate(unwanted)
- if revs is not None
- )
- base = bases[i]
-
- # newps[0] should match merge base if possible. Currently, if newps[i]
- # is nullrev, the only case is newps[i] and newps[j] (j < i), one is
- # the other's ancestor. In that case, it's fine to not swap newps here.
- # (see CASE-1 and CASE-2 above)
- if i != 0 and newps[i] != nullrev:
- newps[0], newps[i] = newps[i], newps[0]
+ if any(revs is not None for revs in unwanted):
+ # Choose a merge base that has a minimal number of unwanted revs.
+ l, i = min(
+ (len(revs), i)
+ for i, revs in enumerate(unwanted)
+ if revs is not None
+ )
- # The merge will include unwanted revisions. Abort now. Revisit this if
- # we have a more advanced merge algorithm that handles multiple bases.
- if l > 0:
- unwanteddesc = _(b' or ').join(
- (
- b', '.join(b'%d:%s' % (r, repo[r]) for r in revs)
- for revs in unwanted
- if revs is not None
+ # The merge will include unwanted revisions. Abort now. Revisit this if
+ # we have a more advanced merge algorithm that handles multiple bases.
+ if l > 0:
+ unwanteddesc = _(b' or ').join(
+ (
+ b', '.join(b'%d:%s' % (r, repo[r]) for r in revs)
+ for revs in unwanted
+ if revs is not None
+ )
)
- )
- raise error.Abort(
- _(b'rebasing %d:%s will include unwanted changes from %s')
- % (rev, repo[rev], unwanteddesc)
- )
+ raise error.Abort(
+ _(b'rebasing %d:%s will include unwanted changes from %s')
+ % (rev, repo[rev], unwanteddesc)
+ )
+
+ # newps[0] should match merge base if possible. Currently, if newps[i]
+ # is nullrev, the only case is newps[i] and newps[j] (j < i), one is
+ # the other's ancestor. In that case, it's fine to not swap newps here.
+ # (see CASE-1 and CASE-2 above)
+ if i != 0:
+ if newps[i] != nullrev:
+ newps[0], newps[i] = newps[i], newps[0]
+ bases[0], bases[i] = bases[i], bases[0]
+
+ # "rebasenode" updates to new p1, use the corresponding merge base.
+ base = bases[0]
repo.ui.debug(b" future parents are %d and %d\n" % tuple(newps))
@@ -1962,7 +1934,7 @@
# applied patch. But it prevents messing up the working directory when
# a partially completed rebase is blocked by mq.
if b'qtip' in repo.tags():
- mqapplied = set(repo[s.node].rev() for s in repo.mq.applied)
+ mqapplied = {repo[s.node].rev() for s in repo.mq.applied}
if set(destmap.values()) & mqapplied:
raise error.Abort(_(b'cannot rebase onto an applied mq patch'))
@@ -2147,7 +2119,7 @@
def _filterobsoleterevs(repo, revs):
"""returns a set of the obsolete revisions in revs"""
- return set(r for r in revs if repo[r].obsolete())
+ return {r for r in revs if repo[r].obsolete()}
def _computeobsoletenotrebased(repo, rebaseobsrevs, destmap):
--- a/hgext/releasenotes.py Mon Apr 13 16:30:13 2020 +0300
+++ b/hgext/releasenotes.py Thu Apr 16 22:51:09 2020 +0530
@@ -654,7 +654,7 @@
opts = pycompat.byteskwargs(opts)
sections = releasenotessections(ui, repo)
- cmdutil.check_incompatible_arguments(opts, b'list', b'rev', b'check')
+ cmdutil.check_incompatible_arguments(opts, b'list', [b'rev', b'check'])
if opts.get(b'list'):
return _getadmonitionlist(ui, sections)
--- a/hgext/remotefilelog/__init__.py Mon Apr 13 16:30:13 2020 +0300
+++ b/hgext/remotefilelog/__init__.py Thu Apr 16 22:51:09 2020 +0530
@@ -737,7 +737,7 @@
# "link" is actually wrong here (it is set to len(changelog))
# if changelog remains unchanged, skip writing file revisions
# but still do a sanity check about pending multiple revisions
- if len(set(x[3] for x in pendingfilecommits)) > 1:
+ if len({x[3] for x in pendingfilecommits}) > 1:
raise error.ProgrammingError(
b'pending multiple integer revisions are not supported'
)
--- a/hgext/remotefilelog/basepack.py Mon Apr 13 16:30:13 2020 +0300
+++ b/hgext/remotefilelog/basepack.py Thu Apr 16 22:51:09 2020 +0530
@@ -101,7 +101,7 @@
self._lastpack = pack
yield pack
- cachedpacks = set(pack for pack in self._lrucache)
+ cachedpacks = {pack for pack in self._lrucache}
# Yield for paths not in the cache.
for pack in self._packs - cachedpacks:
self._lastpack = pack
@@ -259,7 +259,7 @@
newpacks = []
if now > self.lastrefresh + REFRESHRATE:
self.lastrefresh = now
- previous = set(p.path for p in self.packs)
+ previous = {p.path for p in self.packs}
for filepath, __, __ in self._getavailablepackfilessorted():
if filepath not in previous:
newpack = self.getpack(filepath)
--- a/hgext/remotefilelog/contentstore.py Mon Apr 13 16:30:13 2020 +0300
+++ b/hgext/remotefilelog/contentstore.py Thu Apr 16 22:51:09 2020 +0530
@@ -300,7 +300,7 @@
rl = self._revlog(name)
ancestors = {}
- missing = set((node,))
+ missing = {node}
for ancrev in rl.ancestors([rl.rev(node)], inclusive=True):
ancnode = rl.node(ancrev)
missing.discard(ancnode)
--- a/hgext/remotefilelog/datapack.py Mon Apr 13 16:30:13 2020 +0300
+++ b/hgext/remotefilelog/datapack.py Thu Apr 16 22:51:09 2020 +0530
@@ -271,9 +271,9 @@
def cleanup(self, ledger):
entries = ledger.sources.get(self, [])
allkeys = set(self)
- repackedkeys = set(
+ repackedkeys = {
(e.filename, e.node) for e in entries if e.datarepacked or e.gced
- )
+ }
if len(allkeys - repackedkeys) == 0:
if self.path not in ledger.created:
--- a/hgext/remotefilelog/historypack.py Mon Apr 13 16:30:13 2020 +0300
+++ b/hgext/remotefilelog/historypack.py Thu Apr 16 22:51:09 2020 +0530
@@ -132,7 +132,7 @@
known = set()
section = self._findsection(name)
filename, offset, size, nodeindexoffset, nodeindexsize = section
- pending = set((node,))
+ pending = {node}
o = 0
while o < size:
if not pending:
@@ -291,9 +291,9 @@
def cleanup(self, ledger):
entries = ledger.sources.get(self, [])
allkeys = set(self)
- repackedkeys = set(
+ repackedkeys = {
(e.filename, e.node) for e in entries if e.historyrepacked
- )
+ }
if len(allkeys - repackedkeys) == 0:
if self.path not in ledger.created:
@@ -452,7 +452,7 @@
sectionstart = self.packfp.tell()
# Write the file section content
- entrymap = dict((e[0], e) for e in entries)
+ entrymap = {e[0]: e for e in entries}
def parentfunc(node):
x, p1, p2, x, x, x = entrymap[node]
--- a/hgext/remotefilelog/remotefilelog.py Mon Apr 13 16:30:13 2020 +0300
+++ b/hgext/remotefilelog/remotefilelog.py Thu Apr 16 22:51:09 2020 +0530
@@ -259,6 +259,10 @@
raise RuntimeError(b"len not supported")
+ def heads(self):
+ # Fake heads of the filelog to satisfy hgweb.
+ return []
+
def empty(self):
return False
@@ -429,7 +433,7 @@
return nullid
revmap, parentfunc = self._buildrevgraph(a, b)
- nodemap = dict(((v, k) for (k, v) in pycompat.iteritems(revmap)))
+ nodemap = {v: k for (k, v) in pycompat.iteritems(revmap)}
ancs = ancestor.ancestors(parentfunc, revmap[a], revmap[b])
if ancs:
@@ -444,7 +448,7 @@
return nullid
revmap, parentfunc = self._buildrevgraph(a, b)
- nodemap = dict(((v, k) for (k, v) in pycompat.iteritems(revmap)))
+ nodemap = {v: k for (k, v) in pycompat.iteritems(revmap)}
ancs = ancestor.commonancestorsheads(parentfunc, revmap[a], revmap[b])
return map(nodemap.__getitem__, ancs)
--- a/hgext/remotefilelog/repack.py Mon Apr 13 16:30:13 2020 +0300
+++ b/hgext/remotefilelog/repack.py Thu Apr 16 22:51:09 2020 +0530
@@ -321,7 +321,7 @@
def _allpackfileswithsuffix(files, packsuffix, indexsuffix):
result = []
- fileset = set(fn for fn, mode, stat in files)
+ fileset = {fn for fn, mode, stat in files}
for filename, mode, stat in files:
if not filename.endswith(packsuffix):
continue
--- a/hgext/schemes.py Mon Apr 13 16:30:13 2020 +0300
+++ b/hgext/schemes.py Thu Apr 16 22:51:09 2020 +0530
@@ -97,7 +97,7 @@
parts = parts[:-1]
else:
tail = b''
- context = dict((b'%d' % (i + 1), v) for i, v in enumerate(parts))
+ context = {b'%d' % (i + 1): v for i, v in enumerate(parts)}
return b''.join(self.templater.process(self.url, context)) + tail
--- a/hgext/sparse.py Mon Apr 13 16:30:13 2020 +0300
+++ b/hgext/sparse.py Thu Apr 16 22:51:09 2020 +0530
@@ -246,7 +246,7 @@
if changedfiles is not None:
# In _rebuild, these files will be deleted from the dirstate
# when they are not found to be in allfiles
- dirstatefilestoremove = set(f for f in self if not matcher(f))
+ dirstatefilestoremove = {f for f in self if not matcher(f)}
changedfiles = dirstatefilestoremove.union(changedfiles)
return orig(self, parent, allfiles, changedfiles)
--- a/hgext/strip.py Mon Apr 13 16:30:13 2020 +0300
+++ b/hgext/strip.py Thu Apr 16 22:51:09 2020 +0530
@@ -228,7 +228,7 @@
for p in repo.dirstate.parents()
)
- rootnodes = set(cl.node(r) for r in roots)
+ rootnodes = {cl.node(r) for r in roots}
q = getattr(repo, 'mq', None)
if q is not None and q.applied:
--- a/hgext/transplant.py Mon Apr 13 16:30:13 2020 +0300
+++ b/hgext/transplant.py Thu Apr 16 22:51:09 2020 +0530
@@ -761,12 +761,12 @@
def checkopts(opts, revs):
if opts.get(b'continue'):
cmdutil.check_incompatible_arguments(
- opts, b'continue', b'branch', b'all', b'merge'
+ opts, b'continue', [b'branch', b'all', b'merge']
)
return
if opts.get(b'stop'):
cmdutil.check_incompatible_arguments(
- opts, b'stop', b'branch', b'all', b'merge'
+ opts, b'stop', [b'branch', b'all', b'merge']
)
return
if not (
@@ -840,10 +840,10 @@
tf = tp.transplantfilter(repo, source, p1)
if opts.get(b'prune'):
- prune = set(
+ prune = {
source[r].node()
for r in scmutil.revrange(source, opts.get(b'prune'))
- )
+ }
matchfn = lambda x: tf(x) and x not in prune
else:
matchfn = tf
--- a/hgext/uncommit.py Mon Apr 13 16:30:13 2020 +0300
+++ b/hgext/uncommit.py Thu Apr 16 22:51:09 2020 +0530
@@ -65,7 +65,7 @@
base = ctx.p1()
# ctx
initialfiles = set(ctx.files())
- exclude = set(f for f in initialfiles if match(f))
+ exclude = {f for f in initialfiles if match(f)}
# No files matched commit, so nothing excluded
if not exclude:
@@ -78,9 +78,9 @@
files = initialfiles - exclude
# Filter copies
copied = copiesmod.pathcopies(base, ctx)
- copied = dict(
- (dst, src) for dst, src in pycompat.iteritems(copied) if dst in files
- )
+ copied = {
+ dst: src for dst, src in pycompat.iteritems(copied) if dst in files
+ }
def filectxfn(repo, memctx, path, contentctx=ctx, redirect=()):
if path not in contentctx:
--- a/i18n/polib.py Mon Apr 13 16:30:13 2020 +0300
+++ b/i18n/polib.py Thu Apr 16 22:51:09 2020 +0530
@@ -722,8 +722,8 @@
object POFile, the reference catalog.
"""
# Store entries in dict/set for faster access
- self_entries = dict((entry.msgid, entry) for entry in self)
- refpot_msgids = set(entry.msgid for entry in refpot)
+ self_entries = {entry.msgid: entry for entry in self}
+ refpot_msgids = {entry.msgid for entry in refpot}
# Merge entries that are in the refpot
for entry in refpot:
e = self_entries.get(entry.msgid)
@@ -1808,9 +1808,9 @@
entry = self._build_entry(
msgid=msgid_tokens[0],
msgid_plural=msgid_tokens[1],
- msgstr_plural=dict(
- (k, v) for k, v in enumerate(msgstr.split(b('\0')))
- ),
+ msgstr_plural={
+ k: v for k, v in enumerate(msgstr.split(b('\0')))
+ },
)
else:
entry = self._build_entry(msgid=msgid, msgstr=msgstr)
--- a/mercurial/ancestor.py Mon Apr 13 16:30:13 2020 +0300
+++ b/mercurial/ancestor.py Thu Apr 16 22:51:09 2020 +0530
@@ -138,7 +138,7 @@
k = 0
for i in interesting:
k |= i
- return set(n for (i, n) in mapping if k & i)
+ return {n for (i, n) in mapping if k & i}
gca = commonancestorsheads(pfunc, *orignodes)
@@ -393,39 +393,3 @@
# free up memory.
self._containsiter = None
return False
-
-
-class rustlazyancestors(object):
- def __init__(self, index, revs, stoprev=0, inclusive=False):
- self._index = index
- self._stoprev = stoprev
- self._inclusive = inclusive
- # no need to prefilter out init revs that are smaller than stoprev,
- # it's done by rustlazyancestors constructor.
- # we need to convert to a list, because our ruslazyancestors
- # constructor (from C code) doesn't understand anything else yet
- self._initrevs = initrevs = list(revs)
-
- self._containsiter = parsers.rustlazyancestors(
- index, initrevs, stoprev, inclusive
- )
-
- def __nonzero__(self):
- """False if the set is empty, True otherwise.
-
- It's better to duplicate this essentially trivial method than
- to subclass lazyancestors
- """
- try:
- next(iter(self))
- return True
- except StopIteration:
- return False
-
- def __iter__(self):
- return parsers.rustlazyancestors(
- self._index, self._initrevs, self._stoprev, self._inclusive
- )
-
- def __contains__(self, target):
- return target in self._containsiter
--- a/mercurial/archival.py Mon Apr 13 16:30:13 2020 +0300
+++ b/mercurial/archival.py Thu Apr 16 22:51:09 2020 +0530
@@ -359,7 +359,7 @@
if match(name):
write(name, 0o644, False, lambda: buildmetadata(ctx))
- files = [f for f in ctx.manifest().matches(match)]
+ files = list(ctx.manifest().walk(match))
total = len(files)
if total:
files.sort()
--- a/mercurial/bookmarks.py Mon Apr 13 16:30:13 2020 +0300
+++ b/mercurial/bookmarks.py Thu Apr 16 22:51:09 2020 +0530
@@ -173,6 +173,8 @@
nrefs.sort()
def _del(self, mark):
+ if mark not in self._refmap:
+ return
self._clean = False
node = self._refmap.pop(mark)
nrefs = self._nodemap[node]
@@ -461,6 +463,10 @@
return bool(bmchanges)
+def isdivergent(b):
+ return b'@' in b and not b.endswith(b'@')
+
+
def listbinbookmarks(repo):
# We may try to list bookmarks on a repo type that does not
# support it (e.g., statichttprepository).
@@ -469,7 +475,7 @@
hasnode = repo.changelog.hasnode
for k, v in pycompat.iteritems(marks):
# don't expose local divergent bookmarks
- if hasnode(v) and (b'@' not in k or k.endswith(b'@')):
+ if hasnode(v) and not isdivergent(k):
yield k, v
@@ -481,6 +487,8 @@
def pushbookmark(repo, key, old, new):
+ if isdivergent(key):
+ return False
if bookmarksinstore(repo):
wlock = util.nullcontextmanager()
else:
--- a/mercurial/branchmap.py Mon Apr 13 16:30:13 2020 +0300
+++ b/mercurial/branchmap.py Thu Apr 16 22:51:09 2020 +0530
@@ -291,8 +291,8 @@
% (
_branchcachedesc(repo),
pycompat.bytestr(
- inst # pytype: disable=wrong-arg-types
- ),
+ inst
+ ), # pytype: disable=wrong-arg-types
)
)
bcache = None
@@ -446,7 +446,7 @@
# 1 (branch a) -> 2 (branch b) -> 3 (branch a)
for branch, newheadrevs in pycompat.iteritems(newbranches):
bheads = self._entries.setdefault(branch, [])
- bheadset = set(cl.rev(node) for node in bheads)
+ bheadset = {cl.rev(node) for node in bheads}
# This have been tested True on all internal usage of this function.
# run it again in case of doubt
@@ -582,7 +582,7 @@
@util.propertycache
def _namesreverse(self):
- return dict((b, r) for r, b in enumerate(self._names))
+ return {b: r for r, b in enumerate(self._names)}
def branchinfo(self, rev):
"""Return branch name and close flag for rev, using and updating
--- a/mercurial/bundle2.py Mon Apr 13 16:30:13 2020 +0300
+++ b/mercurial/bundle2.py Thu Apr 16 22:51:09 2020 +0530
@@ -2368,6 +2368,11 @@
b'prepushkey', throw=True, **pycompat.strkwargs(hookargs)
)
+ for book, node in changes:
+ if bookmarks.isdivergent(book):
+ msg = _(b'cannot accept divergent bookmark %s!') % book
+ raise error.Abort(msg)
+
bookstore.applychanges(op.repo, op.gettransaction(), changes)
if pushkeycompat:
--- a/mercurial/cext/manifest.c Mon Apr 13 16:30:13 2020 +0300
+++ b/mercurial/cext/manifest.c Thu Apr 16 22:51:09 2020 +0530
@@ -53,21 +53,35 @@
{
char *s = l->start;
Py_ssize_t llen = pathlen(l);
+ Py_ssize_t hlen = l->len - llen - 2;
+ Py_ssize_t hlen_raw = 20;
PyObject *hash;
if (llen + 1 + 40 + 1 > l->len) { /* path '\0' hash '\n' */
PyErr_SetString(PyExc_ValueError, "manifest line too short");
return NULL;
}
- hash = unhexlify(s + llen + 1, 40);
+ switch (hlen) {
+ case 40: /* sha1 */
+ case 41: /* sha1 with cruft for a merge */
+ break;
+ case 64: /* new hash */
+ case 65: /* new hash with cruft for a merge */
+ hlen_raw = 32;
+ break;
+ default:
+ PyErr_SetString(PyExc_ValueError, "invalid node length in manifest");
+ return NULL;
+ }
+ hash = unhexlify(s + llen + 1, hlen_raw * 2);
if (!hash) {
return NULL;
}
if (l->hash_suffix != '\0') {
- char newhash[21];
- memcpy(newhash, PyBytes_AsString(hash), 20);
+ char newhash[33];
+ memcpy(newhash, PyBytes_AsString(hash), hlen_raw);
Py_DECREF(hash);
- newhash[20] = l->hash_suffix;
- hash = PyBytes_FromStringAndSize(newhash, 21);
+ newhash[hlen_raw] = l->hash_suffix;
+ hash = PyBytes_FromStringAndSize(newhash, hlen_raw+1);
}
return hash;
}
@@ -78,15 +92,20 @@
char *s = l->start;
Py_ssize_t plen = pathlen(l);
PyObject *hash = nodeof(l);
-
- /* 40 for hash, 1 for null byte, 1 for newline */
- Py_ssize_t hplen = plen + 42;
- Py_ssize_t flen = l->len - hplen;
+	Py_ssize_t hlen;
+ Py_ssize_t hplen, flen;
PyObject *flags;
PyObject *tup;
if (!hash)
return NULL;
+ /* hash is either 20 or 21 bytes for an old hash, so we use a
+ ternary here to get the "real" hexlified sha length. */
+ hlen = PyBytes_GET_SIZE(hash) < 22 ? 40 : 64;
+ /* 1 for null byte, 1 for newline */
+ hplen = plen + hlen + 2;
+ flen = l->len - hplen;
+
flags = PyBytes_FromStringAndSize(s + hplen - 1, flen);
if (!flags) {
Py_DECREF(hash);
--- a/mercurial/cext/osutil.c Mon Apr 13 16:30:13 2020 +0300
+++ b/mercurial/cext/osutil.c Thu Apr 16 22:51:09 2020 +0530
@@ -810,9 +810,10 @@
/* Check the memory we can use. Typically, argv[i] and
* argv[i + 1] are continuous. */
for (i = 0; i < argc; ++i) {
+ size_t len;
if (argv[i] > argvend || argv[i] < argvstart)
break; /* not continuous */
- size_t len = strlen(argv[i]);
+ len = strlen(argv[i]);
argvend = argv[i] + len + 1 /* '\0' */;
}
if (argvend > argvstart) /* sanity check */
@@ -1169,10 +1170,10 @@
static PyObject *unblocksignal(PyObject *self, PyObject *args)
{
int sig = 0;
+ sigset_t set;
int r;
if (!PyArg_ParseTuple(args, "i", &sig))
return NULL;
- sigset_t set;
r = sigemptyset(&set);
if (r != 0)
return PyErr_SetFromErrno(PyExc_OSError);
--- a/mercurial/cext/revlog.c Mon Apr 13 16:30:13 2020 +0300
+++ b/mercurial/cext/revlog.c Thu Apr 16 22:51:09 2020 +0530
@@ -39,6 +39,8 @@
typedef struct {
int abi_version;
+ Py_ssize_t (*index_length)(const indexObject *);
+ const char *(*index_node)(indexObject *, Py_ssize_t);
int (*index_parents)(PyObject *, int, int *);
} Revlog_CAPI;
@@ -212,7 +214,7 @@
*
* Returns 0 on success or -1 on failure.
*/
-int HgRevlogIndex_GetParents(PyObject *op, int rev, int *ps)
+static int HgRevlogIndex_GetParents(PyObject *op, int rev, int *ps)
{
int tiprev;
if (!op || !HgRevlogIndex_Check(op) || !ps) {
@@ -2878,173 +2880,12 @@
return NULL;
}
-#ifdef WITH_RUST
-
-/* rustlazyancestors: iteration over ancestors implemented in Rust
- *
- * This class holds a reference to an index and to the Rust iterator.
- */
-typedef struct rustlazyancestorsObjectStruct rustlazyancestorsObject;
-
-struct rustlazyancestorsObjectStruct {
- PyObject_HEAD
- /* Type-specific fields go here. */
- indexObject *index; /* Ref kept to avoid GC'ing the index */
- void *iter; /* Rust iterator */
-};
-
-/* FFI exposed from Rust code */
-rustlazyancestorsObject *rustlazyancestors_init(indexObject *index,
- /* intrevs vector */
- Py_ssize_t initrevslen,
- long *initrevs, long stoprev,
- int inclusive);
-void rustlazyancestors_drop(rustlazyancestorsObject *self);
-int rustlazyancestors_next(rustlazyancestorsObject *self);
-int rustlazyancestors_contains(rustlazyancestorsObject *self, long rev);
-
-/* CPython instance methods */
-static int rustla_init(rustlazyancestorsObject *self, PyObject *args)
-{
- PyObject *initrevsarg = NULL;
- PyObject *inclusivearg = NULL;
- long stoprev = 0;
- long *initrevs = NULL;
- int inclusive = 0;
- Py_ssize_t i;
-
- indexObject *index;
- if (!PyArg_ParseTuple(args, "O!O!lO!", &HgRevlogIndex_Type, &index,
- &PyList_Type, &initrevsarg, &stoprev,
- &PyBool_Type, &inclusivearg))
- return -1;
-
- Py_INCREF(index);
- self->index = index;
-
- if (inclusivearg == Py_True)
- inclusive = 1;
-
- Py_ssize_t linit = PyList_GET_SIZE(initrevsarg);
-
- initrevs = (long *)calloc(linit, sizeof(long));
-
- if (initrevs == NULL) {
- PyErr_NoMemory();
- goto bail;
- }
-
- for (i = 0; i < linit; i++) {
- initrevs[i] = PyInt_AsLong(PyList_GET_ITEM(initrevsarg, i));
- }
- if (PyErr_Occurred())
- goto bail;
-
- self->iter =
- rustlazyancestors_init(index, linit, initrevs, stoprev, inclusive);
- if (self->iter == NULL) {
- /* if this is because of GraphError::ParentOutOfRange
- * HgRevlogIndex_GetParents() has already set the proper
- * exception */
- goto bail;
- }
-
- free(initrevs);
- return 0;
-
-bail:
- free(initrevs);
- return -1;
-};
-
-static void rustla_dealloc(rustlazyancestorsObject *self)
-{
- Py_XDECREF(self->index);
- if (self->iter != NULL) { /* can happen if rustla_init failed */
- rustlazyancestors_drop(self->iter);
- }
- PyObject_Del(self);
-}
-
-static PyObject *rustla_next(rustlazyancestorsObject *self)
-{
- int res = rustlazyancestors_next(self->iter);
- if (res == -1) {
- /* Setting an explicit exception seems unnecessary
- * as examples from Python source code (Objects/rangeobjets.c
- * and Modules/_io/stringio.c) seem to demonstrate.
- */
- return NULL;
- }
- return PyInt_FromLong(res);
-}
-
-static int rustla_contains(rustlazyancestorsObject *self, PyObject *rev)
-{
- long lrev;
- if (!pylong_to_long(rev, &lrev)) {
- PyErr_Clear();
- return 0;
- }
- return rustlazyancestors_contains(self->iter, lrev);
-}
-
-static PySequenceMethods rustla_sequence_methods = {
- 0, /* sq_length */
- 0, /* sq_concat */
- 0, /* sq_repeat */
- 0, /* sq_item */
- 0, /* sq_slice */
- 0, /* sq_ass_item */
- 0, /* sq_ass_slice */
- (objobjproc)rustla_contains, /* sq_contains */
-};
-
-static PyTypeObject rustlazyancestorsType = {
- PyVarObject_HEAD_INIT(NULL, 0) /* header */
- "parsers.rustlazyancestors", /* tp_name */
- sizeof(rustlazyancestorsObject), /* tp_basicsize */
- 0, /* tp_itemsize */
- (destructor)rustla_dealloc, /* tp_dealloc */
- 0, /* tp_print */
- 0, /* tp_getattr */
- 0, /* tp_setattr */
- 0, /* tp_compare */
- 0, /* tp_repr */
- 0, /* tp_as_number */
- &rustla_sequence_methods, /* tp_as_sequence */
- 0, /* tp_as_mapping */
- 0, /* tp_hash */
- 0, /* tp_call */
- 0, /* tp_str */
- 0, /* tp_getattro */
- 0, /* tp_setattro */
- 0, /* tp_as_buffer */
- Py_TPFLAGS_DEFAULT, /* tp_flags */
- "Iterator over ancestors, implemented in Rust", /* tp_doc */
- 0, /* tp_traverse */
- 0, /* tp_clear */
- 0, /* tp_richcompare */
- 0, /* tp_weaklistoffset */
- 0, /* tp_iter */
- (iternextfunc)rustla_next, /* tp_iternext */
- 0, /* tp_methods */
- 0, /* tp_members */
- 0, /* tp_getset */
- 0, /* tp_base */
- 0, /* tp_dict */
- 0, /* tp_descr_get */
- 0, /* tp_descr_set */
- 0, /* tp_dictoffset */
- (initproc)rustla_init, /* tp_init */
- 0, /* tp_alloc */
-};
-#endif /* WITH_RUST */
-
static Revlog_CAPI CAPI = {
/* increment the abi_version field upon each change in the Revlog_CAPI
struct or in the ABI of the listed functions */
- 1,
+ 2,
+ index_length,
+ index_node,
HgRevlogIndex_GetParents,
};
@@ -3074,13 +2915,4 @@
caps = PyCapsule_New(&CAPI, "mercurial.cext.parsers.revlog_CAPI", NULL);
if (caps != NULL)
PyModule_AddObject(mod, "revlog_CAPI", caps);
-
-#ifdef WITH_RUST
- rustlazyancestorsType.tp_new = PyType_GenericNew;
- if (PyType_Ready(&rustlazyancestorsType) < 0)
- return;
- Py_INCREF(&rustlazyancestorsType);
- PyModule_AddObject(mod, "rustlazyancestors",
- (PyObject *)&rustlazyancestorsType);
-#endif
}
--- a/mercurial/cext/revlog.h Mon Apr 13 16:30:13 2020 +0300
+++ b/mercurial/cext/revlog.h Thu Apr 16 22:51:09 2020 +0530
@@ -14,6 +14,4 @@
#define HgRevlogIndex_Check(op) PyObject_TypeCheck(op, &HgRevlogIndex_Type)
-int HgRevlogIndex_GetParents(PyObject *op, int rev, int *ps);
-
#endif /* _HG_REVLOG_H_ */
--- a/mercurial/changegroup.py Mon Apr 13 16:30:13 2020 +0300
+++ b/mercurial/changegroup.py Thu Apr 16 22:51:09 2020 +0530
@@ -993,7 +993,7 @@
]
manifests.clear()
- clrevs = set(cl.rev(x) for x in clnodes)
+ clrevs = {cl.rev(x) for x in clnodes}
it = self.generatefiles(
changedfiles,
@@ -1149,8 +1149,8 @@
if fastpathlinkrev:
assert not tree
return (
- manifests.__getitem__ # pytype: disable=unsupported-operands
- )
+ manifests.__getitem__
+ ) # pytype: disable=unsupported-operands
def lookupmflinknode(x):
"""Callback for looking up the linknode for manifests.
@@ -1282,9 +1282,7 @@
flinkrev = store.linkrev
fnode = store.node
revs = ((r, flinkrev(r)) for r in store)
- return dict(
- (fnode(r), cln(lr)) for r, lr in revs if lr in clrevs
- )
+ return {fnode(r): cln(lr) for r, lr in revs if lr in clrevs}
clrevtolocalrev = {}
--- a/mercurial/changelog.py Mon Apr 13 16:30:13 2020 +0300
+++ b/mercurial/changelog.py Thu Apr 16 22:51:09 2020 +0530
@@ -161,15 +161,18 @@
return self.fp.__exit__(*args)
-def _divertopener(opener, target):
- """build an opener that writes in 'target.a' instead of 'target'"""
+class _divertopener(object):
+ def __init__(self, opener, target):
+ self._opener = opener
+ self._target = target
- def _divert(name, mode=b'r', checkambig=False, **kwargs):
- if name != target:
- return opener(name, mode, **kwargs)
- return opener(name + b".a", mode, **kwargs)
+ def __call__(self, name, mode=b'r', checkambig=False, **kwargs):
+ if name != self._target:
+ return self._opener(name, mode, **kwargs)
+ return self._opener(name + b".a", mode, **kwargs)
- return _divert
+ def __getattr__(self, attr):
+ return getattr(self._opener, attr)
def _delayopener(opener, target, buf):
@@ -382,6 +385,9 @@
datafile=datafile,
checkambig=True,
mmaplargeindex=True,
+ persistentnodemap=opener.options.get(
+ b'exp-persistent-nodemap', False
+ ),
)
if self._initempty and (self.version & 0xFFFF == revlog.REVLOGV1):
--- a/mercurial/chgserver.py Mon Apr 13 16:30:13 2020 +0300
+++ b/mercurial/chgserver.py Thu Apr 16 22:51:09 2020 +0530
@@ -80,9 +80,12 @@
# sensitive config sections affecting confighash
_configsections = [
b'alias', # affects global state commands.table
+ b'diff-tools', # affects whether gui or not in extdiff's uisetup
b'eol', # uses setconfig('eol', ...)
b'extdiff', # uisetup will register new commands
b'extensions',
+ b'fastannotate', # affects annotate command and adds fastannotate cmd
+ b'merge-tools', # affects whether gui or not in extdiff's uisetup
b'schemes', # extsetup will update global hg.schemes
]
@@ -525,7 +528,7 @@
def _setumask(self, data):
mask = struct.unpack(b'>I', data)[0]
self.ui.log(b'chgserver', b'setumask %r\n', mask)
- os.umask(mask)
+ util.setumask(mask)
def runcommand(self):
# pager may be attached within the runcommand session, which should
@@ -551,40 +554,6 @@
raise ValueError(b'unexpected value in setenv request')
self.ui.log(b'chgserver', b'setenv: %r\n', sorted(newenv.keys()))
- # Python3 has some logic to "coerce" the C locale to a UTF-8 capable
- # one, and it sets LC_CTYPE in the environment to C.UTF-8 if none of
- # 'LC_CTYPE', 'LC_ALL' or 'LANG' are set (to any value). This can be
- # disabled with PYTHONCOERCECLOCALE=0 in the environment.
- #
- # When fromui is called via _inithashstate, python has already set
- # this, so that's in the environment right when we start up the hg
- # process. Then chg will call us and tell us to set the environment to
- # the one it has; this might NOT have LC_CTYPE, so we'll need to
- # carry-forward the LC_CTYPE that was coerced in these situations.
- #
- # If this is not handled, we will fail config+env validation and fail
- # to start chg. If this is just ignored instead of carried forward, we
- # may have different behavior between chg and non-chg.
- if pycompat.ispy3:
- # Rename for wordwrapping purposes
- oldenv = encoding.environ
- if not any(
- e.get(b'PYTHONCOERCECLOCALE') == b'0' for e in [oldenv, newenv]
- ):
- keys = [b'LC_CTYPE', b'LC_ALL', b'LANG']
- old_keys = [k for k, v in oldenv.items() if k in keys and v]
- new_keys = [k for k, v in newenv.items() if k in keys and v]
- # If the user's environment (from chg) doesn't have ANY of the
- # keys that python looks for, and the environment (from
- # initialization) has ONLY LC_CTYPE and it's set to C.UTF-8,
- # carry it forward.
- if (
- not new_keys
- and old_keys == [b'LC_CTYPE']
- and oldenv[b'LC_CTYPE'] == b'C.UTF-8'
- ):
- newenv[b'LC_CTYPE'] = oldenv[b'LC_CTYPE']
-
encoding.environ.clear()
encoding.environ.update(newenv)
@@ -731,6 +700,16 @@
# environ cleaner.
if b'CHGINTERNALMARK' in encoding.environ:
del encoding.environ[b'CHGINTERNALMARK']
+ # Python3.7+ "coerces" the LC_CTYPE environment variable to a UTF-8 one if
+ # it thinks the current value is "C". This breaks the hash computation and
+ # causes chg to restart in a loop.
+ if b'CHGORIG_LC_CTYPE' in encoding.environ:
+ encoding.environ[b'LC_CTYPE'] = encoding.environ[b'CHGORIG_LC_CTYPE']
+ del encoding.environ[b'CHGORIG_LC_CTYPE']
+ elif b'CHG_CLEAR_LC_CTYPE' in encoding.environ:
+ if b'LC_CTYPE' in encoding.environ:
+ del encoding.environ[b'LC_CTYPE']
+ del encoding.environ[b'CHG_CLEAR_LC_CTYPE']
if repo:
# one chgserver can serve multiple repos. drop repo information
--- a/mercurial/cmdutil.py Mon Apr 13 16:30:13 2020 +0300
+++ b/mercurial/cmdutil.py Thu Apr 16 22:51:09 2020 +0530
@@ -170,7 +170,12 @@
diffopts = [
(b'a', b'text', None, _(b'treat all files as text')),
- (b'g', b'git', None, _(b'use git extended diff format')),
+ (
+ b'g',
+ b'git',
+ None,
+ _(b'use git extended diff format (DEFAULT: diff.git)'),
+ ),
(b'', b'binary', None, _(b'generate binary diffs in git mode (default)')),
(b'', b'nodates', None, _(b'omit dates from diff headers')),
]
@@ -209,7 +214,9 @@
b'p',
b'show-function',
None,
- _(b'show which function each change is in'),
+ _(
+ b'show which function each change is in (DEFAULT: diff.showfunc)'
+ ),
),
(b'', b'reverse', None, _(b'produce a diff that undoes the changes')),
]
@@ -281,11 +288,11 @@
return previous
-def check_incompatible_arguments(opts, first, *others):
+def check_incompatible_arguments(opts, first, others):
"""abort if the first argument is given along with any of the others
Unlike check_at_most_one_arg(), `others` are not mutually exclusive
- among themselves.
+ among themselves, and they're passed as a single collection.
"""
for other in others:
check_at_most_one_arg(opts, first, other)
@@ -584,15 +591,8 @@
[os.unlink(repo.wjoin(c)) for c in newlyaddedandmodifiedfiles]
# 3a. apply filtered patch to clean repo (clean)
if backups:
- # Equivalent to hg.revert
m = scmutil.matchfiles(repo, set(backups.keys()) | alsorestore)
- mergemod.update(
- repo,
- repo.dirstate.p1(),
- branchmerge=False,
- force=True,
- matcher=m,
- )
+ mergemod.revert_to(repo[b'.'], matcher=m)
# 3b. (apply)
if dopatch:
@@ -1414,46 +1414,165 @@
def copy(ui, repo, pats, opts, rename=False):
+ check_incompatible_arguments(opts, b'forget', [b'dry_run'])
+
# called with the repo lock held
#
# hgsep => pathname that uses "/" to separate directories
# ossep => pathname that uses os.sep to separate directories
cwd = repo.getcwd()
targets = {}
+ forget = opts.get(b"forget")
after = opts.get(b"after")
dryrun = opts.get(b"dry_run")
- wctx = repo[None]
+ rev = opts.get(b'at_rev')
+ if rev:
+ if not forget and not after:
+ # TODO: Remove this restriction and make it also create the copy
+ # targets (and remove the rename source if rename==True).
+ raise error.Abort(_(b'--at-rev requires --after'))
+ ctx = scmutil.revsingle(repo, rev)
+ if len(ctx.parents()) > 1:
+ raise error.Abort(_(b'cannot mark/unmark copy in merge commit'))
+ else:
+ ctx = repo[None]
+
+ pctx = ctx.p1()
uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
+ if forget:
+ if ctx.rev() is None:
+ new_ctx = ctx
+ else:
+ if len(ctx.parents()) > 1:
+ raise error.Abort(_(b'cannot unmark copy in merge commit'))
+ # avoid cycle context -> subrepo -> cmdutil
+ from . import context
+
+ rewriteutil.precheck(repo, [ctx.rev()], b'uncopy')
+ new_ctx = context.overlayworkingctx(repo)
+ new_ctx.setbase(ctx.p1())
+ mergemod.graft(repo, ctx, wctx=new_ctx)
+
+ match = scmutil.match(ctx, pats, opts)
+
+ current_copies = ctx.p1copies()
+ current_copies.update(ctx.p2copies())
+
+ uipathfn = scmutil.getuipathfn(repo)
+ for f in ctx.walk(match):
+ if f in current_copies:
+ new_ctx[f].markcopied(None)
+ elif match.exact(f):
+ ui.warn(
+ _(
+ b'%s: not unmarking as copy - file is not marked as copied\n'
+ )
+ % uipathfn(f)
+ )
+
+ if ctx.rev() is not None:
+ with repo.lock():
+ mem_ctx = new_ctx.tomemctx_for_amend(ctx)
+ new_node = mem_ctx.commit()
+
+ if repo.dirstate.p1() == ctx.node():
+ with repo.dirstate.parentchange():
+ scmutil.movedirstate(repo, repo[new_node])
+ replacements = {ctx.node(): [new_node]}
+ scmutil.cleanupnodes(
+ repo, replacements, b'uncopy', fixphase=True
+ )
+
+ return
+
+ pats = scmutil.expandpats(pats)
+ if not pats:
+ raise error.Abort(_(b'no source or destination specified'))
+ if len(pats) == 1:
+ raise error.Abort(_(b'no destination specified'))
+ dest = pats.pop()
+
def walkpat(pat):
srcs = []
- if after:
- badstates = b'?'
- else:
- badstates = b'?r'
- m = scmutil.match(wctx, [pat], opts, globbed=True)
- for abs in wctx.walk(m):
- state = repo.dirstate[abs]
+ m = scmutil.match(ctx, [pat], opts, globbed=True)
+ for abs in ctx.walk(m):
rel = uipathfn(abs)
exact = m.exact(abs)
- if state in badstates:
- if exact and state == b'?':
- ui.warn(_(b'%s: not copying - file is not managed\n') % rel)
- if exact and state == b'r':
- ui.warn(
- _(
- b'%s: not copying - file has been marked for'
- b' remove\n'
+ if abs not in ctx:
+ if abs in pctx:
+ if not after:
+ if exact:
+ ui.warn(
+ _(
+ b'%s: not copying - file has been marked '
+ b'for remove\n'
+ )
+ % rel
+ )
+ continue
+ else:
+ if exact:
+ ui.warn(
+ _(b'%s: not copying - file is not managed\n') % rel
)
- % rel
- )
- continue
+ continue
+
# abs: hgsep
# rel: ossep
srcs.append((abs, rel, exact))
return srcs
+ if ctx.rev() is not None:
+ rewriteutil.precheck(repo, [ctx.rev()], b'uncopy')
+ absdest = pathutil.canonpath(repo.root, cwd, dest)
+ if ctx.hasdir(absdest):
+ raise error.Abort(
+ _(b'%s: --at-rev does not support a directory as destination')
+ % uipathfn(absdest)
+ )
+ if absdest not in ctx:
+ raise error.Abort(
+ _(b'%s: copy destination does not exist in %s')
+ % (uipathfn(absdest), ctx)
+ )
+
+ # avoid cycle context -> subrepo -> cmdutil
+ from . import context
+
+ copylist = []
+ for pat in pats:
+ srcs = walkpat(pat)
+ if not srcs:
+ continue
+ for abs, rel, exact in srcs:
+ copylist.append(abs)
+
+ # TODO: Add support for `hg cp --at-rev . foo bar dir` and
+ # `hg cp --at-rev . dir1 dir2`, preferably unifying the code with the
+ # existing functions below.
+ if len(copylist) != 1:
+ raise error.Abort(_(b'--at-rev requires a single source'))
+
+ new_ctx = context.overlayworkingctx(repo)
+ new_ctx.setbase(ctx.p1())
+ mergemod.graft(repo, ctx, wctx=new_ctx)
+
+ new_ctx.markcopied(absdest, copylist[0])
+
+ with repo.lock():
+ mem_ctx = new_ctx.tomemctx_for_amend(ctx)
+ new_node = mem_ctx.commit()
+
+ if repo.dirstate.p1() == ctx.node():
+ with repo.dirstate.parentchange():
+ scmutil.movedirstate(repo, repo[new_node])
+ replacements = {ctx.node(): [new_node]}
+ scmutil.cleanupnodes(repo, replacements, b'copy', fixphase=True)
+
+ return
+
# abssrc: hgsep
# relsrc: ossep
# otarget: ossep
@@ -1583,13 +1702,13 @@
# fix up dirstate
scmutil.dirstatecopy(
- ui, repo, wctx, abssrc, abstarget, dryrun=dryrun, cwd=cwd
+ ui, repo, ctx, abssrc, abstarget, dryrun=dryrun, cwd=cwd
)
if rename and not dryrun:
if not after and srcexists and not samefile:
rmdir = repo.ui.configbool(b'experimental', b'removeemptydirs')
repo.wvfs.unlinkpath(abssrc, rmdir=rmdir)
- wctx.forget([abssrc])
+ ctx.forget([abssrc])
# pat: ossep
# dest ossep
@@ -1659,12 +1778,6 @@
res = lambda p: dest
return res
- pats = scmutil.expandpats(pats)
- if not pats:
- raise error.Abort(_(b'no source or destination specified'))
- if len(pats) == 1:
- raise error.Abort(_(b'no destination specified'))
- dest = pats.pop()
destdirexists = os.path.isdir(dest) and not os.path.islink(dest)
if not destdirexists:
if len(pats) > 1 or matchmod.patkind(pats[0]):
@@ -3012,7 +3125,7 @@
ms = mergemod.mergestate.read(repo)
mergeutil.checkunresolved(ms)
- filestoamend = set(f for f in wctx.files() if matcher(f))
+ filestoamend = {f for f in wctx.files() if matcher(f)}
changes = len(filestoamend) > 0
if changes:
@@ -3804,7 +3917,7 @@
# Apply changes
fp = stringio()
# chunks are serialized per file, but files aren't sorted
- for f in sorted(set(c.header.filename() for c in chunks if ishunk(c))):
+ for f in sorted({c.header.filename() for c in chunks if ishunk(c)}):
prntstatusmsg(b'revert', f)
files = set()
for c in chunks:
--- a/mercurial/color.py Mon Apr 13 16:30:13 2020 +0300
+++ b/mercurial/color.py Thu Apr 16 22:51:09 2020 +0530
@@ -44,7 +44,7 @@
b'cyan': (False, curses.COLOR_CYAN, b''),
b'white': (False, curses.COLOR_WHITE, b''),
}
-except ImportError:
+except (ImportError, AttributeError):
curses = None
_baseterminfoparams = {}
--- a/mercurial/commands.py Mon Apr 13 16:30:13 2020 +0300
+++ b/mercurial/commands.py Thu Apr 16 22:51:09 2020 +0530
@@ -876,7 +876,7 @@
)
overrides = {(b'ui', b'forcemerge'): opts.get(b'tool', b'')}
with ui.configoverride(overrides, b'backout'):
- return hg.merge(repo, hex(repo.changelog.tip()))
+ return hg.merge(repo[b'tip'])
return 0
@@ -1228,7 +1228,7 @@
action = cmdutil.check_at_most_one_arg(opts, b'delete', b'rename', b'list')
if action:
- cmdutil.check_incompatible_arguments(opts, action, b'rev')
+ cmdutil.check_incompatible_arguments(opts, action, [b'rev'])
elif names or rev:
action = b'add'
elif inactive:
@@ -1236,7 +1236,9 @@
else:
action = b'list'
- cmdutil.check_incompatible_arguments(opts, b'inactive', b'delete', b'list')
+ cmdutil.check_incompatible_arguments(
+ opts, b'inactive', [b'delete', b'list']
+ )
if not names and action in {b'add', b'delete'}:
raise error.Abort(_(b"bookmark name required"))
@@ -2307,8 +2309,16 @@
@command(
b'copy|cp',
[
+ (b'', b'forget', None, _(b'unmark a file as copied')),
(b'A', b'after', None, _(b'record a copy that has already occurred')),
(
+ b'',
+ b'at-rev',
+ b'',
+ _(b'(un)mark copies in the given revision (EXPERIMENTAL)'),
+ _(b'REV'),
+ ),
+ (
b'f',
b'force',
None,
@@ -2331,8 +2341,11 @@
exist in the working directory. If invoked with -A/--after, the
operation is recorded, but no copying is performed.
- This command takes effect with the next commit. To undo a copy
- before that, see :hg:`revert`.
+ To undo marking a file as copied, use --forget. With that option,
+ all given (positional) arguments are unmarked as copies. The destination
+ file(s) will be left in place (still tracked).
+
+ This command takes effect with the next commit by default.
Returns 0 on success, 1 if errors are encountered.
"""
@@ -2938,7 +2951,7 @@
See :hg:`help revisions` for more about specifying revisions.
- Returns 0 on successful completion.
+ Returns 0 on successful completion, 1 if there are unresolved files.
'''
with repo.wlock():
return _dograft(ui, repo, *revs, **opts)
@@ -3199,10 +3212,9 @@
statedata[b'nodes'] = nodes
stateversion = 1
graftstate.save(stateversion, statedata)
- hint = _(b"use 'hg resolve' and 'hg graft --continue'")
- raise error.Abort(
- _(b"unresolved conflicts, can't continue"), hint=hint
- )
+ ui.error(_(b"abort: unresolved conflicts, can't continue\n"))
+ ui.error(_(b"(use 'hg resolve' and 'hg graft --continue')\n"))
+ return 1
else:
cont = False
@@ -3708,9 +3720,9 @@
heads = [repo[h] for h in heads]
if branchrevs:
- branches = set(
+ branches = {
repo[r].branch() for r in scmutil.revrange(repo, branchrevs)
- )
+ }
heads = [h for h in heads if h.branch() in branches]
if opts.get(b'active') and branchrevs:
@@ -3718,7 +3730,7 @@
heads = [h for h in heads if h.node() in dagheads]
if branchrevs:
- haveheads = set(h.branch() for h in heads)
+ haveheads = {h.branch() for h in heads}
if branches - haveheads:
headless = b', '.join(b for b in branches - haveheads)
msg = _(b'no open branch heads found on branches %s')
@@ -4847,6 +4859,7 @@
abort = opts.get(b'abort')
if abort and repo.dirstate.p2() == nullid:
cmdutil.wrongtooltocontinue(repo, _(b'merge'))
+ cmdutil.check_incompatible_arguments(opts, b'abort', [b'rev', b'preview'])
if abort:
state = cmdutil.getunfinishedstate(repo)
if state and state._opname != b'merge':
@@ -4856,19 +4869,16 @@
)
if node:
raise error.Abort(_(b"cannot specify a node with --abort"))
- if opts.get(b'rev'):
- raise error.Abort(_(b"cannot specify both --rev and --abort"))
- if opts.get(b'preview'):
- raise error.Abort(_(b"cannot specify --preview with --abort"))
+ return hg.abortmerge(repo.ui, repo)
+
if opts.get(b'rev') and node:
raise error.Abort(_(b"please specify just one revision"))
if not node:
node = opts.get(b'rev')
if node:
- node = scmutil.revsingle(repo, node).node()
-
- if not node and not abort:
+ ctx = scmutil.revsingle(repo, node)
+ else:
if ui.configbool(b'commands', b'merge.require-rev'):
raise error.Abort(
_(
@@ -4876,12 +4886,15 @@
b'with'
)
)
- node = repo[destutil.destmerge(repo)].node()
+ ctx = repo[destutil.destmerge(repo)]
+
+ if ctx.node() is None:
+ raise error.Abort(_(b'merging with the working copy has no effect'))
if opts.get(b'preview'):
# find nodes that are ancestors of p2 but not of p1
- p1 = repo.lookup(b'.')
- p2 = node
+ p1 = repo[b'.'].node()
+ p2 = ctx.node()
nodes = repo.changelog.findmissing(common=[p1], heads=[p2])
displayer = logcmdutil.changesetdisplayer(ui, repo, opts)
@@ -4895,14 +4908,7 @@
with ui.configoverride(overrides, b'merge'):
force = opts.get(b'force')
labels = [b'working copy', b'merge rev']
- return hg.merge(
- repo,
- node,
- force=force,
- mergeforce=force,
- labels=labels,
- abort=abort,
- )
+ return hg.merge(ctx, force=force, labels=labels)
statemod.addunfinished(
@@ -5337,6 +5343,7 @@
None,
_(b'run even when remote repository is unrelated'),
),
+ (b'', b'confirm', None, _(b'confirm pull before applying changes'),),
(
b'r',
b'rev',
@@ -5453,6 +5460,7 @@
force=opts.get(b'force'),
bookmarks=opts.get(b'bookmark', ()),
opargs=pullopargs,
+ confirm=opts.get(b'confirm'),
).cgresult
# brev is a name, which might be a bookmark to be activated at
@@ -5671,7 +5679,7 @@
@command(
b'recover',
- [(b'', b'verify', True, b"run `hg verify` after successful recover"),],
+ [(b'', b'verify', False, b"run `hg verify` after successful recover"),],
helpcategory=command.CATEGORY_MAINTENANCE,
)
def recover(ui, repo, **opts):
@@ -5946,6 +5954,8 @@
if not m(f):
continue
+ if ms[f] == mergemod.MERGE_RECORD_MERGED_OTHER:
+ continue
label, key = mergestateinfo[ms[f]]
fm.startitem()
fm.context(ctx=wctx)
@@ -5993,6 +6003,9 @@
didwork = True
+ if ms[f] == mergemod.MERGE_RECORD_MERGED_OTHER:
+ continue
+
# don't let driver-resolved files be marked, and run the conclude
# step if asked to resolve
if ms[f] == mergemod.MERGE_RECORD_DRIVER_RESOLVED:
@@ -6648,7 +6661,12 @@
(b'i', b'ignored', None, _(b'show only ignored files')),
(b'n', b'no-status', None, _(b'hide status prefix')),
(b't', b'terse', _NOTTERSE, _(b'show the terse output (EXPERIMENTAL)')),
- (b'C', b'copies', None, _(b'show source of copied files')),
+ (
+ b'C',
+ b'copies',
+ None,
+ _(b'show source of copied files (DEFAULT: ui.statuscopies)'),
+ ),
(
b'0',
b'print0',
@@ -7571,7 +7589,7 @@
unshelved.
"""
with repo.wlock():
- return shelvemod.dounshelve(ui, repo, *shelved, **opts)
+ return shelvemod.unshelvecmd(ui, repo, *shelved, **opts)
statemod.addunfinished(
@@ -7653,6 +7671,7 @@
Returns 0 on success, 1 if there are unresolved files.
"""
+ cmdutil.check_at_most_one_arg(opts, 'clean', 'check', 'merge')
rev = opts.get('rev')
date = opts.get('date')
clean = opts.get('clean')
@@ -7674,14 +7693,6 @@
if date and rev is not None:
raise error.Abort(_(b"you can't specify a revision and a date"))
- if len([x for x in (clean, check, merge) if x]) > 1:
- raise error.Abort(
- _(
- b"can only specify one of -C/--clean, -c/--check, "
- b"or -m/--merge"
- )
- )
-
updatecheck = None
if check:
updatecheck = b'abort'
--- a/mercurial/commandserver.py Mon Apr 13 16:30:13 2020 +0300
+++ b/mercurial/commandserver.py Thu Apr 16 22:51:09 2020 +0530
@@ -545,6 +545,10 @@
if maxlen < 0:
raise error.Abort(_(b'negative max-repo-cache size not allowed'))
self._repoloader = repocache.repoloader(ui, maxlen)
+ # attempt to avoid crash in CoreFoundation when using chg after fix in
+ # a89381e04c58
+ if pycompat.isdarwin:
+ procutil.gui()
def init(self):
self._sock = socket.socket(socket.AF_UNIX)
--- a/mercurial/configitems.py Mon Apr 13 16:30:13 2020 +0300
+++ b/mercurial/configitems.py Thu Apr 16 22:51:09 2020 +0530
@@ -405,6 +405,21 @@
coreconfigitem(
b'devel', b'legacy.exchange', default=list,
)
+# TODO before getting `persistent-nodemap` out of experimental
+#
+# * decide for a "status" of the persistent nodemap and associated location
+# - part of the store next the revlog itself (new requirements)
+# - part of the cache directory
+# - part of an `index` directory
+# (https://www.mercurial-scm.org/wiki/ComputedIndexPlan)
+# * do we want to use this for more than just changelog? if so we need:
+# - simpler "pending" logic for them
+# - double check the memory story (we don't want to keep all revlogs in memory)
+# - think about the naming scheme if we are in "cache"
+# * increment the version format to "1" and freeze it.
+coreconfigitem(
+ b'devel', b'persistent-nodemap', default=False,
+)
coreconfigitem(
b'devel', b'servercafile', default=b'',
)
@@ -660,6 +675,12 @@
b'experimental', b'rust.index', default=False,
)
coreconfigitem(
+ b'experimental', b'exp-persistent-nodemap', default=False,
+)
+coreconfigitem(
+ b'experimental', b'exp-persistent-nodemap.mmap', default=True,
+)
+coreconfigitem(
b'experimental', b'server.filesdata.recommended-batch-size', default=50000,
)
coreconfigitem(
@@ -750,7 +771,7 @@
coreconfigitem(
b'format',
b'revlog-compression',
- default=b'zlib',
+ default=lambda: [b'zlib'],
alias=[(b'experimental', b'format.compression')],
)
coreconfigitem(
@@ -1044,6 +1065,9 @@
b'progress', b'width', default=dynamicdefault,
)
coreconfigitem(
+ b'pull', b'confirm', default=False,
+)
+coreconfigitem(
b'push', b'pushvars.server', default=False,
)
coreconfigitem(
@@ -1107,7 +1131,7 @@
b'server', b'compressionengines', default=list,
)
coreconfigitem(
- b'server', b'concurrent-push-mode', default=b'strict',
+ b'server', b'concurrent-push-mode', default=b'check-related',
)
coreconfigitem(
b'server', b'disablefullbundle', default=False,
--- a/mercurial/context.py Mon Apr 13 16:30:13 2020 +0300
+++ b/mercurial/context.py Thu Apr 16 22:51:09 2020 +0530
@@ -267,7 +267,7 @@
def _fileinfo(self, path):
if '_manifest' in self.__dict__:
try:
- return self._manifest[path], self._manifest.flags(path)
+ return self._manifest.find(path)
except KeyError:
raise error.ManifestLookupError(
self._node, path, _(b'not found in manifest')
@@ -2357,8 +2357,7 @@
# Test the other direction -- that this path from p2 isn't a directory
# in p1 (test that p1 doesn't have any paths matching `path/*`).
match = self.match([path], default=b'path')
- matches = self.p1().manifest().matches(match)
- mfiles = matches.keys()
+ mfiles = list(self.p1().manifest().walk(match))
if len(mfiles) > 0:
if len(mfiles) == 1 and mfiles[0] == path:
return
@@ -2488,6 +2487,17 @@
editor=editor,
)
+ def tomemctx_for_amend(self, precursor):
+ extra = precursor.extra().copy()
+ extra[b'amend_source'] = precursor.hex()
+ return self.tomemctx(
+ text=precursor.description(),
+ branch=precursor.branch(),
+ extra=extra,
+ date=precursor.date(),
+ user=precursor.user(),
+ )
+
def isdirty(self, path):
return path in self._cache
--- a/mercurial/copies.py Mon Apr 13 16:30:13 2020 +0300
+++ b/mercurial/copies.py Thu Apr 16 22:51:09 2020 +0530
@@ -403,13 +403,15 @@
)
if x == y or not x or not y:
return {}
+ if y.rev() is None and x == y.p1():
+ if debug:
+ repo.ui.debug(b'debug.copies: search mode: dirstate\n')
+ # short-circuit to avoid issues with merge states
+ return _dirstatecopies(repo, match)
a = y.ancestor(x)
if a == x:
if debug:
repo.ui.debug(b'debug.copies: search mode: forward\n')
- if y.rev() is None and x == y.p1():
- # short-circuit to avoid issues with merge states
- return _dirstatecopies(repo, match)
copies = _forwardcopies(x, y, match=match)
elif a == y:
if debug:
@@ -452,44 +454,34 @@
```other changed <file> which local deleted```
- Returns five dicts: "copy", "movewithdir", "diverge", "renamedelete" and
- "dirmove".
+ Returns a tuple where:
- "copy" is a mapping from destination name -> source name,
- where source is in c1 and destination is in c2 or vice-versa.
-
- "movewithdir" is a mapping from source name -> destination name,
- where the file at source present in one context but not the other
- needs to be moved to destination by the merge process, because the
- other context moved the directory it is in.
+ "branch_copies" an instance of branch_copies.
"diverge" is a mapping of source name -> list of destination names
for divergent renames.
- "renamedelete" is a mapping of source name -> list of destination
- names for files deleted in c1 that were renamed in c2 or vice-versa.
-
- "dirmove" is a mapping of detected source dir -> destination dir renames.
- This is needed for handling changes to new files previously grafted into
- renamed directories.
-
This function calls different copytracing algorithms based on config.
"""
# avoid silly behavior for update from empty dir
if not c1 or not c2 or c1 == c2:
- return {}, {}, {}, {}, {}
+ return branch_copies(), branch_copies(), {}
narrowmatch = c1.repo().narrowmatch()
# avoid silly behavior for parent -> working dir
if c2.node() is None and c1.node() == repo.dirstate.p1():
- return _dirstatecopies(repo, narrowmatch), {}, {}, {}, {}
+ return (
+ branch_copies(_dirstatecopies(repo, narrowmatch)),
+ branch_copies(),
+ {},
+ )
copytracing = repo.ui.config(b'experimental', b'copytrace')
if stringutil.parsebool(copytracing) is False:
# stringutil.parsebool() returns None when it is unable to parse the
# value, so we should rely on making sure copytracing is on such cases
- return {}, {}, {}, {}, {}
+ return branch_copies(), branch_copies(), {}
if usechangesetcentricalgo(repo):
# The heuristics don't make sense when we need changeset-centric algos
@@ -537,15 +529,45 @@
if src not in m1:
# renamed on side 1, deleted on side 2
renamedelete[src] = dsts1
+ elif src not in mb:
+ # Work around the "short-circuit to avoid issues with merge states"
+ # thing in pathcopies(): pathcopies(x, y) can return a copy where the
+ # destination doesn't exist in y.
+ pass
elif m2[src] != mb[src]:
if not _related(c2[src], base[src]):
return
# modified on side 2
for dst in dsts1:
- if dst not in m2:
- # dst not added on side 2 (handle as regular
- # "both created" case in manifestmerge otherwise)
- copy[dst] = src
+ copy[dst] = src
+
+
+class branch_copies(object):
+ """Information about copies made on one side of a merge/graft.
+
+ "copy" is a mapping from destination name -> source name,
+ where source is in c1 and destination is in c2 or vice-versa.
+
+ "movewithdir" is a mapping from source name -> destination name,
+ where the file at source present in one context but not the other
+ needs to be moved to destination by the merge process, because the
+ other context moved the directory it is in.
+
+ "renamedelete" is a mapping of source name -> list of destination
+ names for files deleted in c1 that were renamed in c2 or vice-versa.
+
+ "dirmove" is a mapping of detected source dir -> destination dir renames.
+ This is needed for handling changes to new files previously grafted into
+ renamed directories.
+ """
+
+ def __init__(
+ self, copy=None, renamedelete=None, dirmove=None, movewithdir=None
+ ):
+ self.copy = {} if copy is None else copy
+ self.renamedelete = {} if renamedelete is None else renamedelete
+ self.dirmove = {} if dirmove is None else dirmove
+ self.movewithdir = {} if movewithdir is None else movewithdir
def _fullcopytracing(repo, c1, c2, base):
@@ -563,6 +585,9 @@
copies1 = pathcopies(base, c1)
copies2 = pathcopies(base, c2)
+ if not (copies1 or copies2):
+ return branch_copies(), branch_copies(), {}
+
inversecopies1 = {}
inversecopies2 = {}
for dst, src in copies1.items():
@@ -570,9 +595,11 @@
for dst, src in copies2.items():
inversecopies2.setdefault(src, []).append(dst)
- copy = {}
+ copy1 = {}
+ copy2 = {}
diverge = {}
- renamedelete = {}
+ renamedelete1 = {}
+ renamedelete2 = {}
allsources = set(inversecopies1) | set(inversecopies2)
for src in allsources:
dsts1 = inversecopies1.get(src)
@@ -589,7 +616,8 @@
# and 'd' and deletes 'a'.
if dsts1 & dsts2:
for dst in dsts1 & dsts2:
- copy[dst] = src
+ copy1[dst] = src
+ copy2[dst] = src
else:
diverge[src] = sorted(dsts1 | dsts2)
elif src in m1 and src in m2:
@@ -597,27 +625,21 @@
dsts1 = set(dsts1)
dsts2 = set(dsts2)
for dst in dsts1 & dsts2:
- copy[dst] = src
+ copy1[dst] = src
+ copy2[dst] = src
# TODO: Handle cases where it was renamed on one side and copied
# on the other side
elif dsts1:
# copied/renamed only on side 1
_checksinglesidecopies(
- src, dsts1, m1, m2, mb, c2, base, copy, renamedelete
+ src, dsts1, m1, m2, mb, c2, base, copy1, renamedelete1
)
elif dsts2:
# copied/renamed only on side 2
_checksinglesidecopies(
- src, dsts2, m2, m1, mb, c1, base, copy, renamedelete
+ src, dsts2, m2, m1, mb, c1, base, copy2, renamedelete2
)
- renamedeleteset = set()
- divergeset = set()
- for dsts in diverge.values():
- divergeset.update(dsts)
- for dsts in renamedelete.values():
- renamedeleteset.update(dsts)
-
# find interesting file sets from manifests
addedinm1 = m1.filesnotin(mb, repo.narrowmatch())
addedinm2 = m2.filesnotin(mb, repo.narrowmatch())
@@ -630,33 +652,60 @@
if u2:
repo.ui.debug(b"%s:\n %s\n" % (header % b'other', b"\n ".join(u2)))
- fullcopy = copies1.copy()
- fullcopy.update(copies2)
- if not fullcopy:
- return copy, {}, diverge, renamedelete, {}
+ if repo.ui.debugflag:
+ renamedeleteset = set()
+ divergeset = set()
+ for dsts in diverge.values():
+ divergeset.update(dsts)
+ for dsts in renamedelete1.values():
+ renamedeleteset.update(dsts)
+ for dsts in renamedelete2.values():
+ renamedeleteset.update(dsts)
- if repo.ui.debugflag:
repo.ui.debug(
b" all copies found (* = to merge, ! = divergent, "
b"% = renamed and deleted):\n"
)
- for f in sorted(fullcopy):
- note = b""
- if f in copy:
- note += b"*"
- if f in divergeset:
- note += b"!"
- if f in renamedeleteset:
- note += b"%"
- repo.ui.debug(
- b" src: '%s' -> dst: '%s' %s\n" % (fullcopy[f], f, note)
- )
- del divergeset
+ for side, copies in ((b"local", copies1), (b"remote", copies2)):
+ if not copies:
+ continue
+ repo.ui.debug(b" on %s side:\n" % side)
+ for f in sorted(copies):
+ note = b""
+ if f in copy1 or f in copy2:
+ note += b"*"
+ if f in divergeset:
+ note += b"!"
+ if f in renamedeleteset:
+ note += b"%"
+ repo.ui.debug(
+ b" src: '%s' -> dst: '%s' %s\n" % (copies[f], f, note)
+ )
+ del renamedeleteset
+ del divergeset
repo.ui.debug(b" checking for directory renames\n")
+ dirmove1, movewithdir2 = _dir_renames(repo, c1, copy1, copies1, u2)
+ dirmove2, movewithdir1 = _dir_renames(repo, c2, copy2, copies2, u1)
+
+ branch_copies1 = branch_copies(copy1, renamedelete1, dirmove1, movewithdir1)
+ branch_copies2 = branch_copies(copy2, renamedelete2, dirmove2, movewithdir2)
+
+ return branch_copies1, branch_copies2, diverge
+
+
+def _dir_renames(repo, ctx, copy, fullcopy, addedfiles):
+ """Finds moved directories and files that should move with them.
+
+ ctx: the context for one of the sides
+ copy: files copied on the same side (as ctx)
+ fullcopy: files copied on the same side (as ctx), including those that
+ merge.manifestmerge() won't care about
+ addedfiles: added files on the other side (compared to ctx)
+ """
# generate a directory move map
- d1, d2 = c1.dirs(), c2.dirs()
+ d = ctx.dirs()
invalid = set()
dirmove = {}
@@ -667,12 +716,9 @@
if dsrc in invalid:
# already seen to be uninteresting
continue
- elif dsrc in d1 and ddst in d1:
+ elif dsrc in d and ddst in d:
# directory wasn't entirely moved locally
invalid.add(dsrc)
- elif dsrc in d2 and ddst in d2:
- # directory wasn't entirely moved remotely
- invalid.add(dsrc)
elif dsrc in dirmove and dirmove[dsrc] != ddst:
# files from the same directory moved to two different places
invalid.add(dsrc)
@@ -683,10 +729,10 @@
for i in invalid:
if i in dirmove:
del dirmove[i]
- del d1, d2, invalid
+ del d, invalid
if not dirmove:
- return copy, {}, diverge, renamedelete, {}
+ return {}, {}
dirmove = {k + b"/": v + b"/" for k, v in pycompat.iteritems(dirmove)}
@@ -697,7 +743,7 @@
movewithdir = {}
# check unaccounted nonoverlapping files against directory moves
- for f in u1 + u2:
+ for f in addedfiles:
if f not in fullcopy:
for d in dirmove:
if f.startswith(d):
@@ -711,7 +757,7 @@
)
break
- return copy, movewithdir, diverge, renamedelete, dirmove
+ return dirmove, movewithdir
def _heuristicscopytracing(repo, c1, c2, base):
@@ -744,8 +790,6 @@
if c2.rev() is None:
c2 = c2.p1()
- copies = {}
-
changedfiles = set()
m1 = c1.manifest()
if not repo.revs(b'%d::%d', base.rev(), c2.rev()):
@@ -765,10 +809,11 @@
changedfiles.update(ctx.files())
ctx = ctx.p1()
+ copies2 = {}
cp = _forwardcopies(base, c2)
for dst, src in pycompat.iteritems(cp):
if src in m1:
- copies[dst] = src
+ copies2[dst] = src
# file is missing if it isn't present in the destination, but is present in
# the base and present in the source.
@@ -777,6 +822,7 @@
filt = lambda f: f not in m1 and f in base and f in c2
missingfiles = [f for f in changedfiles if filt(f)]
+ copies1 = {}
if missingfiles:
basenametofilename = collections.defaultdict(list)
dirnametofilename = collections.defaultdict(list)
@@ -818,9 +864,9 @@
# if there are a few related copies then we'll merge
# changes into all of them. This matches the behaviour
# of upstream copytracing
- copies[candidate] = f
+ copies1[candidate] = f
- return copies, {}, {}, {}, {}
+ return branch_copies(copies1), branch_copies(copies2), {}
def _related(f1, f2):
--- a/mercurial/crecord.py Mon Apr 13 16:30:13 2020 +0300
+++ b/mercurial/crecord.py Thu Apr 16 22:51:09 2020 +0530
@@ -63,13 +63,13 @@
import curses.ascii
curses.error
-except ImportError:
+except (ImportError, AttributeError):
# I have no idea if wcurses works with crecord...
try:
import wcurses as curses
curses.error
- except ImportError:
+ except (ImportError, AttributeError):
# wcurses is not shipped on Windows by default, or python is not
# compiled with curses
curses = False
--- a/mercurial/dagop.py Mon Apr 13 16:30:13 2020 +0300
+++ b/mercurial/dagop.py Thu Apr 16 22:51:09 2020 +0530
@@ -274,6 +274,247 @@
break
+class subsetparentswalker(object):
+ r"""Scan adjacent ancestors in the graph given by the subset
+
+ This computes parent-child relations in the sub graph filtered by
+ a revset. Primary use case is to draw a revisions graph.
+
+ In the following example, we consider that the node 'f' has edges to all
+ ancestor nodes, but redundant paths are eliminated. The edge 'f'->'b'
+ is eliminated because there is a path 'f'->'c'->'b' for example.
+
+ - d - e -
+ / \
+ a - b - c - f
+
+ If the node 'c' is filtered out, the edge 'f'->'b' is activated.
+
+ - d - e -
+ / \
+ a - b -(c)- f
+
+ Likewise, if 'd' and 'e' are filtered out, this edge is fully eliminated
+ since there is a path 'f'->'c'->'b'->'a' for 'f'->'a'.
+
+ (d) (e)
+
+ a - b - c - f
+
+ Implementation-wise, 'f' is passed down to 'a' as unresolved through the
+ 'f'->'e'->'d'->'a' path, whereas we do also remember that 'f' has already
+ been resolved while walking down the 'f'->'c'->'b'->'a' path. When
+ processing the node 'a', the unresolved 'f'->'a' path is eliminated as
+ the 'f' end is marked as resolved.
+
+ Ancestors are searched from the tipmost revision in the subset so the
+ results can be cached. You should specify startrev to narrow the search
+ space to ':startrev'.
+ """
+
+ def __init__(self, repo, subset, startrev=None):
+ if startrev is not None:
+ subset = repo.revs(b'%d:null', startrev) & subset
+
+ # equivalent to 'subset = subset.sorted(reverse=True)', but there's
+ # no such function.
+ fastdesc = subset.fastdesc
+ if fastdesc:
+ desciter = fastdesc()
+ else:
+ if not subset.isdescending() and not subset.istopo():
+ subset = smartset.baseset(subset)
+ subset.sort(reverse=True)
+ desciter = iter(subset)
+
+ self._repo = repo
+ self._changelog = repo.changelog
+ self._subset = subset
+
+ # scanning state (see _scanparents):
+ self._tovisit = []
+ self._pendingcnt = {}
+ self._pointers = {}
+ self._parents = {}
+ self._inputhead = nullrev # reassigned by self._advanceinput()
+ self._inputtail = desciter
+ self._bottomrev = nullrev
+ self._advanceinput()
+
+ def parentsset(self, rev):
+ """Look up parents of the given revision in the subset, and returns
+ as a smartset"""
+ return smartset.baseset(self.parents(rev))
+
+ def parents(self, rev):
+ """Look up parents of the given revision in the subset
+
+ The returned revisions are sorted by parent index (p1/p2).
+ """
+ self._scanparents(rev)
+ return [r for _c, r in sorted(self._parents.get(rev, []))]
+
+ def _parentrevs(self, rev):
+ try:
+ revs = self._changelog.parentrevs(rev)
+ if revs[-1] == nullrev:
+ return revs[:-1]
+ return revs
+ except error.WdirUnsupported:
+ return tuple(pctx.rev() for pctx in self._repo[None].parents())
+
+ def _advanceinput(self):
+ """Advance the input iterator and set the next revision to _inputhead"""
+ if self._inputhead < nullrev:
+ return
+ try:
+ self._inputhead = next(self._inputtail)
+ except StopIteration:
+ self._bottomrev = self._inputhead
+ self._inputhead = nullrev - 1
+
+ def _scanparents(self, stoprev):
+ """Scan ancestors until the parents of the specified stoprev are
+ resolved"""
+
+ # 'tovisit' is the queue of the input revisions and their ancestors.
+ # It will be populated incrementally to minimize the initial cost
+ # of computing the given subset.
+ #
+ # For to-visit revisions, we keep track of
+ # - the number of the unresolved paths: pendingcnt[rev],
+ # - dict of the unresolved descendants and chains: pointers[rev][0],
+ # - set of the already resolved descendants: pointers[rev][1].
+ #
+ # When a revision is visited, 'pointers[rev]' should be popped and
+ # propagated to its parents accordingly.
+ #
+ # Once all pending paths have been resolved, 'pendingcnt[rev]' becomes
+ # 0 and 'parents[rev]' contains the unsorted list of parent revisions
+ # and p1/p2 chains (excluding linear paths.) The p1/p2 chains will be
+ # used as a sort key preferring p1. 'len(chain)' should be the number
+ # of merges between two revisions.
+
+ subset = self._subset
+ tovisit = self._tovisit # heap queue of [-rev]
+ pendingcnt = self._pendingcnt # {rev: count} for visited revisions
+ pointers = self._pointers # {rev: [{unresolved_rev: chain}, resolved]}
+ parents = self._parents # {rev: [(chain, rev)]}
+
+ while tovisit or self._inputhead >= nullrev:
+ if pendingcnt.get(stoprev) == 0:
+ return
+
+ # feed greater revisions from input set to queue
+ if not tovisit:
+ heapq.heappush(tovisit, -self._inputhead)
+ self._advanceinput()
+ while self._inputhead >= -tovisit[0]:
+ heapq.heappush(tovisit, -self._inputhead)
+ self._advanceinput()
+
+ rev = -heapq.heappop(tovisit)
+ if rev < self._bottomrev:
+ return
+ if rev in pendingcnt and rev not in pointers:
+ continue # already visited
+
+ curactive = rev in subset
+ pendingcnt.setdefault(rev, 0) # mark as visited
+ if curactive:
+ assert rev not in parents
+ parents[rev] = []
+ unresolved, resolved = pointers.pop(rev, ({}, set()))
+
+ if curactive:
+ # reached to active rev, resolve pending descendants' parents
+ for r, c in unresolved.items():
+ pendingcnt[r] -= 1
+ assert pendingcnt[r] >= 0
+ if r in resolved:
+ continue # eliminate redundant path
+ parents[r].append((c, rev))
+ # mark the descendant 'r' as resolved through this path if
+ # there are still pending pointers. the 'resolved' set may
+ # be concatenated later at a fork revision.
+ if pendingcnt[r] > 0:
+ resolved.add(r)
+ unresolved.clear()
+ # occasionally clean resolved markers. otherwise the set
+ # would grow indefinitely.
+ resolved = {r for r in resolved if pendingcnt[r] > 0}
+
+ parentrevs = self._parentrevs(rev)
+ bothparentsactive = all(p in subset for p in parentrevs)
+
+ # set up or propagate tracking pointers if
+ # - one of the parents is not active,
+ # - or descendants' parents are unresolved.
+ if not bothparentsactive or unresolved or resolved:
+ if len(parentrevs) <= 1:
+ # can avoid copying the tracking pointer
+ parentpointers = [(unresolved, resolved)]
+ else:
+ parentpointers = [
+ (unresolved, resolved),
+ (unresolved.copy(), resolved.copy()),
+ ]
+ # 'rev' is a merge revision. increment the pending count
+ # as the 'unresolved' dict will be duplicated, and append
+ # p1/p2 code to the existing chains.
+ for r in unresolved:
+ pendingcnt[r] += 1
+ parentpointers[0][0][r] += b'1'
+ parentpointers[1][0][r] += b'2'
+ for i, p in enumerate(parentrevs):
+ assert p < rev
+ heapq.heappush(tovisit, -p)
+ if p in pointers:
+ # 'p' is a fork revision. concatenate tracking pointers
+ # and decrement the pending count accordingly.
+ knownunresolved, knownresolved = pointers[p]
+ unresolved, resolved = parentpointers[i]
+ for r, c in unresolved.items():
+ if r in knownunresolved:
+ # unresolved at both paths
+ pendingcnt[r] -= 1
+ assert pendingcnt[r] > 0
+ # take shorter chain
+ knownunresolved[r] = min(c, knownunresolved[r])
+ else:
+ knownunresolved[r] = c
+ # simply propagate the 'resolved' set as deduplicating
+ # 'unresolved' here would be slightly complicated.
+ knownresolved.update(resolved)
+ else:
+ pointers[p] = parentpointers[i]
+
+ # then, populate the active parents directly and add the current
+ # 'rev' to the tracking pointers of the inactive parents.
+ # 'pointers[p]' may be optimized out if both parents are active.
+ chaincodes = [b''] if len(parentrevs) <= 1 else [b'1', b'2']
+ if curactive and bothparentsactive:
+ for i, p in enumerate(parentrevs):
+ c = chaincodes[i]
+ parents[rev].append((c, p))
+ # no need to mark 'rev' as resolved since the 'rev' should
+ # be fully resolved (i.e. pendingcnt[rev] == 0)
+ assert pendingcnt[rev] == 0
+ elif curactive:
+ for i, p in enumerate(parentrevs):
+ unresolved, resolved = pointers[p]
+ assert rev not in unresolved
+ c = chaincodes[i]
+ if p in subset:
+ parents[rev].append((c, p))
+ # mark 'rev' as resolved through this path
+ resolved.add(rev)
+ else:
+ pendingcnt[rev] += 1
+ unresolved[rev] = c
+ assert 0 < pendingcnt[rev] <= 2
+
+
def _reachablerootspure(pfunc, minroot, roots, heads, includepath):
"""See revlog.reachableroots"""
if not roots:
--- a/mercurial/debugcommands.py Mon Apr 13 16:30:13 2020 +0300
+++ b/mercurial/debugcommands.py Thu Apr 16 22:51:09 2020 +0530
@@ -11,8 +11,10 @@
import collections
import difflib
import errno
+import glob
import operator
import os
+import platform
import random
import re
import socket
@@ -27,7 +29,6 @@
from .node import (
bin,
hex,
- nullhex,
nullid,
nullrev,
short,
@@ -38,6 +39,7 @@
)
from . import (
bundle2,
+ bundlerepo,
changegroup,
cmdutil,
color,
@@ -75,6 +77,7 @@
sshpeer,
sslutil,
streamclone,
+ tags as tagsmod,
templater,
treediscovery,
upgrade,
@@ -93,7 +96,10 @@
stringutil,
)
-from .revlogutils import deltas as deltautil
+from .revlogutils import (
+ deltas as deltautil,
+ nodemap,
+)
release = lockmod.release
@@ -578,7 +584,7 @@
dots = opts.get('dots')
if file_:
rlog = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), file_)
- revs = set((int(r) for r in revs))
+ revs = {int(r) for r in revs}
def events():
for r in rlog:
@@ -1128,7 +1134,7 @@
(b'analyzed', filesetlang.analyze),
(b'optimized', filesetlang.optimize),
]
- stagenames = set(n for n, f in stages)
+ stagenames = {n for n, f in stages}
showalways = set()
if ui.verbose and not opts[b'show_stage']:
@@ -1487,6 +1493,11 @@
pycompat.sysexecutable or _(b"unknown"),
)
fm.write(
+ b'pythonimplementation',
+ _(b"checking Python implementation (%s)\n"),
+ pycompat.sysbytes(platform.python_implementation()),
+ )
+ fm.write(
b'pythonver',
_(b"checking Python version (%s)\n"),
(b"%d.%d.%d" % sys.version_info[:3]),
@@ -1497,6 +1508,13 @@
pythonlib or _(b"unknown"),
)
+ try:
+ from . import rustext
+
+ rustext.__doc__ # trigger lazy import
+ except ImportError:
+ rustext = None
+
security = set(sslutil.supportedprotocols)
if sslutil.hassni:
security.add(b'sni')
@@ -1524,6 +1542,13 @@
)
)
+ fm.plain(
+ _(
+ b"checking Rust extensions (%s)\n"
+ % (b'missing' if rustext is None else b'installed')
+ ),
+ )
+
# TODO print CA cert info
# hg version
@@ -1625,6 +1650,13 @@
fm.plain(_(b'checking "re2" regexp engine (%s)\n') % re2)
fm.data(re2=bool(util._re2))
+ rust_debug_mod = policy.importrust("debug")
+ if rust_debug_mod is not None:
+ re2_rust = b'installed' if rust_debug_mod.re2_installed else b'missing'
+
+ msg = b'checking "re2" regexp engine Rust bindings (%s)\n'
+ fm.plain(_(msg % re2_rust))
+
# templates
p = templater.templatepaths()
fm.write(b'templatedirs', b'checking templates (%s)...\n', b' '.join(p))
@@ -1934,120 +1966,100 @@
)
-@command(b'debugmergestate', [], b'')
-def debugmergestate(ui, repo, *args):
+@command(b'debugmergestate', [] + cmdutil.templateopts, b'')
+def debugmergestate(ui, repo, *args, **opts):
"""print merge state
Use --verbose to print out information about whether v1 or v2 merge state
was chosen."""
- def _hashornull(h):
- if h == nullhex:
- return b'null'
- else:
- return h
-
- def printrecords(version):
- ui.writenoi18n(b'* version %d records\n' % version)
- if version == 1:
- records = v1records
+ if ui.verbose:
+ ms = mergemod.mergestate(repo)
+
+ # sort so that reasonable information is on top
+ v1records = ms._readrecordsv1()
+ v2records = ms._readrecordsv2()
+
+ if not v1records and not v2records:
+ pass
+ elif not v2records:
+ ui.writenoi18n(b'no version 2 merge state\n')
+ elif ms._v1v2match(v1records, v2records):
+ ui.writenoi18n(b'v1 and v2 states match: using v2\n')
else:
- records = v2records
-
- for rtype, record in records:
- # pretty print some record types
- if rtype == b'L':
- ui.writenoi18n(b'local: %s\n' % record)
- elif rtype == b'O':
- ui.writenoi18n(b'other: %s\n' % record)
- elif rtype == b'm':
- driver, mdstate = record.split(b'\0', 1)
- ui.writenoi18n(
- b'merge driver: %s (state "%s")\n' % (driver, mdstate)
- )
- elif rtype in b'FDC':
- r = record.split(b'\0')
- f, state, hash, lfile, afile, anode, ofile = r[0:7]
- if version == 1:
- onode = b'not stored in v1 format'
- flags = r[7]
- else:
- onode, flags = r[7:9]
- ui.writenoi18n(
- b'file: %s (record type "%s", state "%s", hash %s)\n'
- % (f, rtype, state, _hashornull(hash))
- )
- ui.writenoi18n(
- b' local path: %s (flags "%s")\n' % (lfile, flags)
- )
- ui.writenoi18n(
- b' ancestor path: %s (node %s)\n'
- % (afile, _hashornull(anode))
- )
- ui.writenoi18n(
- b' other path: %s (node %s)\n'
- % (ofile, _hashornull(onode))
- )
- elif rtype == b'f':
- filename, rawextras = record.split(b'\0', 1)
- extras = rawextras.split(b'\0')
- i = 0
- extrastrings = []
- while i < len(extras):
- extrastrings.append(b'%s = %s' % (extras[i], extras[i + 1]))
- i += 2
-
- ui.writenoi18n(
- b'file extras: %s (%s)\n'
- % (filename, b', '.join(extrastrings))
- )
- elif rtype == b'l':
- labels = record.split(b'\0', 2)
- labels = [l for l in labels if len(l) > 0]
- ui.writenoi18n(b'labels:\n')
- ui.write((b' local: %s\n' % labels[0]))
- ui.write((b' other: %s\n' % labels[1]))
- if len(labels) > 2:
- ui.write((b' base: %s\n' % labels[2]))
- else:
- ui.writenoi18n(
- b'unrecognized entry: %s\t%s\n'
- % (rtype, record.replace(b'\0', b'\t'))
- )
-
- # Avoid mergestate.read() since it may raise an exception for unsupported
- # merge state records. We shouldn't be doing this, but this is OK since this
- # command is pretty low-level.
- ms = mergemod.mergestate(repo)
-
- # sort so that reasonable information is on top
- v1records = ms._readrecordsv1()
- v2records = ms._readrecordsv2()
- order = b'LOml'
-
- def key(r):
- idx = order.find(r[0])
- if idx == -1:
- return (1, r[1])
- else:
- return (0, idx)
-
- v1records.sort(key=key)
- v2records.sort(key=key)
-
- if not v1records and not v2records:
- ui.writenoi18n(b'no merge state found\n')
- elif not v2records:
- ui.notenoi18n(b'no version 2 merge state\n')
- printrecords(1)
- elif ms._v1v2match(v1records, v2records):
- ui.notenoi18n(b'v1 and v2 states match: using v2\n')
- printrecords(2)
- else:
- ui.notenoi18n(b'v1 and v2 states mismatch: using v1\n')
- printrecords(1)
- if ui.verbose:
- printrecords(2)
+ ui.writenoi18n(b'v1 and v2 states mismatch: using v1\n')
+
+ opts = pycompat.byteskwargs(opts)
+ if not opts[b'template']:
+ opts[b'template'] = (
+ b'{if(commits, "", "no merge state found\n")}'
+ b'{commits % "{name}{if(label, " ({label})")}: {node}\n"}'
+ b'{files % "file: {path} (state \\"{state}\\")\n'
+ b'{if(local_path, "'
+ b' local path: {local_path} (hash {local_key}, flags \\"{local_flags}\\")\n'
+ b' ancestor path: {ancestor_path} (node {ancestor_node})\n'
+ b' other path: {other_path} (node {other_node})\n'
+ b'")}'
+ b'{if(rename_side, "'
+ b' rename side: {rename_side}\n'
+ b' renamed path: {renamed_path}\n'
+ b'")}'
+ b'{extras % " extra: {key} = {value}\n"}'
+ b'"}'
+ )
+
+ ms = mergemod.mergestate.read(repo)
+
+ fm = ui.formatter(b'debugmergestate', opts)
+ fm.startitem()
+
+ fm_commits = fm.nested(b'commits')
+ if ms.active():
+ for name, node, label_index in (
+ (b'local', ms.local, 0),
+ (b'other', ms.other, 1),
+ ):
+ fm_commits.startitem()
+ fm_commits.data(name=name)
+ fm_commits.data(node=hex(node))
+ if ms._labels and len(ms._labels) > label_index:
+ fm_commits.data(label=ms._labels[label_index])
+ fm_commits.end()
+
+ fm_files = fm.nested(b'files')
+ if ms.active():
+ for f in ms:
+ fm_files.startitem()
+ fm_files.data(path=f)
+ state = ms._state[f]
+ fm_files.data(state=state[0])
+ if state[0] in (
+ mergemod.MERGE_RECORD_UNRESOLVED,
+ mergemod.MERGE_RECORD_RESOLVED,
+ ):
+ fm_files.data(local_key=state[1])
+ fm_files.data(local_path=state[2])
+ fm_files.data(ancestor_path=state[3])
+ fm_files.data(ancestor_node=state[4])
+ fm_files.data(other_path=state[5])
+ fm_files.data(other_node=state[6])
+ fm_files.data(local_flags=state[7])
+ elif state[0] in (
+ mergemod.MERGE_RECORD_UNRESOLVED_PATH,
+ mergemod.MERGE_RECORD_RESOLVED_PATH,
+ ):
+ fm_files.data(renamed_path=state[1])
+ fm_files.data(rename_side=state[2])
+ fm_extras = fm_files.nested(b'extras')
+ for k, v in ms.extras(f).items():
+ fm_extras.startitem()
+ fm_extras.data(key=k)
+ fm_extras.data(value=v)
+ fm_extras.end()
+
+ fm_files.end()
+
+ fm.end()
@command(b'debugnamecomplete', [], _(b'NAME...'))
@@ -2075,6 +2087,70 @@
@command(
+ b'debugnodemap',
+ [
+ (
+ b'',
+ b'dump-new',
+ False,
+ _(b'write a (new) persistent binary nodemap on stdin'),
+ ),
+ (b'', b'dump-disk', False, _(b'dump on-disk data on stdin')),
+ (
+ b'',
+ b'check',
+ False,
+ _(b'check that the data on disk data are correct.'),
+ ),
+ (
+ b'',
+ b'metadata',
+ False,
+ _(b'display the on disk meta data for the nodemap'),
+ ),
+ ],
+)
+def debugnodemap(ui, repo, **opts):
+ """write and inspect on disk nodemap
+ """
+ if opts['dump_new']:
+ unfi = repo.unfiltered()
+ cl = unfi.changelog
+ if util.safehasattr(cl.index, "nodemap_data_all"):
+ data = cl.index.nodemap_data_all()
+ else:
+ data = nodemap.persistent_data(cl.index)
+ ui.write(data)
+ elif opts['dump_disk']:
+ unfi = repo.unfiltered()
+ cl = unfi.changelog
+ nm_data = nodemap.persisted_data(cl)
+ if nm_data is not None:
+ docket, data = nm_data
+ ui.write(data[:])
+ elif opts['check']:
+ unfi = repo.unfiltered()
+ cl = unfi.changelog
+ nm_data = nodemap.persisted_data(cl)
+ if nm_data is not None:
+ docket, data = nm_data
+ return nodemap.check_data(ui, cl.index, data)
+ elif opts['metadata']:
+ unfi = repo.unfiltered()
+ cl = unfi.changelog
+ nm_data = nodemap.persisted_data(cl)
+ if nm_data is not None:
+ docket, data = nm_data
+ ui.write((b"uid: %s\n") % docket.uid)
+ ui.write((b"tip-rev: %d\n") % docket.tip_rev)
+ ui.write((b"tip-node: %s\n") % hex(docket.tip_node))
+ ui.write((b"data-length: %d\n") % docket.data_length)
+ ui.write((b"data-unused: %d\n") % docket.data_unused)
+ unused_perc = docket.data_unused * 100.0 / docket.data_length
+ ui.write((b"data-unused: %2.3f%%\n") % unused_perc)
+
+
+@command(
b'debugobsolete',
[
(b'', b'flags', 0, _(b'markers flag')),
@@ -2549,7 +2625,7 @@
dirstatefiles = set(dirstate)
manifestonly = manifestfiles - dirstatefiles
dsonly = dirstatefiles - manifestfiles
- dsnotadded = set(f for f in dsonly if dirstate[f] != b'a')
+ dsnotadded = {f for f in dsonly if dirstate[f] != b'a'}
changedfiles = manifestonly | dsnotadded
dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
@@ -3116,7 +3192,7 @@
raise error.Abort(
_(b'cannot use --verify-optimized with --no-optimized')
)
- stagenames = set(n for n, f in stages)
+ stagenames = {n for n, f in stages}
showalways = set()
showchanged = set()
@@ -3355,6 +3431,143 @@
@command(
+ b"debugbackupbundle",
+ [
+ (
+ b"",
+ b"recover",
+ b"",
+ b"brings the specified changeset back into the repository",
+ )
+ ]
+ + cmdutil.logopts,
+ _(b"hg debugbackupbundle [--recover HASH]"),
+)
+def debugbackupbundle(ui, repo, *pats, **opts):
+ """lists the changesets available in backup bundles
+
+ Without any arguments, this command prints a list of the changesets in each
+ backup bundle.
+
+ --recover takes a changeset hash and unbundles the first bundle that
+ contains that hash, which puts that changeset back in your repository.
+
+ --verbose will print the entire commit message and the bundle path for that
+ backup.
+ """
+ backups = list(
+ filter(
+ os.path.isfile, glob.glob(repo.vfs.join(b"strip-backup") + b"/*.hg")
+ )
+ )
+ backups.sort(key=lambda x: os.path.getmtime(x), reverse=True)
+
+ opts = pycompat.byteskwargs(opts)
+ opts[b"bundle"] = b""
+ opts[b"force"] = None
+ limit = logcmdutil.getlimit(opts)
+
+ def display(other, chlist, displayer):
+ if opts.get(b"newest_first"):
+ chlist.reverse()
+ count = 0
+ for n in chlist:
+ if limit is not None and count >= limit:
+ break
+ parents = [True for p in other.changelog.parents(n) if p != nullid]
+ if opts.get(b"no_merges") and len(parents) == 2:
+ continue
+ count += 1
+ displayer.show(other[n])
+
+ recovernode = opts.get(b"recover")
+ if recovernode:
+ if scmutil.isrevsymbol(repo, recovernode):
+ ui.warn(_(b"%s already exists in the repo\n") % recovernode)
+ return
+ elif backups:
+ msg = _(
+ b"Recover changesets using: hg debugbackupbundle --recover "
+ b"<changeset hash>\n\nAvailable backup changesets:"
+ )
+ ui.status(msg, label=b"status.removed")
+ else:
+ ui.status(_(b"no backup changesets found\n"))
+ return
+
+ for backup in backups:
+ # Much of this is copied from the hg incoming logic
+ source = ui.expandpath(os.path.relpath(backup, encoding.getcwd()))
+ source, branches = hg.parseurl(source, opts.get(b"branch"))
+ try:
+ other = hg.peer(repo, opts, source)
+ except error.LookupError as ex:
+ msg = _(b"\nwarning: unable to open bundle %s") % source
+ hint = _(b"\n(missing parent rev %s)\n") % short(ex.name)
+ ui.warn(msg, hint=hint)
+ continue
+ revs, checkout = hg.addbranchrevs(
+ repo, other, branches, opts.get(b"rev")
+ )
+
+ if revs:
+ revs = [other.lookup(rev) for rev in revs]
+
+ quiet = ui.quiet
+ try:
+ ui.quiet = True
+ other, chlist, cleanupfn = bundlerepo.getremotechanges(
+ ui, repo, other, revs, opts[b"bundle"], opts[b"force"]
+ )
+ except error.LookupError:
+ continue
+ finally:
+ ui.quiet = quiet
+
+ try:
+ if not chlist:
+ continue
+ if recovernode:
+ with repo.lock(), repo.transaction(b"unbundle") as tr:
+ if scmutil.isrevsymbol(other, recovernode):
+ ui.status(_(b"Unbundling %s\n") % (recovernode))
+ f = hg.openpath(ui, source)
+ gen = exchange.readbundle(ui, f, source)
+ if isinstance(gen, bundle2.unbundle20):
+ bundle2.applybundle(
+ repo,
+ gen,
+ tr,
+ source=b"unbundle",
+ url=b"bundle:" + source,
+ )
+ else:
+ gen.apply(repo, b"unbundle", b"bundle:" + source)
+ break
+ else:
+ backupdate = encoding.strtolocal(
+ time.strftime(
+ "%a %H:%M, %Y-%m-%d",
+ time.localtime(os.path.getmtime(source)),
+ )
+ )
+ ui.status(b"\n%s\n" % (backupdate.ljust(50)))
+ if ui.verbose:
+ ui.status(b"%s%s\n" % (b"bundle:".ljust(13), source))
+ else:
+ opts[
+ b"template"
+ ] = b"{label('status.modified', node|short)} {desc|firstline}\n"
+ displayer = logcmdutil.changesetdisplayer(
+ ui, other, opts, False
+ )
+ display(other, chlist, displayer)
+ displayer.close()
+ finally:
+ cleanupfn()
+
+
+@command(
b'debugsub',
[(b'r', b'rev', b'', _(b'revision to check'), _(b'REV'))],
_(b'[-r REV] [REV]'),
@@ -3423,6 +3636,17 @@
ui.write(b'\n')
+@command(b'debugtagscache', [])
+def debugtagscache(ui, repo):
+ """display the contents of .hg/cache/hgtagsfnodes1"""
+ cache = tagsmod.hgtagsfnodescache(repo.unfiltered())
+ for r in repo:
+ node = repo[r].node()
+ tagsnode = cache.getfnode(node, computemissing=False)
+ tagsnodedisplay = hex(tagsnode) if tagsnode else b'missing/invalid'
+ ui.write(b'%d %s %s\n' % (r, hex(node), tagsnodedisplay))
+
+
@command(
b'debugtemplate',
[
@@ -3497,7 +3721,7 @@
def debuguigetpass(ui, prompt=b''):
"""show prompt to type password"""
r = ui.getpass(prompt)
- ui.writenoi18n(b'respose: %s\n' % r)
+ ui.writenoi18n(b'response: %s\n' % r)
@command(
--- a/mercurial/dirstate.py Mon Apr 13 16:30:13 2020 +0300
+++ b/mercurial/dirstate.py Thu Apr 16 22:51:09 2020 +0530
@@ -27,6 +27,7 @@
policy,
pycompat,
scmutil,
+ sparse,
txnutil,
util,
)
@@ -1083,7 +1084,7 @@
results[next(iv)] = st
return results
- def _rust_status(self, matcher, list_clean):
+ def _rust_status(self, matcher, list_clean, list_ignored, list_unknown):
# Force Rayon (Rust parallelism library) to respect the number of
# workers. This is a temporary workaround until Rust code knows
# how to read the config file.
@@ -1101,16 +1102,45 @@
added,
removed,
deleted,
+ clean,
+ ignored,
unknown,
- clean,
+ warnings,
+ bad,
) = rustmod.status(
self._map._rustmap,
matcher,
self._rootdir,
- bool(list_clean),
+ self._ignorefiles(),
+ self._checkexec,
self._lastnormaltime,
- self._checkexec,
+ bool(list_clean),
+ bool(list_ignored),
+ bool(list_unknown),
)
+ if self._ui.warn:
+ for item in warnings:
+ if isinstance(item, tuple):
+ file_path, syntax = item
+ msg = _(b"%s: ignoring invalid syntax '%s'\n") % (
+ file_path,
+ syntax,
+ )
+ self._ui.warn(msg)
+ else:
+ msg = _(b"skipping unreadable pattern file '%s': %s\n")
+ self._ui.warn(
+ msg
+ % (
+ pathutil.canonpath(
+ self._rootdir, self._rootdir, item
+ ),
+ b"No such file or directory",
+ )
+ )
+
+ for (fn, message) in bad:
+ matcher.bad(fn, encoding.strtolocal(message))
status = scmutil.status(
modified=modified,
@@ -1118,7 +1148,7 @@
removed=removed,
deleted=deleted,
unknown=unknown,
- ignored=[],
+ ignored=ignored,
clean=clean,
)
return (lookup, status)
@@ -1148,26 +1178,34 @@
use_rust = True
- allowed_matchers = (matchmod.alwaysmatcher, matchmod.exactmatcher)
+ allowed_matchers = (
+ matchmod.alwaysmatcher,
+ matchmod.exactmatcher,
+ matchmod.includematcher,
+ )
if rustmod is None:
use_rust = False
+ elif self._checkcase:
+ # Case-insensitive filesystems are not handled yet
+ use_rust = False
elif subrepos:
use_rust = False
- elif bool(listunknown):
- # Pathauditor does not exist yet in Rust, unknown files
- # can't be trusted.
+ elif sparse.enabled:
use_rust = False
- elif self._ignorefiles() and listignored:
- # Rust has no ignore mechanism yet, so don't use Rust for
- # commands that need ignore.
+ elif match.traversedir is not None:
use_rust = False
elif not isinstance(match, allowed_matchers):
# Matchers have yet to be implemented
use_rust = False
if use_rust:
- return self._rust_status(match, listclean)
+ try:
+ return self._rust_status(
+ match, listclean, listignored, listunknown
+ )
+ except rustmod.FallbackError:
+ pass
def noop(f):
pass
@@ -1249,19 +1287,19 @@
aadd(fn)
elif state == b'r':
radd(fn)
-
- return (
- lookup,
- scmutil.status(
- modified, added, removed, deleted, unknown, ignored, clean
- ),
+ status = scmutil.status(
+ modified, added, removed, deleted, unknown, ignored, clean
)
+ return (lookup, status)
def matches(self, match):
'''
return files in the dirstate (in whatever state) filtered by match
'''
dmap = self._map
+ if rustmod is not None:
+ dmap = self._map._rustmap
+
if match.always():
return dmap.keys()
files = match.files()
--- a/mercurial/discovery.py Mon Apr 13 16:30:13 2020 +0300
+++ b/mercurial/discovery.py Thu Apr 16 22:51:09 2020 +0530
@@ -192,7 +192,7 @@
# ancestors of missing
og._computecommonmissing()
cl = repo.changelog
- missingrevs = set(cl.rev(n) for n in og._missing)
+ missingrevs = {cl.rev(n) for n in og._missing}
og._common = set(cl.ancestors(missingrevs)) - missingrevs
commonheads = set(og.commonheads)
og.missingheads = [h for h in og.missingheads if h not in commonheads]
@@ -268,8 +268,8 @@
# If there are no obsstore, no post processing are needed.
if repo.obsstore:
torev = repo.changelog.rev
- futureheads = set(torev(h) for h in outgoing.missingheads)
- futureheads |= set(torev(h) for h in outgoing.commonheads)
+ futureheads = {torev(h) for h in outgoing.missingheads}
+ futureheads |= {torev(h) for h in outgoing.commonheads}
allfuturecommon = repo.changelog.ancestors(futureheads, inclusive=True)
for branch, heads in sorted(pycompat.iteritems(headssum)):
remoteheads, newheads, unsyncedheads, placeholder = heads
@@ -452,7 +452,7 @@
if branch not in (b'default', None):
errormsg = _(
b"push creates new remote head %s on branch '%s'!"
- ) % (short(dhs[0]), branch)
+ ) % (short(dhs[0]), branch,)
elif repo[dhs[0]].bookmarks():
errormsg = _(
b"push creates new remote head %s "
--- a/mercurial/dispatch.py Mon Apr 13 16:30:13 2020 +0300
+++ b/mercurial/dispatch.py Thu Apr 16 22:51:09 2020 +0530
@@ -10,6 +10,7 @@
import difflib
import errno
import getopt
+import io
import os
import pdb
import re
@@ -144,7 +145,50 @@
if pycompat.ispy3:
def initstdio():
- pass
+ # stdio streams on Python 3 are io.TextIOWrapper instances proxying another
+ # buffer. These streams will normalize \n to \r\n by default. Mercurial's
+ # preferred mechanism for writing output (ui.write()) uses io.BufferedWriter
+ # instances, which write to the underlying stdio file descriptor in binary
+ # mode. ui.write() uses \n for line endings and no line ending normalization
+ # is attempted through this interface. This "just works," even if the system
+ # preferred line ending is not \n.
+ #
+ # But some parts of Mercurial (e.g. hooks) can still send data to sys.stdout
+ # and sys.stderr. They will inherit the line ending normalization settings,
+ # potentially causing e.g. \r\n to be emitted. Since emitting \n should
+ # "just work," here we change the sys.* streams to disable line ending
+ # normalization, ensuring compatibility with our ui type.
+
+ # write_through is new in Python 3.7.
+ kwargs = {
+ "newline": "\n",
+ "line_buffering": sys.stdout.line_buffering,
+ }
+ if util.safehasattr(sys.stdout, "write_through"):
+ kwargs["write_through"] = sys.stdout.write_through
+ sys.stdout = io.TextIOWrapper(
+ sys.stdout.buffer, sys.stdout.encoding, sys.stdout.errors, **kwargs
+ )
+
+ kwargs = {
+ "newline": "\n",
+ "line_buffering": sys.stderr.line_buffering,
+ }
+ if util.safehasattr(sys.stderr, "write_through"):
+ kwargs["write_through"] = sys.stderr.write_through
+ sys.stderr = io.TextIOWrapper(
+ sys.stderr.buffer, sys.stderr.encoding, sys.stderr.errors, **kwargs
+ )
+
+ # No write_through on read-only stream.
+ sys.stdin = io.TextIOWrapper(
+ sys.stdin.buffer,
+ sys.stdin.encoding,
+ sys.stdin.errors,
+ # None is universal newlines mode.
+ newline=None,
+ line_buffering=sys.stdin.line_buffering,
+ )
def _silencestdio():
for fp in (sys.stdout, sys.stderr):
@@ -514,7 +558,7 @@
'''
# util.interpolate can't deal with "$@" (with quotes) because it's only
# built to match prefix + patterns.
- replacemap = dict((b'$%d' % (i + 1), arg) for i, arg in enumerate(args))
+ replacemap = {b'$%d' % (i + 1): arg for i, arg in enumerate(args)}
replacemap[b'$0'] = name
replacemap[b'$$'] = b'$'
replacemap[b'$@'] = b' '.join(args)
@@ -624,7 +668,7 @@
except error.AmbiguousCommand:
self.badalias = _(
b"alias '%s' resolves to ambiguous command '%s'"
- ) % (self.name, cmd)
+ ) % (self.name, cmd,)
def _populatehelp(self, ui, name, cmd, fn, defaulthelp=None):
# confine strings to be passed to i18n.gettext()
--- a/mercurial/encoding.py Mon Apr 13 16:30:13 2020 +0300
+++ b/mercurial/encoding.py Thu Apr 16 22:51:09 2020 +0530
@@ -86,10 +86,10 @@
else:
# preferred encoding isn't known yet; use utf-8 to avoid unicode error
# and recreate it once encoding is settled
- environ = dict(
- (k.encode('utf-8'), v.encode('utf-8'))
+ environ = {
+ k.encode('utf-8'): v.encode('utf-8')
for k, v in os.environ.items() # re-exports
- )
+ }
_encodingrewrites = {
b'646': b'ascii',
@@ -285,10 +285,10 @@
if not _nativeenviron:
# now encoding and helper functions are available, recreate the environ
# dict to be exported to other modules
- environ = dict(
- (tolocal(k.encode('utf-8')), tolocal(v.encode('utf-8')))
+ environ = {
+ tolocal(k.encode('utf-8')): tolocal(v.encode('utf-8'))
for k, v in os.environ.items() # re-exports
- )
+ }
if pycompat.ispy3:
# os.getcwd() on Python 3 returns string, but it has os.getcwdb() which
--- a/mercurial/exchange.py Mon Apr 13 16:30:13 2020 +0300
+++ b/mercurial/exchange.py Thu Apr 16 22:51:09 2020 +0530
@@ -8,6 +8,7 @@
from __future__ import absolute_import
import collections
+import weakref
from .i18n import _
from .node import (
@@ -856,7 +857,11 @@
for b, scid, dcid in addsrc:
if b in explicit:
explicit.remove(b)
- pushop.outbookmarks.append((b, b'', scid))
+ if bookmod.isdivergent(b):
+ pushop.ui.warn(_(b'cannot push divergent bookmark %s!\n') % b)
+ pushop.bkresult = 2
+ else:
+ pushop.outbookmarks.append((b, b'', scid))
# search for overwritten bookmark
for b, scid, dcid in list(advdst) + list(diverge) + list(differ):
if b in explicit:
@@ -1675,12 +1680,12 @@
def headsofdiff(h1, h2):
"""Returns heads(h1 % h2)"""
res = unfi.set(b'heads(%ln %% %ln)', h1, h2)
- return set(ctx.node() for ctx in res)
+ return {ctx.node() for ctx in res}
def headsofunion(h1, h2):
"""Returns heads((h1 + h2) - null)"""
res = unfi.set(b'heads((%ln + %ln - null))', h1, h2)
- return set(ctx.node() for ctx in res)
+ return {ctx.node() for ctx in res}
while True:
old_heads = unficl.heads()
@@ -1701,6 +1706,25 @@
pullop.rheads = set(pullop.rheads) - pullop.common
+def add_confirm_callback(repo, pullop):
+ """ adds a finalize callback to transaction which can be used to show stats
+ to user and confirm the pull before committing transaction """
+
+ tr = pullop.trmanager.transaction()
+ scmutil.registersummarycallback(
+ repo, tr, txnname=b'pull', as_validator=True
+ )
+ reporef = weakref.ref(repo.unfiltered())
+
+ def prompt(tr):
+ repo = reporef()
+ cm = _(b'accept incoming changes (yn)?$$ &Yes $$ &No')
+ if repo.ui.promptchoice(cm):
+ raise error.Abort(b"user aborted")
+
+ tr.addvalidator(b'900-pull-prompt', prompt)
+
+
def pull(
repo,
remote,
@@ -1712,6 +1736,7 @@
includepats=None,
excludepats=None,
depth=None,
+ confirm=None,
):
"""Fetch repository data from a remote.
@@ -1736,6 +1761,8 @@
``depth`` is an integer indicating the DAG depth of history we're
interested in. If defined, for each revision specified in ``heads``, we
will fetch up to this many of its ancestors and data associated with them.
+ ``confirm`` is a boolean indicating whether the pull should be confirmed
+ before committing the transaction. This overrides HGPLAIN.
Returns the ``pulloperation`` created for this pull.
"""
@@ -1782,6 +1809,11 @@
if not bookmod.bookmarksinstore(repo):
wlock = repo.wlock()
with wlock, repo.lock(), pullop.trmanager:
+ if confirm or (
+ repo.ui.configbool(b"pull", b"confirm") and not repo.ui.plain()
+ ):
+ add_confirm_callback(repo, pullop)
+
# Use the modern wire protocol, if available.
if remote.capable(b'command-changesetdata'):
exchangev2.pull(pullop)
@@ -3068,7 +3100,15 @@
if not prefers:
return list(entries)
- prefers = [p.split(b'=', 1) for p in prefers]
+ def _split(p):
+ if b'=' not in p:
+ hint = _(b"each comma separated item should be key=value pairs")
+ raise error.Abort(
+ _(b"invalid ui.clonebundleprefers item: %s") % p, hint=hint
+ )
+ return p.split(b'=', 1)
+
+ prefers = [_split(p) for p in prefers]
items = sorted(clonebundleentry(v, prefers) for v in entries)
return [i.value for i in items]
--- a/mercurial/extensions.py Mon Apr 13 16:30:13 2020 +0300
+++ b/mercurial/extensions.py Thu Apr 16 22:51:09 2020 +0530
@@ -787,11 +787,11 @@
try:
from hgext import __index__ # pytype: disable=import-error
- return dict(
- (name, gettext(desc))
+ return {
+ name: gettext(desc)
for name, desc in pycompat.iteritems(__index__.docs)
if name not in _order
- )
+ }
except (ImportError, AttributeError):
pass
@@ -808,18 +808,8 @@
return exts
-def disabledext(name):
- '''find a specific disabled extension from hgext. returns desc'''
- try:
- from hgext import __index__ # pytype: disable=import-error
-
- if name in _order: # enabled
- return
- else:
- return gettext(__index__.docs.get(name))
- except (ImportError, AttributeError):
- pass
-
+def disabled_help(name):
+ """Obtain the full help text for a disabled extension, or None."""
paths = _disabledpaths()
if name in paths:
return _disabledhelp(paths[name])
--- a/mercurial/fancyopts.py Mon Apr 13 16:30:13 2020 +0300
+++ b/mercurial/fancyopts.py Thu Apr 16 22:51:09 2020 +0530
@@ -314,7 +314,7 @@
argmap = {}
defmap = {}
negations = {}
- alllong = set(o[1] for o in options)
+ alllong = {o[1] for o in options}
for option in options:
if len(option) == 5:
--- a/mercurial/graphmod.py Mon Apr 13 16:30:13 2020 +0300
+++ b/mercurial/graphmod.py Thu Apr 16 22:51:09 2020 +0530
@@ -58,7 +58,7 @@
# partition into parents in the rev set and missing parents, then
# augment the lists with markers, to inform graph drawing code about
# what kind of edge to draw between nodes.
- pset = set(p.rev() for p in ctx.parents() if p.rev() in revs)
+ pset = {p.rev() for p in ctx.parents() if p.rev() in revs}
mpars = [
p.rev()
for p in ctx.parents()
@@ -95,9 +95,9 @@
include = set(nodes)
for node in nodes:
ctx = repo[node]
- parents = set(
+ parents = {
(PARENT, p.rev()) for p in ctx.parents() if p.node() in include
- )
+ }
yield (ctx.rev(), CHANGESET, ctx, sorted(parents))
--- a/mercurial/hbisect.py Mon Apr 13 16:30:13 2020 +0300
+++ b/mercurial/hbisect.py Thu Apr 16 22:51:09 2020 +0530
@@ -137,7 +137,7 @@
side = state[b'bad']
else:
side = state[b'good']
- num = len(set(i.node() for i in parents) & set(side))
+ num = len({i.node() for i in parents} & set(side))
if num == 1:
return parents[0].ancestor(parents[1])
return None
--- a/mercurial/help.py Mon Apr 13 16:30:13 2020 +0300
+++ b/mercurial/help.py Thu Apr 16 22:51:09 2020 +0530
@@ -153,7 +153,18 @@
return doc
-def optrst(header, options, verbose):
+def parsedefaultmarker(text):
+ """given a text 'abc (DEFAULT: def.ghi)',
+ returns (b'abc', (b'def', b'ghi')). Otherwise return None"""
+ if text[-1:] == b')':
+ marker = b' (DEFAULT: '
+ pos = text.find(marker)
+ if pos >= 0:
+ item = text[pos + len(marker) : -1]
+ return text[:pos], item.split(b'.', 2)
+
+
+def optrst(header, options, verbose, ui):
data = []
multioccur = False
for option in options:
@@ -165,7 +176,14 @@
if not verbose and any(w in desc for w in _exclkeywords):
continue
-
+ defaultstrsuffix = b''
+ if default is None:
+ parseresult = parsedefaultmarker(desc)
+ if parseresult is not None:
+ (desc, (section, name)) = parseresult
+ if ui.configbool(section, name):
+ default = True
+ defaultstrsuffix = _(b' from config')
so = b''
if shortopt:
so = b'-' + shortopt
@@ -183,7 +201,7 @@
defaultstr = pycompat.bytestr(default)
if default is True:
defaultstr = _(b"on")
- desc += _(b" (default: %s)") % defaultstr
+ desc += _(b" (default: %s)") % (defaultstr + defaultstrsuffix)
if isinstance(default, list):
lo += b" %s [+]" % optlabel
@@ -714,11 +732,13 @@
# options
if not ui.quiet and entry[1]:
- rst.append(optrst(_(b"options"), entry[1], ui.verbose))
+ rst.append(optrst(_(b"options"), entry[1], ui.verbose, ui))
if ui.verbose:
rst.append(
- optrst(_(b"global options"), commands.globalopts, ui.verbose)
+ optrst(
+ _(b"global options"), commands.globalopts, ui.verbose, ui
+ )
)
if not ui.verbose:
@@ -858,7 +878,9 @@
elif ui.verbose:
rst.append(
b'\n%s\n'
- % optrst(_(b"global options"), commands.globalopts, ui.verbose)
+ % optrst(
+ _(b"global options"), commands.globalopts, ui.verbose, ui
+ )
)
if name == b'shortlist':
rst.append(
@@ -944,7 +966,7 @@
doc = gettext(pycompat.getdoc(mod)) or _(b'no help text available')
except KeyError:
mod = None
- doc = extensions.disabledext(name)
+ doc = extensions.disabled_help(name)
if not doc:
raise error.UnknownCommand(name)
--- a/mercurial/helptext/config.txt Mon Apr 13 16:30:13 2020 +0300
+++ b/mercurial/helptext/config.txt Thu Apr 16 22:51:09 2020 +0530
@@ -888,7 +888,8 @@
Compression algorithm used by revlog. Supported values are `zlib` and
`zstd`. The `zlib` engine is the historical default of Mercurial. `zstd` is
a newer format that is usually a net win over `zlib`, operating faster at
- better compression rates. Use `zstd` to reduce CPU usage.
+ better compression rates. Use `zstd` to reduce CPU usage. Multiple values
+ can be specified, the first available one will be used.
On some systems, the Mercurial installation may lack `zstd` support.
@@ -2005,12 +2006,12 @@
Level of allowed race condition between two pushing clients.
- 'strict': push is abort if another client touched the repository
- while the push was preparing. (default)
+ while the push was preparing.
- 'check-related': push is only aborted if it affects head that got also
- affected while the push was preparing.
-
- This requires compatible client (version 4.3 and later). Old client will
- use 'strict'.
+ affected while the push was preparing. (default since 5.4)
+
+ 'check-related' only takes effect for compatible clients (version
+ 4.3 and later). Older clients will use 'strict'.
``validate``
Whether to validate the completeness of pushed changesets by
--- a/mercurial/hg.py Mon Apr 13 16:30:13 2020 +0300
+++ b/mercurial/hg.py Thu Apr 16 22:51:09 2020 +0530
@@ -60,12 +60,19 @@
path = util.expandpath(util.urllocalpath(path))
try:
- isfile = os.path.isfile(path)
+ # we use os.stat() directly here instead of os.path.isfile()
+ # because the latter started returning `False` on invalid path
+ # exceptions starting in 3.8 and we care about handling
+ # invalid paths specially here.
+ st = os.stat(path)
+ isfile = stat.S_ISREG(st.st_mode)
# Python 2 raises TypeError, Python 3 ValueError.
except (TypeError, ValueError) as e:
raise error.Abort(
_(b'invalid path %s: %s') % (path, pycompat.bytestr(e))
)
+ except OSError:
+ isfile = False
return isfile and bundlerepo or localrepo
@@ -688,7 +695,7 @@
# data.
createopts[b'lfs'] = True
- if extensions.disabledext(b'lfs'):
+ if extensions.disabled_help(b'lfs'):
ui.status(
_(
b'(remote is using large file support (lfs), but it is '
@@ -1040,10 +1047,9 @@
def clean(repo, node, show_stats=True, quietempty=False):
"""forcibly switch the working directory to node, clobbering changes"""
stats = updaterepo(repo, node, True)
- repo.vfs.unlinkpath(b'graftstate', ignoremissing=True)
+ assert stats.unresolvedcount == 0
if show_stats:
_showstats(repo, stats, quietempty)
- return stats.unresolvedcount > 0
# naming conflict in updatetotally()
@@ -1138,27 +1144,12 @@
def merge(
- repo,
- node,
- force=None,
- remind=True,
- mergeforce=False,
- labels=None,
- abort=False,
+ ctx, force=False, remind=True, labels=None,
):
"""Branch merge with node, resolving changes. Return true if any
unresolved conflicts."""
- if abort:
- return abortmerge(repo.ui, repo)
-
- stats = mergemod.update(
- repo,
- node,
- branchmerge=True,
- force=force,
- mergeforce=mergeforce,
- labels=labels,
- )
+ repo = ctx.repo()
+ stats = mergemod.merge(ctx, force=force, labels=labels)
_showstats(repo, stats)
if stats.unresolvedcount:
repo.ui.status(
@@ -1182,9 +1173,9 @@
node = repo[b'.'].hex()
repo.ui.status(_(b"aborting the merge, updating back to %s\n") % node[:12])
- stats = mergemod.update(repo, node, branchmerge=False, force=True)
+ stats = mergemod.clean_update(repo[node])
+ assert stats.unresolvedcount == 0
_showstats(repo, stats)
- return stats.unresolvedcount > 0
def _incoming(
--- a/mercurial/hgweb/webutil.py Mon Apr 13 16:30:13 2020 +0300
+++ b/mercurial/hgweb/webutil.py Thu Apr 16 22:51:09 2020 +0530
@@ -936,5 +936,5 @@
def getgraphnode(repo, ctx):
return templatekw.getgraphnodecurrent(
- repo, ctx
+ repo, ctx, {}
) + templatekw.getgraphnodesymbol(ctx)
--- a/mercurial/hook.py Mon Apr 13 16:30:13 2020 +0300
+++ b/mercurial/hook.py Thu Apr 16 22:51:09 2020 +0530
@@ -7,6 +7,7 @@
from __future__ import absolute_import
+import contextlib
import os
import sys
@@ -259,26 +260,45 @@
return r
+@contextlib.contextmanager
+def redirect_stdio():
+ """Redirects stdout to stderr, if possible."""
+
+ oldstdout = -1
+ try:
+ if _redirect:
+ try:
+ stdoutno = procutil.stdout.fileno()
+ stderrno = procutil.stderr.fileno()
+ # temporarily redirect stdout to stderr, if possible
+ if stdoutno >= 0 and stderrno >= 0:
+ procutil.stdout.flush()
+ oldstdout = os.dup(stdoutno)
+ os.dup2(stderrno, stdoutno)
+ except (OSError, AttributeError):
+ # files seem to be bogus, give up on redirecting (WSGI, etc)
+ pass
+
+ yield
+
+ finally:
+ # The stderr is fully buffered on Windows when connected to a pipe.
+ # A forcible flush is required to make small stderr data in the
+ # remote side available to the client immediately.
+ procutil.stderr.flush()
+
+ if _redirect and oldstdout >= 0:
+ procutil.stdout.flush() # write hook output to stderr fd
+ os.dup2(oldstdout, stdoutno)
+ os.close(oldstdout)
+
+
def runhooks(ui, repo, htype, hooks, throw=False, **args):
args = pycompat.byteskwargs(args)
res = {}
- oldstdout = -1
- try:
+ with redirect_stdio():
for hname, cmd in hooks:
- if oldstdout == -1 and _redirect:
- try:
- stdoutno = procutil.stdout.fileno()
- stderrno = procutil.stderr.fileno()
- # temporarily redirect stdout to stderr, if possible
- if stdoutno >= 0 and stderrno >= 0:
- procutil.stdout.flush()
- oldstdout = os.dup(stdoutno)
- os.dup2(stderrno, stdoutno)
- except (OSError, AttributeError):
- # files seem to be bogus, give up on redirecting (WSGI, etc)
- pass
-
if cmd is _fromuntrusted:
if throw:
raise error.HookAbort(
@@ -312,15 +332,5 @@
raised = False
res[hname] = r, raised
- finally:
- # The stderr is fully buffered on Windows when connected to a pipe.
- # A forcible flush is required to make small stderr data in the
- # remote side available to the client immediately.
- procutil.stderr.flush()
-
- if _redirect and oldstdout >= 0:
- procutil.stdout.flush() # write hook output to stderr fd
- os.dup2(oldstdout, stdoutno)
- os.close(oldstdout)
return res
--- a/mercurial/httpconnection.py Mon Apr 13 16:30:13 2020 +0300
+++ b/mercurial/httpconnection.py Thu Apr 16 22:51:09 2020 +0530
@@ -39,12 +39,15 @@
self.write = self._data.write
self.length = os.fstat(self._data.fileno()).st_size
self._pos = 0
+ self._progress = self._makeprogress()
+
+ def _makeprogress(self):
# We pass double the max for total because we currently have
# to send the bundle twice in the case of a server that
# requires authentication. Since we can't know until we try
# once whether authentication will be required, just lie to
# the user and maybe the push succeeds suddenly at 50%.
- self._progress = ui.makeprogress(
+ return self.ui.makeprogress(
_(b'sending'), unit=_(b'kb'), total=(self.length // 1024 * 2)
)
--- a/mercurial/interfaces/repository.py Mon Apr 13 16:30:13 2020 +0300
+++ b/mercurial/interfaces/repository.py Thu Apr 16 22:51:09 2020 +0530
@@ -985,18 +985,9 @@
def hasdir(dir):
"""Returns a bool indicating if a directory is in this manifest."""
- def matches(match):
- """Generate a new manifest filtered through a matcher.
-
- Returns an object conforming to the ``imanifestdict`` interface.
- """
-
def walk(match):
"""Generator of paths in manifest satisfying a matcher.
- This is equivalent to ``self.matches(match).iterkeys()`` except a new
- manifest object is not created.
-
If the matcher has explicit files listed and they don't exist in
the manifest, ``match.bad()`` is called for each missing file.
"""
@@ -1027,8 +1018,8 @@
def get(path, default=None):
"""Obtain the node value for a path or a default value if missing."""
- def flags(path, default=b''):
- """Return the flags value for a path or a default value if missing."""
+ def flags(path):
+ """Return the flags value for a path (default: empty bytestring)."""
def copy():
"""Return a copy of this manifest."""
@@ -1061,6 +1052,9 @@
Returns a 2-tuple containing ``bytearray(self.text())`` and the
delta between ``base`` and this manifest.
+
+ If this manifest implementation can't support ``fastdelta()``,
+ raise ``mercurial.manifest.FastdeltaUnavailable``.
"""
@@ -1071,14 +1065,6 @@
as part of a larger interface.
"""
- def new():
- """Obtain a new manifest instance.
-
- Returns an object conforming to the ``imanifestrevisionwritable``
- interface. The instance will be associated with the same
- ``imanifestlog`` collection as this instance.
- """
-
def copy():
"""Obtain a copy of this manifest instance.
--- a/mercurial/localrepo.py Mon Apr 13 16:30:13 2020 +0300
+++ b/mercurial/localrepo.py Thu Apr 16 22:51:09 2020 +0530
@@ -699,6 +699,7 @@
# Map of requirements to list of extensions to load automatically when
# requirement is present.
autoextensions = {
+ b'git': [b'git'],
b'largefiles': [b'largefiles'],
b'lfs': [b'lfs'],
}
@@ -932,6 +933,12 @@
if ui.configbool(b'experimental', b'rust.index'):
options[b'rust.index'] = True
+ if ui.configbool(b'experimental', b'exp-persistent-nodemap'):
+ options[b'exp-persistent-nodemap'] = True
+ if ui.configbool(b'experimental', b'exp-persistent-nodemap.mmap'):
+ options[b'exp-persistent-nodemap.mmap'] = True
+ if ui.configbool(b'devel', b'persistent-nodemap'):
+ options[b'devel-force-nodemap'] = True
return options
@@ -1803,7 +1810,7 @@
# map tag name to (node, hist)
alltags = tagsmod.findglobaltags(self.ui, self)
# map tag name to tag type
- tagtypes = dict((tag, b'global') for tag in alltags)
+ tagtypes = {tag: b'global' for tag in alltags}
tagsmod.readlocaltags(self.ui, self, alltags, tagtypes)
@@ -1816,12 +1823,10 @@
if node != nullid:
tags[encoding.tolocal(name)] = node
tags[b'tip'] = self.changelog.tip()
- tagtypes = dict(
- [
- (encoding.tolocal(name), value)
- for (name, value) in pycompat.iteritems(tagtypes)
- ]
- )
+ tagtypes = {
+ encoding.tolocal(name): value
+ for (name, value) in pycompat.iteritems(tagtypes)
+ }
return (tags, tagtypes)
def tagtype(self, tagname):
@@ -2173,15 +2178,16 @@
)
if hook.hashook(repo.ui, b'pretxnclose-phase'):
cl = repo.unfiltered().changelog
- for rev, (old, new) in tr.changes[b'phases'].items():
- args = tr.hookargs.copy()
- node = hex(cl.node(rev))
- args.update(phases.preparehookargs(node, old, new))
- repo.hook(
- b'pretxnclose-phase',
- throw=True,
- **pycompat.strkwargs(args)
- )
+ for revs, (old, new) in tr.changes[b'phases']:
+ for rev in revs:
+ args = tr.hookargs.copy()
+ node = hex(cl.node(rev))
+ args.update(phases.preparehookargs(node, old, new))
+ repo.hook(
+ b'pretxnclose-phase',
+ throw=True,
+ **pycompat.strkwargs(args)
+ )
repo.hook(
b'pretxnclose', throw=True, **pycompat.strkwargs(tr.hookargs)
@@ -2226,7 +2232,7 @@
)
tr.changes[b'origrepolen'] = len(self)
tr.changes[b'obsmarkers'] = set()
- tr.changes[b'phases'] = {}
+ tr.changes[b'phases'] = []
tr.changes[b'bookmarks'] = {}
tr.hookargs[b'txnid'] = txnid
@@ -2260,16 +2266,19 @@
if hook.hashook(repo.ui, b'txnclose-phase'):
cl = repo.unfiltered().changelog
- phasemv = sorted(tr.changes[b'phases'].items())
- for rev, (old, new) in phasemv:
- args = tr.hookargs.copy()
- node = hex(cl.node(rev))
- args.update(phases.preparehookargs(node, old, new))
- repo.hook(
- b'txnclose-phase',
- throw=False,
- **pycompat.strkwargs(args)
- )
+ phasemv = sorted(
+ tr.changes[b'phases'], key=lambda r: r[0][0]
+ )
+ for revs, (old, new) in phasemv:
+ for rev in revs:
+ args = tr.hookargs.copy()
+ node = hex(cl.node(rev))
+ args.update(phases.preparehookargs(node, old, new))
+ repo.hook(
+ b'txnclose-phase',
+ throw=False,
+ **pycompat.strkwargs(args)
+ )
repo.hook(
b'txnclose', throw=False, **pycompat.strkwargs(hookargs)
@@ -2498,6 +2507,9 @@
if full:
unfi = self.unfiltered()
+
+ self.changelog.update_caches(transaction=tr)
+
rbc = unfi.revbranchcache()
for r in unfi.changelog:
rbc.branchinfo(r)
@@ -2843,6 +2855,14 @@
fparent1, fparent2 = fparent2, nullid
elif fparent2 in fparentancestors:
fparent2 = nullid
+ elif not fparentancestors:
+ # TODO: this whole if-else might be simplified much more
+ ms = mergemod.mergestate.read(self)
+ if (
+ fname in ms
+ and ms[fname] == mergemod.MERGE_RECORD_MERGED_OTHER
+ ):
+ fparent1, fparent2 = fparent2, nullid
# is the file changed?
text = fctx.data()
@@ -2938,6 +2958,9 @@
self, status, text, user, date, extra
)
+ ms = mergemod.mergestate.read(self)
+ mergeutil.checkunresolved(ms)
+
# internal config: ui.allowemptycommit
allowemptycommit = (
wctx.branch() != wctx.p1().branch()
@@ -2947,14 +2970,13 @@
or self.ui.configbool(b'ui', b'allowemptycommit')
)
if not allowemptycommit:
+ self.ui.debug(b'nothing to commit, clearing merge state\n')
+ ms.reset()
return None
if merge and cctx.deleted():
raise error.Abort(_(b"cannot commit merge with missing files"))
- ms = mergemod.mergestate.read(self)
- mergeutil.checkunresolved(ms)
-
if editor:
cctx._text = editor(self, cctx, subs)
edited = text != cctx._text
@@ -3572,14 +3594,17 @@
if ui.configbool(b'format', b'dotencode'):
requirements.add(b'dotencode')
- compengine = ui.config(b'format', b'revlog-compression')
- if compengine not in util.compengines:
+ compengines = ui.configlist(b'format', b'revlog-compression')
+ for compengine in compengines:
+ if compengine in util.compengines:
+ break
+ else:
raise error.Abort(
_(
- b'compression engine %s defined by '
+ b'compression engines %s defined by '
b'format.revlog-compression not available'
)
- % compengine,
+ % b', '.join(b'"%s"' % e for e in compengines),
hint=_(
b'run "hg debuginstall" to list available '
b'compression engines'
@@ -3587,7 +3612,7 @@
)
# zlib is the historical default and doesn't need an explicit requirement.
- elif compengine == b'zstd':
+ if compengine == b'zstd':
requirements.add(b'revlog-compression-zstd')
elif compengine != b'zlib':
requirements.add(b'exp-compression-%s' % compengine)
--- a/mercurial/logcmdutil.py Mon Apr 13 16:30:13 2020 +0300
+++ b/mercurial/logcmdutil.py Thu Apr 16 22:51:09 2020 +0530
@@ -1004,7 +1004,7 @@
ui, spec, defaults=templatekw.keywords, resources=tres
)
- def formatnode(repo, ctx):
+ def formatnode(repo, ctx, cache):
props = {b'ctx': ctx, b'repo': repo}
return templ.renderdefault(props)
@@ -1038,8 +1038,9 @@
# experimental config: experimental.graphshorten
state.graphshorten = ui.configbool(b'experimental', b'graphshorten')
+ formatnode_cache = {}
for rev, type, ctx, parents in dag:
- char = formatnode(repo, ctx)
+ char = formatnode(repo, ctx, formatnode_cache)
copies = getcopies(ctx) if getcopies else None
edges = edgefn(type, char, state, rev, parents)
firstedge = next(edges)
--- a/mercurial/manifest.py Mon Apr 13 16:30:13 2020 +0300
+++ b/mercurial/manifest.py Thu Apr 16 22:51:09 2020 +0530
@@ -23,6 +23,7 @@
from . import (
encoding,
error,
+ match as matchmod,
mdiff,
pathutil,
policy,
@@ -56,7 +57,12 @@
raise ValueError(b'Manifest lines not in sorted order.')
prev = l
f, n = l.split(b'\0')
- if len(n) > 40:
+ nl = len(n)
+ if 64 < nl:
+ # modern hash, full width
+ yield f, bin(n[:64]), n[64:]
+ elif 40 < nl < 45:
+ # legacy hash, always sha1
yield f, bin(n[:40]), n[40:]
else:
yield f, bin(n), b''
@@ -264,9 +270,15 @@
if pos == -1:
return (data[1], data[2])
zeropos = data.find(b'\x00', pos)
+ nlpos = data.find(b'\n', zeropos)
assert 0 <= needle <= len(self.positions)
assert len(self.extrainfo) == len(self.positions)
- hashval = unhexlify(data, self.extrainfo[needle], zeropos + 1, 40)
+ hlen = nlpos - zeropos - 1
+ # Hashes sometimes have an extra byte tucked on the end, so
+ # detect that.
+ if hlen % 2:
+ hlen -= 1
+ hashval = unhexlify(data, self.extrainfo[needle], zeropos + 1, hlen)
flags = self._getflags(data, needle, zeropos)
return (hashval, flags)
@@ -291,8 +303,13 @@
b"Manifest values must be a tuple of (node, flags)."
)
hashval = value[0]
- if not isinstance(hashval, bytes) or not 20 <= len(hashval) <= 22:
- raise TypeError(b"node must be a 20-byte byte string")
+ # hashes are either 20 or 32 bytes (sha1 or its replacement),
+ # and allow one extra byte that won't be persisted to disk but
+ # is sometimes used in memory.
+ if not isinstance(hashval, bytes) or not (
+ 20 <= len(hashval) <= 22 or 32 <= len(hashval) <= 34
+ ):
+ raise TypeError(b"node must be a 20-byte or 32-byte byte string")
flags = value[1]
if len(hashval) == 22:
hashval = hashval[:-1]
@@ -376,8 +393,13 @@
t = self.extradata[-cur - 1]
l.append(self._pack(t))
self.positions[i] = offset
- if len(t[1]) > 20:
- self.extrainfo[i] = ord(t[1][21])
+ # Hashes are either 20 bytes (old sha1s) or 32
+ # bytes (new non-sha1).
+ hlen = 20
+ if len(t[1]) > 25:
+ hlen = 32
+ if len(t[1]) > hlen:
+ self.extrainfo[i] = ord(t[1][hlen + 1])
offset += len(l[-1])
i += 1
self.data = b''.join(l)
@@ -385,7 +407,11 @@
self.extradata = []
def _pack(self, d):
- return d[0] + b'\x00' + hex(d[1][:20]) + d[2] + b'\n'
+ n = d[1]
+ if len(n) == 21 or len(n) == 33:
+ n = n[:-1]
+ assert len(n) == 20 or len(n) == 32
+ return d[0] + b'\x00' + hex(n) + d[2] + b'\n'
def text(self):
self._compact()
@@ -461,7 +487,7 @@
__bool__ = __nonzero__
def __setitem__(self, key, node):
- self._lm[key] = node, self.flags(key, b'')
+ self._lm[key] = node, self.flags(key)
def __contains__(self, key):
if key is None:
@@ -482,17 +508,11 @@
def filesnotin(self, m2, match=None):
'''Set of files in this manifest that are not in the other'''
- if match:
- m1 = self.matches(match)
- m2 = m2.matches(match)
- return m1.filesnotin(m2)
- diff = self.diff(m2)
- files = set(
- filepath
- for filepath, hashflags in pycompat.iteritems(diff)
- if hashflags[1][0] is None
- )
- return files
+ if match is not None:
+ match = matchmod.badmatch(match, lambda path, msg: None)
+ sm2 = set(m2.walk(match))
+ return {f for f in self.walk(match) if f not in sm2}
+ return {f for f in self if f not in m2}
@propertycache
def _dirs(self):
@@ -531,7 +551,8 @@
# avoid the entire walk if we're only looking for specific files
if self._filesfastpath(match):
for fn in sorted(fset):
- yield fn
+ if fn in self:
+ yield fn
return
for fn in self:
@@ -549,7 +570,7 @@
if not self.hasdir(fn):
match.bad(fn, None)
- def matches(self, match):
+ def _matches(self, match):
'''generate a new manifest filtered by the match argument'''
if match.always():
return self.copy()
@@ -582,8 +603,8 @@
string.
'''
if match:
- m1 = self.matches(match)
- m2 = m2.matches(match)
+ m1 = self._matches(match)
+ m2 = m2._matches(match)
return m1.diff(m2, clean=clean)
return self._lm.diff(m2._lm, clean)
@@ -596,11 +617,11 @@
except KeyError:
return default
- def flags(self, key, default=b''):
+ def flags(self, key):
try:
return self._lm[key][1]
except KeyError:
- return default
+ return b''
def copy(self):
c = manifestdict()
@@ -764,6 +785,7 @@
_noop = lambda s: None
+@interfaceutil.implementer(repository.imanifestdict)
class treemanifest(object):
def __init__(self, dir=b'', text=b''):
self._dir = dir
@@ -1026,7 +1048,12 @@
self._dirs[dir] = treemanifest(self._subpath(dir))
self._dirs[dir].__setitem__(subpath, n)
else:
- self._files[f] = n[:21] # to match manifestdict's behavior
+ # manifest nodes are either 20 bytes or 32 bytes,
+ # depending on the hash in use. An extra byte is
+ # occasionally used by hg, but won't ever be
+ # persisted. Trim to 21 or 33 bytes as appropriate.
+ trim = 21 if len(n) < 25 else 33
+ self._files[f] = n[:trim] # to match manifestdict's behavior
self._dirty = True
def _load(self):
@@ -1079,8 +1106,8 @@
def filesnotin(self, m2, match=None):
'''Set of files in this manifest that are not in the other'''
if match and not match.always():
- m1 = self.matches(match)
- m2 = m2.matches(match)
+ m1 = self._matches(match)
+ m2 = m2._matches(match)
return m1.filesnotin(m2)
files = set()
@@ -1126,9 +1153,6 @@
def walk(self, match):
'''Generates matching file names.
- Equivalent to manifest.matches(match).iterkeys(), but without creating
- an entirely new manifest.
-
It also reports nonexistent files by marking them bad with match.bad().
'''
if match.always():
@@ -1171,16 +1195,16 @@
for f in self._dirs[p]._walk(match):
yield f
- def matches(self, match):
- '''generate a new manifest filtered by the match argument'''
- if match.always():
- return self.copy()
-
- return self._matches(match)
-
def _matches(self, match):
'''recursively generate a new manifest filtered by the match argument.
'''
+ if match.always():
+ return self.copy()
+ return self._matches_inner(match)
+
+ def _matches_inner(self, match):
+ if match.always():
+ return self.copy()
visit = match.visitchildrenset(self._dir[:-1])
if visit == b'all':
@@ -1211,7 +1235,7 @@
for dir, subm in pycompat.iteritems(self._dirs):
if visit and dir[:-1] not in visit:
continue
- m = subm._matches(match)
+ m = subm._matches_inner(match)
if not m._isempty():
ret._dirs[dir] = m
@@ -1219,6 +1243,9 @@
ret._dirty = True
return ret
+ def fastdelta(self, base, changes):
+ raise FastdeltaUnavailable()
+
def diff(self, m2, match=None, clean=False):
'''Finds changes between the current manifest and m2.
@@ -1235,8 +1262,8 @@
string.
'''
if match and not match.always():
- m1 = self.matches(match)
- m2 = m2.matches(match)
+ m1 = self._matches(match)
+ m2 = m2._matches(match)
return m1.diff(m2, clean=clean)
result = {}
emptytree = treemanifest()
@@ -1405,6 +1432,7 @@
set = super(manifestfulltextcache, self).__setitem__
# ignore trailing data, this is a cache, corruption is skipped
while True:
+ # TODO do we need to do work here for sha1 portability?
node = fp.read(20)
if len(node) < 20:
break
@@ -1495,6 +1523,10 @@
MAXCOMPRESSION = 3
+class FastdeltaUnavailable(Exception):
+ """Exception raised when fastdelta isn't usable on a manifest."""
+
+
@interfaceutil.implementer(repository.imanifeststorage)
class manifestrevlog(object):
'''A revlog that stores manifest texts. This is responsible for caching the
@@ -1621,7 +1653,9 @@
readtree=None,
match=None,
):
- if p1 in self.fulltextcache and util.safehasattr(m, b'fastdelta'):
+ try:
+ if p1 not in self.fulltextcache:
+ raise FastdeltaUnavailable()
# If our first parent is in the manifest cache, we can
# compute a delta here using properties we know about the
# manifest up-front, which may save time later for the
@@ -1640,11 +1674,12 @@
n = self._revlog.addrevision(
text, transaction, link, p1, p2, cachedelta
)
- else:
- # The first parent manifest isn't already loaded, so we'll
- # just encode a fulltext of the manifest and pass that
- # through to the revlog layer, and let it handle the delta
- # process.
+ except FastdeltaUnavailable:
+ # The first parent manifest isn't already loaded or the
+ # manifest implementation doesn't support fastdelta, so
+ # we'll just encode a fulltext of the manifest and pass
+ # that through to the revlog layer, and let it handle the
+ # delta process.
if self._treeondisk:
assert readtree, b"readtree must be set for treemanifest writes"
assert match, b"match must be specified for treemanifest writes"
@@ -1923,9 +1958,6 @@
def _storage(self):
return self._manifestlog.getstorage(b'')
- def new(self):
- return memmanifestctx(self._manifestlog)
-
def copy(self):
memmf = memmanifestctx(self._manifestlog)
memmf._manifestdict = self.read().copy()
@@ -1972,9 +2004,6 @@
def node(self):
return self._node
- def new(self):
- return memmanifestctx(self._manifestlog)
-
def copy(self):
memmf = memmanifestctx(self._manifestlog)
memmf._manifestdict = self.read().copy()
@@ -2039,9 +2068,6 @@
def _storage(self):
return self._manifestlog.getstorage(b'')
- def new(self, dir=b''):
- return memtreemanifestctx(self._manifestlog, dir=dir)
-
def copy(self):
memmf = memtreemanifestctx(self._manifestlog, dir=self._dir)
memmf._treemanifest = self._treemanifest.copy()
@@ -2124,9 +2150,6 @@
def node(self):
return self._node
- def new(self, dir=b''):
- return memtreemanifestctx(self._manifestlog, dir=dir)
-
def copy(self):
memmf = memtreemanifestctx(self._manifestlog, dir=self._dir)
memmf._treemanifest = self.read().copy()
--- a/mercurial/match.py Mon Apr 13 16:30:13 2020 +0300
+++ b/mercurial/match.py Thu Apr 16 22:51:09 2020 +0530
@@ -24,7 +24,7 @@
)
from .utils import stringutil
-rustmod = policy.importrust('filepatterns')
+rustmod = policy.importrust('dirstate')
allpatternkinds = (
b're',
@@ -666,7 +666,10 @@
class includematcher(basematcher):
def __init__(self, root, kindpats, badfn=None):
super(includematcher, self).__init__(badfn)
-
+ if rustmod is not None:
+ # We need to pass the patterns to Rust because they can contain
+ # patterns from the user interface
+ self._kindpats = kindpats
self._pats, self.matchfn = _buildmatch(kindpats, b'(?:/|$)', root)
self._prefix = _prefix(kindpats)
roots, dirs, parents = _rootsdirsandparents(kindpats)
@@ -772,7 +775,7 @@
candidates = self._fileset | self._dirs - {b''}
if dir != b'':
d = dir + b'/'
- candidates = set(c[len(d) :] for c in candidates if c.startswith(d))
+ candidates = {c[len(d) :] for c in candidates if c.startswith(d)}
# self._dirs includes all of the directories, recursively, so if
# we're attempting to match foo/bar/baz.txt, it'll have '', 'foo',
# 'foo/bar' in it. Thus we can safely ignore a candidate that has a
@@ -1273,15 +1276,6 @@
'''Convert a (normalized) pattern of any kind into a
regular expression.
globsuffix is appended to the regexp of globs.'''
-
- if rustmod is not None:
- try:
- return rustmod.build_single_regex(kind, pat, globsuffix)
- except rustmod.PatternError:
- raise error.ProgrammingError(
- b'not a regex pattern: %s:%s' % (kind, pat)
- )
-
if not pat and kind in (b'glob', b'relpath'):
return b''
if kind == b're':
@@ -1554,18 +1548,6 @@
This is useful to debug ignore patterns.
'''
- if rustmod is not None:
- result, warnings = rustmod.read_pattern_file(
- filepath, bool(warn), sourceinfo,
- )
-
- for warning_params in warnings:
- # Can't be easily emitted from Rust, because it would require
- # a mechanism for both gettext and calling the `warn` function.
- warn(_(b"%s: ignoring invalid syntax '%s'\n") % warning_params)
-
- return result
-
syntaxes = {
b're': b'relre:',
b'regexp': b'relre:',
--- a/mercurial/mdiff.py Mon Apr 13 16:30:13 2020 +0300
+++ b/mercurial/mdiff.py Thu Apr 16 22:51:09 2020 +0530
@@ -91,7 +91,7 @@
)
def copy(self, **kwargs):
- opts = dict((k, getattr(self, k)) for k in self.defaults)
+ opts = {k: getattr(self, k) for k in self.defaults}
opts = pycompat.strkwargs(opts)
opts.update(kwargs)
return diffopts(**opts)
--- a/mercurial/merge.py Mon Apr 13 16:30:13 2020 +0300
+++ b/mercurial/merge.py Thu Apr 16 22:51:09 2020 +0530
@@ -64,6 +64,7 @@
RECORD_OVERRIDE = b't'
RECORD_UNSUPPORTED_MANDATORY = b'X'
RECORD_UNSUPPORTED_ADVISORY = b'x'
+RECORD_RESOLVED_OTHER = b'R'
MERGE_DRIVER_STATE_UNMARKED = b'u'
MERGE_DRIVER_STATE_MARKED = b'm'
@@ -74,6 +75,9 @@
MERGE_RECORD_UNRESOLVED_PATH = b'pu'
MERGE_RECORD_RESOLVED_PATH = b'pr'
MERGE_RECORD_DRIVER_RESOLVED = b'd'
+# represents that the file was automatically merged in favor
+# of other version. This info is used on commit.
+MERGE_RECORD_MERGED_OTHER = b'o'
ACTION_FORGET = b'f'
ACTION_REMOVE = b'r'
@@ -91,6 +95,8 @@
ACTION_KEEP = b'k'
ACTION_EXEC = b'e'
ACTION_CREATED_MERGE = b'cm'
+# GET the other/remote side and store this info in mergestate
+ACTION_GET_OTHER_AND_STORE = b'gs'
class mergestate(object):
@@ -227,6 +233,7 @@
RECORD_CHANGEDELETE_CONFLICT,
RECORD_PATH_CONFLICT,
RECORD_MERGE_DRIVER_MERGE,
+ RECORD_RESOLVED_OTHER,
):
bits = record.split(b'\0')
self._state[bits[0]] = bits[1:]
@@ -386,18 +393,26 @@
return configmergedriver
@util.propertycache
- def localctx(self):
+ def local(self):
if self._local is None:
- msg = b"localctx accessed but self._local isn't set"
+ msg = b"local accessed but self._local isn't set"
raise error.ProgrammingError(msg)
- return self._repo[self._local]
+ return self._local
+
+ @util.propertycache
+ def localctx(self):
+ return self._repo[self.local]
+
+ @util.propertycache
+ def other(self):
+ if self._other is None:
+ msg = b"other accessed but self._other isn't set"
+ raise error.ProgrammingError(msg)
+ return self._other
@util.propertycache
def otherctx(self):
- if self._other is None:
- msg = b"otherctx accessed but self._other isn't set"
- raise error.ProgrammingError(msg)
- return self._repo[self._other]
+ return self._repo[self.other]
def active(self):
"""Whether mergestate is active.
@@ -405,14 +420,7 @@
Returns True if there appears to be mergestate. This is a rough proxy
for "is a merge in progress."
"""
- # Check local variables before looking at filesystem for performance
- # reasons.
- return (
- bool(self._local)
- or bool(self._state)
- or self._repo.vfs.exists(self.statepathv1)
- or self._repo.vfs.exists(self.statepathv2)
- )
+ return bool(self._local) or bool(self._state)
def commit(self):
"""Write current state on disk (if necessary)"""
@@ -452,6 +460,10 @@
records.append(
(RECORD_PATH_CONFLICT, b'\0'.join([filename] + v))
)
+ elif v[0] == MERGE_RECORD_MERGED_OTHER:
+ records.append(
+ (RECORD_RESOLVED_OTHER, b'\0'.join([filename] + v))
+ )
elif v[1] == nullhex or v[6] == nullhex:
# Change/Delete or Delete/Change conflicts. These are stored in
# 'C' records. v[1] is the local file, and is nullhex when the
@@ -550,6 +562,10 @@
self._state[path] = [MERGE_RECORD_UNRESOLVED_PATH, frename, forigin]
self._dirty = True
+ def addmergedother(self, path):
+ self._state[path] = [MERGE_RECORD_MERGED_OTHER, nullhex, nullhex]
+ self._dirty = True
+
def __contains__(self, dfile):
return dfile in self._state
@@ -593,6 +609,8 @@
"""rerun merge process for file path `dfile`"""
if self[dfile] in (MERGE_RECORD_RESOLVED, MERGE_RECORD_DRIVER_RESOLVED):
return True, 0
+ if self._state[dfile][0] == MERGE_RECORD_MERGED_OTHER:
+ return True, 0
stateentry = self._state[dfile]
state, localkey, lfile, afile, anode, ofile, onode, flags = stateentry
octx = self._repo[self._other]
@@ -989,11 +1007,10 @@
"""
Check for case-folding collisions.
"""
-
# If the repo is narrowed, filter out files outside the narrowspec.
narrowmatch = repo.narrowmatch()
if not narrowmatch.always():
- wmf = wmf.matches(narrowmatch)
+ pmmf = set(wmf.walk(narrowmatch))
if actions:
narrowactions = {}
for m, actionsfortype in pycompat.iteritems(actions):
@@ -1002,9 +1019,9 @@
if narrowmatch(f):
narrowactions[m].append((f, args, msg))
actions = narrowactions
-
- # build provisional merged manifest up
- pmmf = set(wmf)
+ else:
+ # build provisional merged manifest up
+ pmmf = set(wmf)
if actions:
# KEEP and EXEC are no-op
@@ -1209,7 +1226,7 @@
narrowed.
"""
nooptypes = {b'k'} # TODO: handle with nonconflicttypes
- nonconflicttypes = set(b'a am c cm f g r e'.split())
+ nonconflicttypes = set(b'a am c cm f g gs r e'.split())
# We mutate the items in the dict during iteration, so iterate
# over a copy.
for f, action in list(actions.items()):
@@ -1256,17 +1273,19 @@
if matcher is not None and matcher.always():
matcher = None
- copy, movewithdir, diverge, renamedelete, dirmove = {}, {}, {}, {}, {}
-
# manifests fetched in order are going to be faster, so prime the caches
[
x.manifest()
for x in sorted(wctx.parents() + [p2, pa], key=scmutil.intrev)
]
+ branch_copies1 = copies.branch_copies()
+ branch_copies2 = copies.branch_copies()
+ diverge = {}
if followcopies:
- ret = copies.mergecopies(repo, wctx, p2, pa)
- copy, movewithdir, diverge, renamedelete, dirmove = ret
+ branch_copies1, branch_copies2, diverge = copies.mergecopies(
+ repo, wctx, p2, pa
+ )
boolbm = pycompat.bytestr(bool(branchmerge))
boolf = pycompat.bytestr(bool(force))
@@ -1278,8 +1297,10 @@
repo.ui.debug(b" ancestor: %s, local: %s, remote: %s\n" % (pa, wctx, p2))
m1, m2, ma = wctx.manifest(), p2.manifest(), pa.manifest()
- copied = set(copy.values())
- copied.update(movewithdir.values())
+ copied1 = set(branch_copies1.copy.values())
+ copied1.update(branch_copies1.movewithdir.values())
+ copied2 = set(branch_copies2.copy.values())
+ copied2.update(branch_copies2.movewithdir.values())
if b'.hgsubstate' in m1 and wctx.rev() is None:
# Check whether sub state is modified, and overwrite the manifest
@@ -1299,10 +1320,10 @@
relevantfiles = set(ma.diff(m2).keys())
# For copied and moved files, we need to add the source file too.
- for copykey, copyvalue in pycompat.iteritems(copy):
+ for copykey, copyvalue in pycompat.iteritems(branch_copies1.copy):
if copyvalue in relevantfiles:
relevantfiles.add(copykey)
- for movedirkey in movewithdir:
+ for movedirkey in branch_copies1.movewithdir:
relevantfiles.add(movedirkey)
filesmatcher = scmutil.matchfiles(repo, relevantfiles)
matcher = matchmod.intersectmatchers(matcher, filesmatcher)
@@ -1313,7 +1334,10 @@
for f, ((n1, fl1), (n2, fl2)) in pycompat.iteritems(diff):
if n1 and n2: # file exists on both local and remote side
if f not in ma:
- fa = copy.get(f, None)
+ # TODO: what if they're renamed from different sources?
+ fa = branch_copies1.copy.get(
+ f, None
+ ) or branch_copies2.copy.get(f, None)
if fa is not None:
actions[f] = (
ACTION_MERGE,
@@ -1341,14 +1365,22 @@
)
else:
actions[f] = (
- ACTION_GET,
+ ACTION_GET_OTHER_AND_STORE
+ if branchmerge
+ else ACTION_GET,
(fl2, False),
b'remote is newer',
)
elif nol and n2 == a: # remote only changed 'x'
actions[f] = (ACTION_EXEC, (fl2,), b'update permissions')
elif nol and n1 == a: # local only changed 'x'
- actions[f] = (ACTION_GET, (fl1, False), b'remote is newer')
+ actions[f] = (
+ ACTION_GET_OTHER_AND_STORE
+ if branchmerge
+ else ACTION_GET,
+ (fl1, False),
+ b'remote is newer',
+ )
else: # both changed something
actions[f] = (
ACTION_MERGE,
@@ -1356,10 +1388,12 @@
b'versions differ',
)
elif n1: # file exists only on local side
- if f in copied:
+ if f in copied2:
pass # we'll deal with it on m2 side
- elif f in movewithdir: # directory rename, move local
- f2 = movewithdir[f]
+ elif (
+ f in branch_copies1.movewithdir
+ ): # directory rename, move local
+ f2 = branch_copies1.movewithdir[f]
if f2 in m2:
actions[f2] = (
ACTION_MERGE,
@@ -1372,8 +1406,8 @@
(f, fl1),
b'remote directory rename - move from %s' % f,
)
- elif f in copy:
- f2 = copy[f]
+ elif f in branch_copies1.copy:
+ f2 = branch_copies1.copy[f]
actions[f] = (
ACTION_MERGE,
(f, f2, f2, False, pa.node()),
@@ -1397,10 +1431,10 @@
else:
actions[f] = (ACTION_REMOVE, None, b'other deleted')
elif n2: # file exists only on remote side
- if f in copied:
+ if f in copied1:
pass # we'll deal with it on m1 side
- elif f in movewithdir:
- f2 = movewithdir[f]
+ elif f in branch_copies2.movewithdir:
+ f2 = branch_copies2.movewithdir[f]
if f2 in m1:
actions[f2] = (
ACTION_MERGE,
@@ -1413,8 +1447,8 @@
(f, fl2),
b'local directory rename - get from %s' % f,
)
- elif f in copy:
- f2 = copy[f]
+ elif f in branch_copies2.copy:
+ f2 = branch_copies2.copy[f]
if f2 in m2:
actions[f] = (
ACTION_MERGE,
@@ -1451,10 +1485,10 @@
)
elif n2 != ma[f]:
df = None
- for d in dirmove:
+ for d in branch_copies1.dirmove:
if f.startswith(d):
# new file added in a directory that was moved
- df = dirmove[d] + f[len(d) :]
+ df = branch_copies1.dirmove[d] + f[len(d) :]
break
if df is not None and df in m1:
actions[df] = (
@@ -1481,6 +1515,9 @@
# Updates "actions" in place
_filternarrowactions(narrowmatch, branchmerge, actions)
+ renamedelete = branch_copies1.renamedelete
+ renamedelete.update(branch_copies2.renamedelete)
+
return actions, diverge, renamedelete
@@ -1576,6 +1613,8 @@
for f, a in sorted(pycompat.iteritems(actions)):
m, args, msg = a
+ if m == ACTION_GET_OTHER_AND_STORE:
+ m = ACTION_GET
repo.ui.debug(b' %s: %s -> %s\n' % (f, msg, m))
if f in fbids:
d = fbids[f]
@@ -1784,8 +1823,8 @@
def emptyactions():
"""create an actions dict, to be populated and passed to applyupdates()"""
- return dict(
- (m, [])
+ return {
+ m: []
for m in (
ACTION_ADD,
ACTION_ADD_MODIFIED,
@@ -1801,8 +1840,9 @@
ACTION_KEEP,
ACTION_PATH_CONFLICT,
ACTION_PATH_CONFLICT_RESOLVE,
+ ACTION_GET_OTHER_AND_STORE,
)
- )
+ }
def applyupdates(
@@ -1823,6 +1863,11 @@
updated, merged, removed = 0, 0, 0
ms = mergestate.clean(repo, wctx.p1().node(), mctx.node(), labels)
+
+ # add ACTION_GET_OTHER_AND_STORE to mergestate
+ for e in actions[ACTION_GET_OTHER_AND_STORE]:
+ ms.addmergedother(e[0])
+
moves = []
for m, l in actions.items():
l.sort()
@@ -2058,7 +2103,7 @@
extraactions = ms.actions()
if extraactions:
- mfiles = set(a[0] for a in actions[ACTION_MERGE])
+ mfiles = {a[0] for a in actions[ACTION_MERGE]}
for k, acts in pycompat.iteritems(extraactions):
actions[k].extend(acts)
if k == ACTION_GET and wantfiledata:
@@ -2205,6 +2250,7 @@
labels=None,
matcher=None,
mergeforce=False,
+ updatedirstate=True,
updatecheck=None,
wc=None,
):
@@ -2288,13 +2334,6 @@
),
)
)
- # If we're doing a partial update, we need to skip updating
- # the dirstate, so make a note of any partial-ness to the
- # update here.
- if matcher is None or matcher.always():
- partial = False
- else:
- partial = True
with repo.wlock():
if wc is None:
wc = repo[None]
@@ -2409,6 +2448,7 @@
ACTION_EXEC,
ACTION_REMOVE,
ACTION_PATH_CONFLICT_RESOLVE,
+ ACTION_GET_OTHER_AND_STORE,
):
msg = _(b"conflicting changes")
hint = _(b"commit or update --clean to discard changes")
@@ -2471,6 +2511,10 @@
actions[m] = []
actions[m].append((f, args, msg))
+ # ACTION_GET_OTHER_AND_STORE is a ACTION_GET + store in mergestate
+ for e in actions[ACTION_GET_OTHER_AND_STORE]:
+ actions[ACTION_GET].append(e)
+
if not util.fscasesensitive(repo.path):
# check collision between files only in p2 for clean update
if not branchmerge and (
@@ -2507,7 +2551,11 @@
### apply phase
if not branchmerge: # just jump to the new rev
fp1, fp2, xp1, xp2 = fp2, nullid, xp2, b''
- if not partial and not wc.isinmemory():
+ # If we're doing a partial update, we need to skip updating
+ # the dirstate.
+ always = matcher is None or matcher.always()
+ updatedirstate = updatedirstate and always and not wc.isinmemory()
+ if updatedirstate:
repo.hook(b'preupdate', throw=True, parent1=xp1, parent2=xp2)
# note that we're in the middle of an update
repo.vfs.write(b'updatestate', p2.hex())
@@ -2553,7 +2601,6 @@
)
)
- updatedirstate = not partial and not wc.isinmemory()
wantfiledata = updatedirstate and not branchmerge
stats, getfiledata = applyupdates(
repo, actions, wc, p2, overwrite, wantfiledata, labels=labels
@@ -2574,15 +2621,65 @@
if not branchmerge:
sparse.prunetemporaryincludes(repo)
- if not partial:
+ if updatedirstate:
repo.hook(
b'update', parent1=xp1, parent2=xp2, error=stats.unresolvedcount
)
return stats
+def merge(ctx, labels=None, force=False, wc=None):
+ """Merge another topological branch into the working copy.
+
+ force = whether the merge was run with 'merge --force' (deprecated)
+ """
+
+ return update(
+ ctx.repo(),
+ ctx.rev(),
+ labels=labels,
+ branchmerge=True,
+ force=force,
+ mergeforce=force,
+ wc=wc,
+ )
+
+
+def clean_update(ctx, wc=None):
+ """Do a clean update to the given commit.
+
+ This involves updating to the commit and discarding any changes in the
+ working copy.
+ """
+ return update(ctx.repo(), ctx.rev(), branchmerge=False, force=True, wc=wc)
+
+
+def revert_to(ctx, matcher=None, wc=None):
+ """Revert the working copy to the given commit.
+
+ The working copy will keep its current parent(s) but its content will
+ be the same as in the given commit.
+ """
+
+ return update(
+ ctx.repo(),
+ ctx.rev(),
+ branchmerge=False,
+ force=True,
+ updatedirstate=False,
+ matcher=matcher,
+ wc=wc,
+ )
+
+
def graft(
- repo, ctx, base, labels=None, keepparent=False, keepconflictparent=False
+ repo,
+ ctx,
+ base=None,
+ labels=None,
+ keepparent=False,
+ keepconflictparent=False,
+ wctx=None,
):
"""Do a graft-like merge.
@@ -2593,7 +2690,7 @@
renames/copies appropriately.
ctx - changeset to rebase
- base - merge base, usually ctx.p1()
+ base - merge base, or ctx.p1() if not specified
labels - merge labels eg ['local', 'graft']
keepparent - keep second parent if any
keepconflictparent - if unresolved, keep parent used for the merge
@@ -2605,9 +2702,15 @@
# to copy commits), and 2) informs update that the incoming changes are
# newer than the destination so it doesn't prompt about "remote changed foo
# which local deleted".
- wctx = repo[None]
+ # We also pass mergeancestor=True when base is the same revision as p1. 2)
+ # doesn't matter as there can't possibly be conflicts, but 1) is necessary.
+ wctx = wctx or repo[None]
pctx = wctx.p1()
- mergeancestor = repo.changelog.isancestor(pctx.node(), ctx.node())
+ base = base or ctx.p1()
+ mergeancestor = (
+ repo.changelog.isancestor(pctx.node(), ctx.node())
+ or pctx.rev() == base.rev()
+ )
stats = update(
repo,
@@ -2617,6 +2720,7 @@
base.node(),
mergeancestor=mergeancestor,
labels=labels,
+ wc=wctx,
)
if keepconflictparent and stats.unresolvedcount:
@@ -2631,17 +2735,23 @@
if pother == pctx.node():
pother = nullid
- with repo.dirstate.parentchange():
- repo.setparents(pctx.node(), pother)
- repo.dirstate.write(repo.currenttransaction())
+ if wctx.isinmemory():
+ wctx.setparents(pctx.node(), pother)
# fix up dirstate for copies and renames
copies.graftcopies(wctx, ctx, base)
+ else:
+ with repo.dirstate.parentchange():
+ repo.setparents(pctx.node(), pother)
+ repo.dirstate.write(repo.currenttransaction())
+ # fix up dirstate for copies and renames
+ copies.graftcopies(wctx, ctx, base)
return stats
def purge(
repo,
matcher,
+ unknown=True,
ignored=False,
removeemptydirs=True,
removefiles=True,
@@ -2653,7 +2763,9 @@
``matcher`` is a matcher configured to scan the working directory -
potentially a subset.
- ``ignored`` controls whether ignored files should also be purged.
+ ``unknown`` controls whether unknown files should be purged.
+
+ ``ignored`` controls whether ignored files should be purged.
``removeemptydirs`` controls whether empty directories should be removed.
@@ -2690,7 +2802,7 @@
directories = []
matcher.traversedir = directories.append
- status = repo.status(match=matcher, ignored=ignored, unknown=True)
+ status = repo.status(match=matcher, ignored=ignored, unknown=unknown)
if removefiles:
for f in sorted(status.unknown + status.ignored):
--- a/mercurial/namespaces.py Mon Apr 13 16:30:13 2020 +0300
+++ b/mercurial/namespaces.py Thu Apr 16 22:51:09 2020 +0530
@@ -83,6 +83,9 @@
def __iter__(self):
return self._names.__iter__()
+ def get(self, namespace, default=None):
+ return self._names.get(namespace, default)
+
def items(self):
return pycompat.iteritems(self._names)
--- a/mercurial/narrowspec.py Mon Apr 13 16:30:13 2020 +0300
+++ b/mercurial/narrowspec.py Thu Apr 16 22:51:09 2020 +0530
@@ -233,21 +233,6 @@
:param repo_includes: repo includes
:param repo_excludes: repo excludes
:return: include patterns, exclude patterns, and invalid include patterns.
-
- >>> restrictpatterns({'f1','f2'}, {}, ['f1'], [])
- (set(['f1']), {}, [])
- >>> restrictpatterns({'f1'}, {}, ['f1','f2'], [])
- (set(['f1']), {}, [])
- >>> restrictpatterns({'f1/fc1', 'f3/fc3'}, {}, ['f1','f2'], [])
- (set(['f1/fc1']), {}, [])
- >>> restrictpatterns({'f1_fc1'}, {}, ['f1','f2'], [])
- ([], set(['path:.']), [])
- >>> restrictpatterns({'f1/../f2/fc2'}, {}, ['f1','f2'], [])
- (set(['f2/fc2']), {}, [])
- >>> restrictpatterns({'f1/../f3/fc3'}, {}, ['f1','f2'], [])
- ([], set(['path:.']), [])
- >>> restrictpatterns({'f1/$non_exitent_var'}, {}, ['f1','f2'], [])
- (set(['f1/$non_exitent_var']), {}, [])
"""
res_excludes = set(req_excludes)
res_excludes.update(repo_excludes)
--- a/mercurial/obsolete.py Mon Apr 13 16:30:13 2020 +0300
+++ b/mercurial/obsolete.py Thu Apr 16 22:51:09 2020 +0530
@@ -939,7 +939,7 @@
getnode = repo.changelog.node
notpublic = _mutablerevs(repo)
isobs = repo.obsstore.successors.__contains__
- obs = set(r for r in notpublic if isobs(getnode(r)))
+ obs = {r for r in notpublic if isobs(getnode(r))}
return obs
@@ -965,7 +965,7 @@
def _computesuspendedset(repo):
"""the set of obsolete parents with non obsolete descendants"""
suspended = repo.changelog.ancestors(getrevs(repo, b'orphan'))
- return set(r for r in getrevs(repo, b'obsolete') if r in suspended)
+ return {r for r in getrevs(repo, b'obsolete') if r in suspended}
@cachefor(b'extinct')
--- a/mercurial/obsutil.py Mon Apr 13 16:30:13 2020 +0300
+++ b/mercurial/obsutil.py Thu Apr 16 22:51:09 2020 +0530
@@ -194,7 +194,7 @@
def _filterprunes(markers):
"""return a set with no prune markers"""
- return set(m for m in markers if m[1])
+ return {m for m in markers if m[1]}
def exclusivemarkers(repo, nodes):
@@ -338,12 +338,12 @@
# compute the whole set of successors or descendants
while len(foreground) != plen:
plen = len(foreground)
- succs = set(c.node() for c in foreground)
+ succs = {c.node() for c in foreground}
mutable = [c.node() for c in foreground if c.mutable()]
succs.update(allsuccessors(repo.obsstore, mutable))
known = (n for n in succs if has_node(n))
foreground = set(repo.set(b'%ln::', known))
- return set(c.node() for c in foreground)
+ return {c.node() for c in foreground}
# effectflag field
@@ -855,11 +855,11 @@
""" Returns a sorted list of markers users without duplicates
"""
markersmeta = [dict(m[3]) for m in markers]
- users = set(
+ users = {
encoding.tolocal(meta[b'user'])
for meta in markersmeta
if meta.get(b'user')
- )
+ }
return sorted(users)
@@ -868,9 +868,9 @@
""" Returns a sorted list of markers operations without duplicates
"""
markersmeta = [dict(m[3]) for m in markers]
- operations = set(
+ operations = {
meta.get(b'operation') for meta in markersmeta if meta.get(b'operation')
- )
+ }
return sorted(operations)
--- a/mercurial/patch.py Mon Apr 13 16:30:13 2020 +0300
+++ b/mercurial/patch.py Thu Apr 16 22:51:09 2020 +0530
@@ -2888,7 +2888,7 @@
or 'rename' (the latter two only if opts.git is set).'''
gone = set()
- copyto = dict([(v, k) for k, v in copy.items()])
+ copyto = {v: k for k, v in copy.items()}
addedset, removedset = set(added), set(removed)
--- a/mercurial/pathutil.py Mon Apr 13 16:30:13 2020 +0300
+++ b/mercurial/pathutil.py Thu Apr 16 22:51:09 2020 +0530
@@ -84,7 +84,7 @@
_(b"path contains illegal component: %s") % path
)
if b'.hg' in _lowerclean(path):
- lparts = [_lowerclean(p.lower()) for p in parts]
+ lparts = [_lowerclean(p) for p in parts]
for p in b'.hg', b'.hg.':
if p in lparts[1:]:
pos = lparts.index(p)
@@ -99,10 +99,11 @@
parts.pop()
normparts.pop()
- prefixes = []
# It's important that we check the path parts starting from the root.
- # This means we won't accidentally traverse a symlink into some other
- # filesystem (which is potentially expensive to access).
+ # We don't want to add "foo/bar/baz" to auditeddir before checking if
+ # there's a "foo/.hg" directory. This also means we won't accidentally
+ # traverse a symlink into some other filesystem (which is potentially
+ # expensive to access).
for i in range(len(parts)):
prefix = pycompat.ossep.join(parts[: i + 1])
normprefix = pycompat.ossep.join(normparts[: i + 1])
@@ -110,13 +111,11 @@
continue
if self._realfs:
self._checkfs(prefix, path)
- prefixes.append(normprefix)
+ if self._cached:
+ self.auditeddir.add(normprefix)
if self._cached:
self.audited.add(normpath)
- # only add prefixes to the cache after checking everything: we don't
- # want to add "foo/bar/baz" before checking if there's a "foo/.hg"
- self.auditeddir.update(prefixes)
def _checkfs(self, prefix, path):
"""raise exception if a file system backed check fails"""
@@ -287,6 +286,9 @@
'''a multiset of directory names from a set of file paths'''
def __init__(self, map, skip=None):
+ '''
+ a dict map indicates a dirstate while a list indicates a manifest
+ '''
self._dirs = {}
addpath = self.addpath
if isinstance(map, dict) and skip is not None:
--- a/mercurial/phases.py Mon Apr 13 16:30:13 2020 +0300
+++ b/mercurial/phases.py Thu Apr 16 22:51:09 2020 +0530
@@ -216,17 +216,101 @@
return headsbyphase
+def _sortedrange_insert(data, idx, rev, t):
+ merge_before = False
+ if idx:
+ r1, t1 = data[idx - 1]
+ merge_before = r1[-1] + 1 == rev and t1 == t
+ merge_after = False
+ if idx < len(data):
+ r2, t2 = data[idx]
+ merge_after = r2[0] == rev + 1 and t2 == t
+
+ if merge_before and merge_after:
+ data[idx - 1] = (pycompat.xrange(r1[0], r2[-1] + 1), t)
+ data.pop(idx)
+ elif merge_before:
+ data[idx - 1] = (pycompat.xrange(r1[0], rev + 1), t)
+ elif merge_after:
+ data[idx] = (pycompat.xrange(rev, r2[-1] + 1), t)
+ else:
+ data.insert(idx, (pycompat.xrange(rev, rev + 1), t))
+
+
+def _sortedrange_split(data, idx, rev, t):
+ r1, t1 = data[idx]
+ if t == t1:
+ return
+ t = (t1[0], t[1])
+ if len(r1) == 1:
+ data.pop(idx)
+ _sortedrange_insert(data, idx, rev, t)
+ elif r1[0] == rev:
+ data[idx] = (pycompat.xrange(rev + 1, r1[-1] + 1), t1)
+ _sortedrange_insert(data, idx, rev, t)
+ elif r1[-1] == rev:
+ data[idx] = (pycompat.xrange(r1[0], rev), t1)
+ _sortedrange_insert(data, idx + 1, rev, t)
+ else:
+ data[idx : idx + 1] = [
+ (pycompat.xrange(r1[0], rev), t1),
+ (pycompat.xrange(rev, rev + 1), t),
+ (pycompat.xrange(rev + 1, r1[-1] + 1), t1),
+ ]
+
+
def _trackphasechange(data, rev, old, new):
- """add a phase move the <data> dictionnary
+ """add a phase move to the <data> list of ranges
If data is None, nothing happens.
"""
if data is None:
return
- existing = data.get(rev)
- if existing is not None:
- old = existing[0]
- data[rev] = (old, new)
+
+ # If data is empty, create a one-revision range and done
+ if not data:
+ data.insert(0, (pycompat.xrange(rev, rev + 1), (old, new)))
+ return
+
+ low = 0
+ high = len(data)
+ t = (old, new)
+ while low < high:
+ mid = (low + high) // 2
+ revs = data[mid][0]
+
+ if rev in revs:
+ _sortedrange_split(data, mid, rev, t)
+ return
+
+ if revs[0] == rev + 1:
+ if mid and data[mid - 1][0][-1] == rev:
+ _sortedrange_split(data, mid - 1, rev, t)
+ else:
+ _sortedrange_insert(data, mid, rev, t)
+ return
+
+ if revs[-1] == rev - 1:
+ if mid + 1 < len(data) and data[mid + 1][0][0] == rev:
+ _sortedrange_split(data, mid + 1, rev, t)
+ else:
+ _sortedrange_insert(data, mid + 1, rev, t)
+ return
+
+ if revs[0] > rev:
+ high = mid
+ else:
+ low = mid + 1
+
+ if low == len(data):
+ data.append((pycompat.xrange(rev, rev + 1), t))
+ return
+
+ r1, t1 = data[low]
+ if r1[0] > rev:
+ data.insert(low, (pycompat.xrange(rev, rev + 1), t))
+ else:
+ data.insert(low + 1, (pycompat.xrange(rev, rev + 1), t))
class phasecache(object):
@@ -400,8 +484,9 @@
phasetracking = tr.changes[b'phases']
torev = repo.changelog.rev
phase = self.phase
- for n in nodes:
- rev = torev(n)
+ revs = [torev(node) for node in nodes]
+ revs.sort()
+ for rev in revs:
revphase = phase(repo, rev)
_trackphasechange(phasetracking, rev, None, revphase)
repo.invalidatevolatilesets()
@@ -445,10 +530,10 @@
phasetracking, r, self.phase(repo, r), targetphase
)
- roots = set(
+ roots = {
ctx.node()
for ctx in repo.set(b'roots((%ln::) - %ld)', olds, affected)
- )
+ }
if olds != roots:
self._updateroots(phase, roots, tr)
# some roots may need to be declared for lower phases
@@ -485,7 +570,7 @@
affected -= revs
else: # public phase
revs = affected
- for r in revs:
+ for r in sorted(revs):
_trackphasechange(phasetracking, r, phase, targetphase)
repo.invalidatevolatilesets()
@@ -518,9 +603,7 @@
]
updatedroots = repo.set(b'roots(%ln::)', aboveroots)
- finalroots = set(
- n for n in currentroots if repo[n].rev() < minnewroot
- )
+ finalroots = {n for n in currentroots if repo[n].rev() < minnewroot}
finalroots.update(ctx.node() for ctx in updatedroots)
if finalroots != oldroots:
self._updateroots(targetphase, finalroots, tr)
@@ -760,7 +843,7 @@
if not heads or heads == [nullid]:
return []
# The logic operated on revisions, convert arguments early for convenience
- new_heads = set(rev(n) for n in heads if n != nullid)
+ new_heads = {rev(n) for n in heads if n != nullid}
roots = [rev(n) for n in roots]
# compute the area we need to remove
affected_zone = repo.revs(b"(%ld::%ld)", roots, new_heads)
--- a/mercurial/posix.py Mon Apr 13 16:30:13 2020 +0300
+++ b/mercurial/posix.py Thu Apr 16 22:51:09 2020 +0530
@@ -324,9 +324,8 @@
open(fullpath, b'w').close()
except IOError as inst:
if (
- inst[0] # pytype: disable=unsupported-operands
- == errno.EACCES
- ):
+ inst[0] == errno.EACCES
+ ): # pytype: disable=unsupported-operands
# If we can't write to cachedir, just pretend
# that the fs is readonly and by association
# that the fs won't support symlinks. This
--- a/mercurial/profiling.py Mon Apr 13 16:30:13 2020 +0300
+++ b/mercurial/profiling.py Thu Apr 16 22:51:09 2020 +0530
@@ -186,6 +186,7 @@
self._output = None
self._fp = None
self._fpdoclose = True
+ self._flushfp = None
self._profiler = None
self._enabled = enabled
self._entered = False
@@ -246,6 +247,8 @@
else:
self._fpdoclose = False
self._fp = self._ui.ferr
+ # Ensure we've flushed fout before writing to ferr.
+ self._flushfp = self._ui.fout
if proffn is not None:
pass
@@ -265,6 +268,7 @@
def __exit__(self, exception_type, exception_value, traceback):
propagate = None
if self._profiler is not None:
+ self._uiflush()
propagate = self._profiler.__exit__(
exception_type, exception_value, traceback
)
@@ -280,3 +284,7 @@
def _closefp(self):
if self._fpdoclose and self._fp is not None:
self._fp.close()
+
+ def _uiflush(self):
+ if self._flushfp:
+ self._flushfp.flush()
--- a/mercurial/pure/parsers.py Mon Apr 13 16:30:13 2020 +0300
+++ b/mercurial/pure/parsers.py Thu Apr 16 22:51:09 2020 +0530
@@ -141,6 +141,50 @@
self._extra = self._extra[: i - self._lgt]
+class PersistentNodeMapIndexObject(IndexObject):
+ """a Debug oriented class to test persistent nodemap
+
+ We need a simple python object to test API and higher level behavior. See
+ the Rust implementation for more serious usage. This should be used only
+ through the dedicated `devel.persistent-nodemap` config.
+ """
+
+ def nodemap_data_all(self):
+ """Return bytes containing a full serialization of a nodemap
+
+ The nodemap should be valid for the full set of revisions in the
+ index."""
+ return nodemaputil.persistent_data(self)
+
+ def nodemap_data_incremental(self):
+ """Return bytes containing an incremental update to persistent nodemap
+
+ This contains the data for an append-only update of the data provided
+ in the last call to `update_nodemap_data`.
+ """
+ if self._nm_root is None:
+ return None
+ docket = self._nm_docket
+ changed, data = nodemaputil.update_persistent_data(
+ self, self._nm_root, self._nm_max_idx, self._nm_docket.tip_rev
+ )
+
+ self._nm_root = self._nm_max_idx = self._nm_docket = None
+ return docket, changed, data
+
+ def update_nodemap_data(self, docket, nm_data):
+ """provide full block of persisted binary data for a nodemap
+
+ The data are expected to come from disk. See `nodemap_data_all` for a
+ producer of such data."""
+ if nm_data is not None:
+ self._nm_root, self._nm_max_idx = nodemaputil.parse_data(nm_data)
+ if self._nm_root:
+ self._nm_docket = docket
+ else:
+ self._nm_root = self._nm_max_idx = self._nm_docket = None
+
+
class InlinedIndexObject(BaseIndexObject):
def __init__(self, data, inline=0):
self._data = data
@@ -188,6 +232,12 @@
return InlinedIndexObject(data, inline), (0, data)
+def parse_index_devel_nodemap(data, inline):
+ """like parse_index2, but always return a PersistentNodeMapIndexObject
+ """
+ return PersistentNodeMapIndexObject(data), None
+
+
def parse_dirstate(dmap, copymap, st):
parents = [st[:20], st[20:40]]
# dereference fields so they will be local in loop
--- a/mercurial/pvec.py Mon Apr 13 16:30:13 2020 +0300
+++ b/mercurial/pvec.py Thu Apr 16 22:51:09 2020 +0530
@@ -48,7 +48,7 @@
different branches
'''
-from __future__ import absolute_import, division
+from __future__ import absolute_import
from .node import nullrev
from . import (
--- a/mercurial/pycompat.py Mon Apr 13 16:30:13 2020 +0300
+++ b/mercurial/pycompat.py Thu Apr 16 22:51:09 2020 +0530
@@ -98,6 +98,7 @@
import codecs
import functools
import io
+ import locale
import struct
if os.name == r'nt' and sys.version_info >= (3, 6):
@@ -148,15 +149,36 @@
stdout = sys.stdout.buffer
stderr = sys.stderr.buffer
- # Since Python 3 converts argv to wchar_t type by Py_DecodeLocale() on Unix,
- # we can use os.fsencode() to get back bytes argv.
- #
- # https://hg.python.org/cpython/file/v3.5.1/Programs/python.c#l55
- #
- # On Windows, the native argv is unicode and is converted to MBCS bytes
- # since we do enable the legacy filesystem encoding.
if getattr(sys, 'argv', None) is not None:
- sysargv = list(map(os.fsencode, sys.argv))
+ # On POSIX, the char** argv array is converted to Python str using
+ # Py_DecodeLocale(). The inverse of this is Py_EncodeLocale(), which isn't
+ # directly callable from Python code. So, we need to emulate it.
+ # Py_DecodeLocale() calls mbstowcs() and falls back to mbrtowc() with
+ # surrogateescape error handling on failure. These functions take the
+ # current system locale into account. So, the inverse operation is to
+ # .encode() using the system locale's encoding and using the
+ # surrogateescape error handler. The only tricky part here is getting
+ # the system encoding correct, since `locale.getlocale()` can return
+ # None. We fall back to the filesystem encoding if lookups via `locale`
+ # fail, as this seems like a reasonable thing to do.
+ #
+ # On Windows, the wchar_t **argv is passed into the interpreter as-is.
+ # Like POSIX, we need to emulate what Py_EncodeLocale() would do. But
+ # there's an additional wrinkle. What we really want to access is the
+ # ANSI codepage representation of the arguments, as this is what
+ # `int main()` would receive if Python 3 didn't define `int wmain()`
+ # (this is how Python 2 worked). To get that, we encode with the mbcs
+ # encoding, which will pass CP_ACP to the underlying Windows API to
+ # produce bytes.
+ if os.name == r'nt':
+ sysargv = [a.encode("mbcs", "ignore") for a in sys.argv]
+ else:
+ encoding = (
+ locale.getlocale()[1]
+ or locale.getdefaultlocale()[1]
+ or sys.getfilesystemencoding()
+ )
+ sysargv = [a.encode(encoding, "surrogateescape") for a in sys.argv]
bytechr = struct.Struct('>B').pack
byterepr = b'%r'.__mod__
@@ -334,7 +356,7 @@
they can be passed as keyword arguments as dictonaries with bytes keys
can't be passed as keyword arguments to functions on Python 3.
"""
- dic = dict((k.decode('latin-1'), v) for k, v in dic.items())
+ dic = {k.decode('latin-1'): v for k, v in dic.items()}
return dic
def byteskwargs(dic):
@@ -342,7 +364,7 @@
Converts keys of python dictonaries to bytes as they were converted to
str to pass that dictonary as a keyword argument on Python 3.
"""
- dic = dict((k.encode('latin-1'), v) for k, v in dic.items())
+ dic = {k.encode('latin-1'): v for k, v in dic.items()}
return dic
# TODO: handle shlex.shlex().
--- a/mercurial/repair.py Mon Apr 13 16:30:13 2020 +0300
+++ b/mercurial/repair.py Thu Apr 16 22:51:09 2020 +0530
@@ -351,7 +351,7 @@
def safestriproots(ui, repo, nodes):
"""return list of roots of nodes where descendants are covered by nodes"""
torev = repo.unfiltered().changelog.rev
- revs = set(torev(n) for n in nodes)
+ revs = {torev(n) for n in nodes}
# tostrip = wanted - unsafe = wanted - ancestors(orphaned)
# orphaned = affected - wanted
# affected = descendants(roots(wanted))
--- a/mercurial/revlog.py Mon Apr 13 16:30:13 2020 +0300
+++ b/mercurial/revlog.py Thu Apr 16 22:51:09 2020 +0530
@@ -352,6 +352,21 @@
return p
+NodemapRevlogIO = None
+
+if util.safehasattr(parsers, 'parse_index_devel_nodemap'):
+
+ class NodemapRevlogIO(revlogio):
+ """A debug oriented IO class that returns a PersistentNodeMapIndexObject
+
+ The PersistentNodeMapIndexObject object is meant to test the persistent nodemap feature.
+ """
+
+ def parseindex(self, data, inline):
+ index, cache = parsers.parse_index_devel_nodemap(data, inline)
+ return index, cache
+
+
class rustrevlogio(revlogio):
def parseindex(self, data, inline):
index, cache = super(rustrevlogio, self).parseindex(data, inline)
@@ -407,6 +422,7 @@
mmaplargeindex=False,
censorable=False,
upperboundcomp=None,
+ persistentnodemap=False,
):
"""
create a revlog object
@@ -418,6 +434,17 @@
self.upperboundcomp = upperboundcomp
self.indexfile = indexfile
self.datafile = datafile or (indexfile[:-2] + b".d")
+ self.nodemap_file = None
+ if persistentnodemap:
+ if indexfile.endswith(b'.a'):
+ pending_path = indexfile[:-4] + b".n.a"
+ if opener.exists(pending_path):
+ self.nodemap_file = pending_path
+ else:
+ self.nodemap_file = indexfile[:-4] + b".n"
+ else:
+ self.nodemap_file = indexfile[:-2] + b".n"
+
self.opener = opener
# When True, indexfile is opened with checkambig=True at writing, to
# avoid file stat ambiguity.
@@ -435,6 +462,7 @@
self._maxchainlen = None
self._deltabothparents = True
self.index = None
+ self._nodemap_docket = None
# Mapping of partial identifiers to full nodes.
self._pcache = {}
# Mapping of revision integer to full node.
@@ -591,13 +619,42 @@
self._storedeltachains = True
+ devel_nodemap = (
+ self.nodemap_file
+ and opts.get(b'devel-force-nodemap', False)
+ and NodemapRevlogIO is not None
+ )
+
+ use_rust_index = False
+ if rustrevlog is not None:
+ if self.nodemap_file is not None:
+ use_rust_index = True
+ else:
+ use_rust_index = self.opener.options.get(b'rust.index')
+
self._io = revlogio()
if self.version == REVLOGV0:
self._io = revlogoldio()
- elif rustrevlog is not None and self.opener.options.get(b'rust.index'):
+ elif devel_nodemap:
+ self._io = NodemapRevlogIO()
+ elif use_rust_index:
self._io = rustrevlogio()
try:
d = self._io.parseindex(indexdata, self._inline)
+ index, _chunkcache = d
+ use_nodemap = (
+ not self._inline
+ and self.nodemap_file is not None
+ and util.safehasattr(index, 'update_nodemap_data')
+ )
+ if use_nodemap:
+ nodemap_data = nodemaputil.persisted_data(self)
+ if nodemap_data is not None:
+ docket = nodemap_data[0]
+ if d[0][docket.tip_rev][7] == docket.tip_node:
+ # no changelog tampering
+ self._nodemap_docket = docket
+ index.update_nodemap_data(*nodemap_data)
except (ValueError, IndexError):
raise error.RevlogError(
_(b"index %s is corrupted") % self.indexfile
@@ -708,12 +765,32 @@
return False
return True
+ def update_caches(self, transaction):
+ if self.nodemap_file is not None:
+ if transaction is None:
+ nodemaputil.update_persistent_nodemap(self)
+ else:
+ nodemaputil.setup_persistent_nodemap(transaction, self)
+
def clearcaches(self):
self._revisioncache = None
self._chainbasecache.clear()
self._chunkcache = (0, b'')
self._pcache = {}
+ self._nodemap_docket = None
self.index.clearcaches()
+ # The python code is the one responsible for validating the docket, we
+ # end up having to refresh it here.
+ use_nodemap = (
+ not self._inline
+ and self.nodemap_file is not None
+ and util.safehasattr(self.index, 'update_nodemap_data')
+ )
+ if use_nodemap:
+ nodemap_data = nodemaputil.persisted_data(self)
+ if nodemap_data is not None:
+ self._nodemap_docket = nodemap_data[0]
+ self.index.update_nodemap_data(*nodemap_data)
def rev(self, node):
try:
@@ -898,9 +975,6 @@
if rustancestor is not None:
lazyancestors = rustancestor.LazyAncestors
arg = self.index
- elif util.safehasattr(parsers, b'rustlazyancestors'):
- lazyancestors = ancestor.rustlazyancestors
- arg = self.index
else:
lazyancestors = ancestor.lazyancestors
arg = self._uncheckedparentrevs
@@ -1239,7 +1313,7 @@
else:
start = self.rev(start)
- stoprevs = set(self.rev(n) for n in stop or [])
+ stoprevs = {self.rev(n) for n in stop or []}
revs = dagop.headrevssubset(
self.revs, self.parentrevs, startrev=start, stoprevs=stoprevs
@@ -1960,6 +2034,7 @@
# manager
tr.replace(self.indexfile, trindex * self._io.size)
+ nodemaputil.setup_persistent_nodemap(tr, self)
self._chunkclear()
def _nodeduplicatecallback(self, transaction, node):
@@ -2286,6 +2361,7 @@
ifh.write(data[0])
ifh.write(data[1])
self._enforceinlinesize(transaction, ifh)
+ nodemaputil.setup_persistent_nodemap(transaction, self)
def addgroup(self, deltas, linkmapper, transaction, addrevisioncb=None):
"""
--- a/mercurial/revlogutils/nodemap.py Mon Apr 13 16:30:13 2020 +0300
+++ b/mercurial/revlogutils/nodemap.py Thu Apr 16 22:51:09 2020 +0530
@@ -7,9 +7,622 @@
# GNU General Public License version 2 or any later version.
from __future__ import absolute_import
-from .. import error
+
+import errno
+import os
+import re
+import struct
+
+from .. import (
+ error,
+ node as nodemod,
+ util,
+)
class NodeMap(dict):
def __missing__(self, x):
raise error.RevlogError(b'unknown node: %s' % x)
+
+
+def persisted_data(revlog):
+ """read the nodemap for a revlog from disk"""
+ if revlog.nodemap_file is None:
+ return None
+ pdata = revlog.opener.tryread(revlog.nodemap_file)
+ if not pdata:
+ return None
+ offset = 0
+ (version,) = S_VERSION.unpack(pdata[offset : offset + S_VERSION.size])
+ if version != ONDISK_VERSION:
+ return None
+ offset += S_VERSION.size
+ headers = S_HEADER.unpack(pdata[offset : offset + S_HEADER.size])
+ uid_size, tip_rev, data_length, data_unused, tip_node_size = headers
+ offset += S_HEADER.size
+ docket = NodeMapDocket(pdata[offset : offset + uid_size])
+ offset += uid_size
+ docket.tip_rev = tip_rev
+ docket.tip_node = pdata[offset : offset + tip_node_size]
+ docket.data_length = data_length
+ docket.data_unused = data_unused
+
+ filename = _rawdata_filepath(revlog, docket)
+ use_mmap = revlog.opener.options.get(b"exp-persistent-nodemap.mmap")
+ try:
+ with revlog.opener(filename) as fd:
+ if use_mmap:
+ data = util.buffer(util.mmapread(fd, data_length))
+ else:
+ data = fd.read(data_length)
+ except OSError as e:
+ if e.errno != errno.ENOENT:
+ raise
+ if len(data) < data_length:
+ return None
+ return docket, data
+
+
+def setup_persistent_nodemap(tr, revlog):
+ """Install whatever is needed transaction side to persist a nodemap on disk
+
+ (only actually persist the nodemap if this is relevant for this revlog)
+ """
+ if revlog._inline:
+ return # inlined revlog are too small for this to be relevant
+ if revlog.nodemap_file is None:
+ return # we do not use persistent_nodemap on this revlog
+
+ # we need to happen after the changelog finalization, in that use "cl-"
+ callback_id = b"nm-revlog-persistent-nodemap-%s" % revlog.nodemap_file
+ if tr.hasfinalize(callback_id):
+ return # no need to register again
+ tr.addpending(
+ callback_id, lambda tr: _persist_nodemap(tr, revlog, pending=True)
+ )
+ tr.addfinalize(callback_id, lambda tr: _persist_nodemap(tr, revlog))
+
+
+class _NoTransaction(object):
+ """transaction like object to update the nodemap outside a transaction
+ """
+
+ def __init__(self):
+ self._postclose = {}
+
+ def addpostclose(self, callback_id, callback_func):
+ self._postclose[callback_id] = callback_func
+
+ def registertmp(self, *args, **kwargs):
+ pass
+
+ def addbackup(self, *args, **kwargs):
+ pass
+
+ def add(self, *args, **kwargs):
+ pass
+
+ def addabort(self, *args, **kwargs):
+ pass
+
+
+def update_persistent_nodemap(revlog):
+ """update the persistent nodemap right now
+
+ To be used for updating the nodemap on disk outside of a normal transaction
+ setup (eg, `debugupdatecache`).
+ """
+ notr = _NoTransaction()
+ _persist_nodemap(notr, revlog)
+ for k in sorted(notr._postclose):
+ notr._postclose[k](None)
+
+
+def _persist_nodemap(tr, revlog, pending=False):
+ """Write nodemap data on disk for a given revlog
+ """
+ if getattr(revlog, 'filteredrevs', ()):
+ raise error.ProgrammingError(
+ "cannot persist nodemap of a filtered changelog"
+ )
+ if revlog.nodemap_file is None:
+ msg = "calling persist nodemap on a revlog without the feature enabled"
+ raise error.ProgrammingError(msg)
+
+ can_incremental = util.safehasattr(revlog.index, "nodemap_data_incremental")
+ ondisk_docket = revlog._nodemap_docket
+ feed_data = util.safehasattr(revlog.index, "update_nodemap_data")
+ use_mmap = revlog.opener.options.get(b"exp-persistent-nodemap.mmap")
+
+ data = None
+ # first attempt an incremental update of the data
+ if can_incremental and ondisk_docket is not None:
+ target_docket = revlog._nodemap_docket.copy()
+ (
+ src_docket,
+ data_changed_count,
+ data,
+ ) = revlog.index.nodemap_data_incremental()
+ new_length = target_docket.data_length + len(data)
+ new_unused = target_docket.data_unused + data_changed_count
+ if src_docket != target_docket:
+ data = None
+ elif new_length <= (new_unused * 10): # under 10% of unused data
+ data = None
+ else:
+ datafile = _rawdata_filepath(revlog, target_docket)
+ # EXP-TODO: if this is a cache, this should use a cache vfs, not a
+ # store vfs
+ tr.add(datafile, target_docket.data_length)
+ with revlog.opener(datafile, b'r+') as fd:
+ fd.seek(target_docket.data_length)
+ fd.write(data)
+ if feed_data:
+ if use_mmap:
+ fd.seek(0)
+ new_data = fd.read(new_length)
+ else:
+ fd.flush()
+ new_data = util.buffer(util.mmapread(fd, new_length))
+ target_docket.data_length = new_length
+ target_docket.data_unused = new_unused
+
+ if data is None:
+ # otherwise fallback to a full new export
+ target_docket = NodeMapDocket()
+ datafile = _rawdata_filepath(revlog, target_docket)
+ if util.safehasattr(revlog.index, "nodemap_data_all"):
+ data = revlog.index.nodemap_data_all()
+ else:
+ data = persistent_data(revlog.index)
+ # EXP-TODO: if this is a cache, this should use a cache vfs, not a
+ # store vfs
+
+ tryunlink = revlog.opener.tryunlink
+
+ def abortck(tr):
+ tryunlink(datafile)
+
+ callback_id = b"delete-%s" % datafile
+
+ # some flavor of the transaction abort does not clean up new files, it
+ # simply empties them.
+ tr.addabort(callback_id, abortck)
+ with revlog.opener(datafile, b'w+') as fd:
+ fd.write(data)
+ if feed_data:
+ if use_mmap:
+ new_data = data
+ else:
+ fd.flush()
+ new_data = util.buffer(util.mmapread(fd, len(data)))
+ target_docket.data_length = len(data)
+ target_docket.tip_rev = revlog.tiprev()
+ target_docket.tip_node = revlog.node(target_docket.tip_rev)
+ # EXP-TODO: if this is a cache, this should use a cache vfs, not a
+ # store vfs
+ file_path = revlog.nodemap_file
+ if pending:
+ file_path += b'.a'
+ tr.registertmp(file_path)
+ else:
+ tr.addbackup(file_path)
+
+ with revlog.opener(file_path, b'w', atomictemp=True) as fp:
+ fp.write(target_docket.serialize())
+ revlog._nodemap_docket = target_docket
+ if feed_data:
+ revlog.index.update_nodemap_data(target_docket, new_data)
+
+ # search for old index file in all cases, some older process might have
+ # left one behind.
+ olds = _other_rawdata_filepath(revlog, target_docket)
+ if olds:
+ realvfs = getattr(revlog, '_realopener', revlog.opener)
+
+ def cleanup(tr):
+ for oldfile in olds:
+ realvfs.tryunlink(oldfile)
+
+ callback_id = b"revlog-cleanup-nodemap-%s" % revlog.nodemap_file
+ tr.addpostclose(callback_id, cleanup)
+
+
+### Nodemap docket file
+#
+# The nodemap data are stored on disk using 2 files:
+#
+# * a raw data files containing a persistent nodemap
+# (see `Nodemap Trie` section)
+#
+ # * a small "docket" file containing metadata
+#
+# While the nodemap data can be multiple tens of megabytes, the "docket" is
+ # small, it is easy to update it automatically or to duplicate its content
+# during a transaction.
+#
+# Multiple raw data can exist at the same time (The currently valid one and a
+ # new one being used by an in-progress transaction). To accommodate this, the
+ # filename hosting the raw data has a variable part. The exact filename is
+# specified inside the "docket" file.
+#
+# The docket file contains information to find, qualify and validate the raw
+# data. Its content is currently very light, but it will expand as the on disk
+# nodemap gains the necessary features to be used in production.
+
+ # version 0 is experimental, no BC guarantee, do not use outside of tests.
+ONDISK_VERSION = 0
+S_VERSION = struct.Struct(">B")
+S_HEADER = struct.Struct(">BQQQQ")
+
+ID_SIZE = 8
+
+
+def _make_uid():
+ """return a new unique identifier.
+
+ The identifier is random and composed of ascii characters."""
+ return nodemod.hex(os.urandom(ID_SIZE))
+
+
+class NodeMapDocket(object):
+ """metadata associated with persistent nodemap data
+
+ The persistent data may come from disk or be on their way to disk.
+ """
+
+ def __init__(self, uid=None):
+ if uid is None:
+ uid = _make_uid()
+ # a unique identifier for the data file:
+ # - When new data are appended, it is preserved.
+ # - When a new data file is created, a new identifier is generated.
+ self.uid = uid
+ # the tipmost revision stored in the data file. This revision and all
+ # revision before it are expected to be encoded in the data file.
+ self.tip_rev = None
+ # the node of that tipmost revision, if it mismatch the current index
+ # data the docket is not valid for the current index and should be
+ # discarded.
+ #
+ # note: this method is not perfect as some destructive operation could
+ # preserve the same tip_rev + tip_node while altering lower revision.
+ # However multiple other caches have the same vulnerability (eg:
+ # branchmap cache).
+ self.tip_node = None
+ # the size (in bytes) of the persisted data to encode the nodemap valid
+ # for `tip_rev`.
+ # - data file shorter than this are corrupted,
+ # - any extra data should be ignored.
+ self.data_length = None
+ # the amount (in bytes) of "dead" data, still in the data file but no
+ # longer used for the nodemap.
+ self.data_unused = 0
+
+ def copy(self):
+ new = NodeMapDocket(uid=self.uid)
+ new.tip_rev = self.tip_rev
+ new.tip_node = self.tip_node
+ new.data_length = self.data_length
+ new.data_unused = self.data_unused
+ return new
+
+ def __cmp__(self, other):
+ if self.uid < other.uid:
+ return -1
+ if self.uid > other.uid:
+ return 1
+ elif self.data_length < other.data_length:
+ return -1
+ elif self.data_length > other.data_length:
+ return 1
+ return 0
+
+ def __eq__(self, other):
+ return self.uid == other.uid and self.data_length == other.data_length
+
+ def serialize(self):
+ """return serialized bytes for a docket using the passed uid"""
+ data = []
+ data.append(S_VERSION.pack(ONDISK_VERSION))
+ headers = (
+ len(self.uid),
+ self.tip_rev,
+ self.data_length,
+ self.data_unused,
+ len(self.tip_node),
+ )
+ data.append(S_HEADER.pack(*headers))
+ data.append(self.uid)
+ data.append(self.tip_node)
+ return b''.join(data)
+
+
+def _rawdata_filepath(revlog, docket):
+ """The (vfs relative) nodemap's rawdata file for a given uid"""
+ if revlog.nodemap_file.endswith(b'.n.a'):
+ prefix = revlog.nodemap_file[:-4]
+ else:
+ prefix = revlog.nodemap_file[:-2]
+ return b"%s-%s.nd" % (prefix, docket.uid)
+
+
+def _other_rawdata_filepath(revlog, docket):
+ prefix = revlog.nodemap_file[:-2]
+ pattern = re.compile(br"(^|/)%s-[0-9a-f]+\.nd$" % prefix)
+ new_file_path = _rawdata_filepath(revlog, docket)
+ new_file_name = revlog.opener.basename(new_file_path)
+ dirpath = revlog.opener.dirname(new_file_path)
+ others = []
+ for f in revlog.opener.listdir(dirpath):
+ if pattern.match(f) and f != new_file_name:
+ others.append(f)
+ return others
+
+
+### Nodemap Trie
+#
+# This is a simple reference implementation to compute and persist a nodemap
+# trie. This reference implementation is write only. The python version of this
+ # is not expected to be actually used, since it won't provide performance
+# improvement over existing non-persistent C implementation.
+#
+# The nodemap is persisted as Trie using 4bits-address/16-entries block. each
+ # revision can be addressed using its node shortest prefix.
+#
+ # The trie is stored as a sequence of blocks. Each block contains 16 entries
+# (signed 64bit integer, big endian). Each entry can be one of the following:
+#
+# * value >= 0 -> index of sub-block
+# * value == -1 -> no value
+ # * value < -1 -> a revision value: rev = -(value+2)
+#
+# The implementation focus on simplicity, not on performance. A Rust
+ # implementation should provide an efficient version of the same binary
+# persistence. This reference python implementation is never meant to be
+# extensively use in production.
+
+
+def persistent_data(index):
+ """return the persistent binary form for a nodemap for a given index
+ """
+ trie = _build_trie(index)
+ return _persist_trie(trie)
+
+
+def update_persistent_data(index, root, max_idx, last_rev):
+ """return the incremental update for persistent nodemap from a given index
+ """
+ changed_block, trie = _update_trie(index, root, last_rev)
+ return (
+ changed_block * S_BLOCK.size,
+ _persist_trie(trie, existing_idx=max_idx),
+ )
+
+
+S_BLOCK = struct.Struct(">" + ("l" * 16))
+
+NO_ENTRY = -1
+# rev 0 need to be -2 because 0 is used by block, -1 is a special value.
+REV_OFFSET = 2
+
+
+def _transform_rev(rev):
+ """Return the number used to represent the rev in the tree.
+
+ (or retrieve a rev number from such representation)
+
+ Note that this is an involution, a function equal to its inverse (i.e.
+ which gives the identity when applied to itself).
+ """
+ return -(rev + REV_OFFSET)
+
+
+def _to_int(hex_digit):
+ """turn an hexadecimal digit into a proper integer"""
+ return int(hex_digit, 16)
+
+
+class Block(dict):
+ """represent a block of the Trie
+
+ contains up to 16 entries indexed from 0 to 15"""
+
+ def __init__(self):
+ super(Block, self).__init__()
+ # If this block exists on disk, here is its ID
+ self.ondisk_id = None
+
+ def __iter__(self):
+ return iter(self.get(i) for i in range(16))
+
+
+def _build_trie(index):
+ """build a nodemap trie
+
+ The nodemap stores revision number for each unique prefix.
+
+ Each block is a dictionary with keys in `[0, 15]`. Values are either
+ another block or a revision number.
+ """
+ root = Block()
+ for rev in range(len(index)):
+ hex = nodemod.hex(index[rev][7])
+ _insert_into_block(index, 0, root, rev, hex)
+ return root
+
+
+def _update_trie(index, root, last_rev):
+ """consume"""
+ changed = 0
+ for rev in range(last_rev + 1, len(index)):
+ hex = nodemod.hex(index[rev][7])
+ changed += _insert_into_block(index, 0, root, rev, hex)
+ return changed, root
+
+
+def _insert_into_block(index, level, block, current_rev, current_hex):
+ """insert a new revision in a block
+
+ index: the index we are adding revision for
+ level: the depth of the current block in the trie
+ block: the block currently being considered
+ current_rev: the revision number we are adding
+ current_hex: the hexadecimal representation of that revision
+ """
+ changed = 1
+ if block.ondisk_id is not None:
+ block.ondisk_id = None
+ hex_digit = _to_int(current_hex[level : level + 1])
+ entry = block.get(hex_digit)
+ if entry is None:
+ # no entry, simply store the revision number
+ block[hex_digit] = current_rev
+ elif isinstance(entry, dict):
+ # need to recurse to an underlying block
+ changed += _insert_into_block(
+ index, level + 1, entry, current_rev, current_hex
+ )
+ else:
+ # collision with a previously unique prefix, inserting new
+ # vertices to fit both entry.
+ other_hex = nodemod.hex(index[entry][7])
+ other_rev = entry
+ new = Block()
+ block[hex_digit] = new
+ _insert_into_block(index, level + 1, new, other_rev, other_hex)
+ _insert_into_block(index, level + 1, new, current_rev, current_hex)
+ return changed
+
+
+def _persist_trie(root, existing_idx=None):
+ """turn a nodemap trie into persistent binary data
+
+ See `_build_trie` for nodemap trie structure"""
+ block_map = {}
+ if existing_idx is not None:
+ base_idx = existing_idx + 1
+ else:
+ base_idx = 0
+ chunks = []
+ for tn in _walk_trie(root):
+ if tn.ondisk_id is not None:
+ block_map[id(tn)] = tn.ondisk_id
+ else:
+ block_map[id(tn)] = len(chunks) + base_idx
+ chunks.append(_persist_block(tn, block_map))
+ return b''.join(chunks)
+
+
+def _walk_trie(block):
+ """yield all the block in a trie
+
+ Children blocks are always yielded before their parent block.
+ """
+ for (__, item) in sorted(block.items()):
+ if isinstance(item, dict):
+ for sub_block in _walk_trie(item):
+ yield sub_block
+ yield block
+
+
+def _persist_block(block_node, block_map):
+ """produce persistent binary data for a single block
+
+ Children blocks are assumed to be already persisted and present in
+ block_map.
+ """
+ data = tuple(_to_value(v, block_map) for v in block_node)
+ return S_BLOCK.pack(*data)
+
+
+def _to_value(item, block_map):
+ """persist any value as an integer"""
+ if item is None:
+ return NO_ENTRY
+ elif isinstance(item, dict):
+ return block_map[id(item)]
+ else:
+ return _transform_rev(item)
+
+
+def parse_data(data):
+ """parse nodemap data into a nodemap Trie"""
+ if (len(data) % S_BLOCK.size) != 0:
+ msg = "nodemap data size is not a multiple of block size (%d): %d"
+ raise error.Abort(msg % (S_BLOCK.size, len(data)))
+ if not data:
+ return Block(), None
+ block_map = {}
+ new_blocks = []
+ for i in range(0, len(data), S_BLOCK.size):
+ block = Block()
+ block.ondisk_id = len(block_map)
+ block_map[block.ondisk_id] = block
+ block_data = data[i : i + S_BLOCK.size]
+ values = S_BLOCK.unpack(block_data)
+ new_blocks.append((block, values))
+ for b, values in new_blocks:
+ for idx, v in enumerate(values):
+ if v == NO_ENTRY:
+ continue
+ elif v >= 0:
+ b[idx] = block_map[v]
+ else:
+ b[idx] = _transform_rev(v)
+ return block, i // S_BLOCK.size
+
+
+# debug utility
+
+
+def check_data(ui, index, data):
+ """verify that the provided nodemap data are valid for the given index"""
+ ret = 0
+ ui.status((b"revision in index: %d\n") % len(index))
+ root, __ = parse_data(data)
+ all_revs = set(_all_revisions(root))
+ ui.status((b"revision in nodemap: %d\n") % len(all_revs))
+ for r in range(len(index)):
+ if r not in all_revs:
+ msg = b" revision missing from nodemap: %d\n" % r
+ ui.write_err(msg)
+ ret = 1
+ else:
+ all_revs.remove(r)
+ nm_rev = _find_node(root, nodemod.hex(index[r][7]))
+ if nm_rev is None:
+ msg = b" revision node does not match any entries: %d\n" % r
+ ui.write_err(msg)
+ ret = 1
+ elif nm_rev != r:
+ msg = (
+ b" revision node does not match the expected revision: "
+ b"%d != %d\n" % (r, nm_rev)
+ )
+ ui.write_err(msg)
+ ret = 1
+
+ if all_revs:
+ for r in sorted(all_revs):
+ msg = b" extra revision in nodemap: %d\n" % r
+ ui.write_err(msg)
+ ret = 1
+ return ret
+
+
+def _all_revisions(root):
+ """return all revisions stored in a Trie"""
+ for block in _walk_trie(root):
+ for v in block:
+ if v is None or isinstance(v, Block):
+ continue
+ yield v
+
+
+def _find_node(block, node):
+ """find the revision associated with a given node"""
+ entry = block.get(_to_int(node[0:1]))
+ if isinstance(entry, dict):
+ return _find_node(entry, node[1:])
+ return entry
--- a/mercurial/revset.py Mon Apr 13 16:30:13 2020 +0300
+++ b/mercurial/revset.py Thu Apr 16 22:51:09 2020 +0530
@@ -247,7 +247,15 @@
def relationset(repo, subset, x, y, order):
- raise error.ParseError(_(b"can't use a relation in this context"))
+ # this is pretty basic implementation of 'x#y' operator, still
+ # experimental so undocumented. see the wiki for further ideas.
+ # https://www.mercurial-scm.org/wiki/RevsetOperatorPlan
+ rel = getsymbol(y)
+ if rel in relations:
+ return relations[rel](repo, subset, x, rel, order)
+
+ relnames = [r for r in relations.keys() if len(r) > 1]
+ raise error.UnknownIdentifier(rel, relnames)
def _splitrange(a, b):
@@ -281,7 +289,12 @@
return ancdepths, descdepths
-def generationsrel(repo, subset, x, rel, z, order):
+def generationsrel(repo, subset, x, rel, order):
+ z = (b'rangeall', None)
+ return generationssubrel(repo, subset, x, rel, z, order)
+
+
+def generationssubrel(repo, subset, x, rel, z, order):
# TODO: rewrite tests, and drop startdepth argument from ancestors() and
# descendants() predicates
a, b = getintrange(
@@ -769,6 +782,38 @@
return subset
+@predicate(b'conflictlocal()', safe=True)
+def conflictlocal(repo, subset, x):
+ """The local side of the merge, if currently in an unresolved merge.
+
+ "merge" here includes merge conflicts from e.g. 'hg rebase' or 'hg graft'.
+ """
+ getargs(x, 0, 0, _(b"conflictlocal takes no arguments"))
+ from . import merge
+
+ mergestate = merge.mergestate.read(repo)
+ if mergestate.active() and repo.changelog.hasnode(mergestate.local):
+ return subset & {repo.changelog.rev(mergestate.local)}
+
+ return baseset()
+
+
+@predicate(b'conflictother()', safe=True)
+def conflictother(repo, subset, x):
+ """The other side of the merge, if currently in an unresolved merge.
+
+ "merge" here includes merge conflicts from e.g. 'hg rebase' or 'hg graft'.
+ """
+ getargs(x, 0, 0, _(b"conflictother takes no arguments"))
+ from . import merge
+
+ mergestate = merge.mergestate.read(repo)
+ if mergestate.active() and repo.changelog.hasnode(mergestate.other):
+ return subset & {repo.changelog.rev(mergestate.other)}
+
+ return baseset()
+
+
@predicate(b'contains(pattern)', weight=100)
def contains(repo, subset, x):
"""The revision's manifest contains a file matching pattern (but might not
@@ -1022,7 +1067,7 @@
@predicate(b'extinct()', safe=True)
def extinct(repo, subset, x):
- """Obsolete changesets with obsolete descendants only.
+ """Obsolete changesets with obsolete descendants only. (EXPERIMENTAL)
"""
# i18n: "extinct" is a keyword
getargs(x, 0, 0, _(b"extinct takes no arguments"))
@@ -1670,7 +1715,7 @@
@predicate(b'obsolete()', safe=True)
def obsolete(repo, subset, x):
- """Mutable changeset with a newer version."""
+ """Mutable changeset with a newer version. (EXPERIMENTAL)"""
# i18n: "obsolete" is a keyword
getargs(x, 0, 0, _(b"obsolete takes no arguments"))
obsoletes = obsmod.getrevs(repo, b'obsolete')
@@ -1843,7 +1888,7 @@
The set of all parents for all changesets in set, or the working directory.
"""
if x is None:
- ps = set(p.rev() for p in repo[x].parents())
+ ps = {p.rev() for p in repo[x].parents()}
else:
ps = set()
cl = repo.changelog
@@ -2050,19 +2095,11 @@
@predicate(b'rev(number)', safe=True)
def rev(repo, subset, x):
- """Revision with the given numeric identifier.
- """
- # i18n: "rev" is a keyword
- l = getargs(x, 1, 1, _(b"rev requires one argument"))
+ """Revision with the given numeric identifier."""
try:
- # i18n: "rev" is a keyword
- l = int(getstring(l[0], _(b"rev requires a number")))
- except (TypeError, ValueError):
- # i18n: "rev" is a keyword
- raise error.ParseError(_(b"rev expects a number"))
- if l not in repo.changelog and l not in _virtualrevs:
+ return _rev(repo, subset, x)
+ except error.RepoLookupError:
return baseset()
- return subset & baseset([l])
@predicate(b'_rev(number)', safe=True)
@@ -2076,7 +2113,11 @@
except (TypeError, ValueError):
# i18n: "rev" is a keyword
raise error.ParseError(_(b"rev expects a number"))
- repo.changelog.node(l) # check that the rev exists
+ if l not in _virtualrevs:
+ try:
+ repo.changelog.node(l) # check that the rev exists
+ except IndexError:
+ raise error.RepoLookupError(_(b"unknown revision '%d'") % l)
return subset & baseset([l])
@@ -2405,14 +2446,15 @@
cl = repo.unfiltered().changelog
torev = cl.index.get_rev
tonode = cl.node
- result = set(torev(n) for n in f(tonode(r) for r in s))
+ result = {torev(n) for n in f(tonode(r) for r in s)}
result.discard(None)
return smartset.baseset(result - repo.changelog.filteredrevs)
@predicate(b'successors(set)', safe=True)
def successors(repo, subset, x):
- """All successors for set, including the given set themselves"""
+ """All successors for set, including the given set themselves.
+ (EXPERIMENTAL)"""
s = getset(repo, fullreposet(repo), x)
f = lambda nodes: obsutil.allsuccessors(repo.obsstore, nodes)
d = _mapbynodefunc(repo, s, f)
@@ -2479,6 +2521,19 @@
return subset & orphan
+@predicate(b'unstable()', safe=True)
+def unstable(repo, subset, x):
+ """Changesets with instabilities. (EXPERIMENTAL)
+ """
+ # i18n: "unstable" is a keyword
+ getargs(x, 0, 0, b'unstable takes no arguments')
+ _unstable = set()
+ _unstable.update(obsmod.getrevs(repo, b'orphan'))
+ _unstable.update(obsmod.getrevs(repo, b'phasedivergent'))
+ _unstable.update(obsmod.getrevs(repo, b'contentdivergent'))
+ return subset & baseset(_unstable)
+
+
@predicate(b'user(string)', safe=True, weight=10)
def user(repo, subset, x):
"""User name contains string. The match is case-insensitive.
@@ -2605,11 +2660,16 @@
b"smartset": rawsmartset,
}
-subscriptrelations = {
+relations = {
b"g": generationsrel,
b"generations": generationsrel,
}
+subscriptrelations = {
+ b"g": generationssubrel,
+ b"generations": generationssubrel,
+}
+
def lookupfn(repo):
return lambda symbol: scmutil.isrevsymbol(repo, symbol)
--- a/mercurial/scmutil.py Mon Apr 13 16:30:13 2020 +0300
+++ b/mercurial/scmutil.py Thu Apr 16 22:51:09 2020 +0530
@@ -1457,10 +1457,10 @@
# Merge old parent and old working dir copies
oldcopies = copiesmod.pathcopies(newctx, oldctx, match)
oldcopies.update(copies)
- copies = dict(
- (dst, oldcopies.get(src, src))
+ copies = {
+ dst: oldcopies.get(src, src)
for dst, src in pycompat.iteritems(oldcopies)
- )
+ }
# Adjust the dirstate copies
for dst, src in pycompat.iteritems(copies):
if src not in newctx or dst in newctx or ds[dst] != b'a':
@@ -1900,8 +1900,11 @@
_reportstroubledchangesets = True
-def registersummarycallback(repo, otr, txnname=b''):
+def registersummarycallback(repo, otr, txnname=b'', as_validator=False):
"""register a callback to issue a summary after the transaction is closed
+
+ If as_validator is true, then the callbacks are registered as transaction
+ validators instead
"""
def txmatch(sources):
@@ -1927,7 +1930,10 @@
func(repo, tr)
newcat = b'%02i-txnreport' % len(categories)
- otr.addpostclose(newcat, wrapped)
+ if as_validator:
+ otr.addvalidator(newcat, wrapped)
+ else:
+ otr.addpostclose(newcat, wrapped)
categories.append(newcat)
return wrapped
@@ -1942,6 +1948,8 @@
if cgheads:
htext = _(b" (%+d heads)") % cgheads
msg = _(b"added %d changesets with %d changes to %d files%s\n")
+ if as_validator:
+ msg = _(b"adding %d changesets with %d changes to %d files%s\n")
assert repo is not None # help pytype
repo.ui.status(msg % (cgchangesets, cgrevisions, cgfiles, htext))
@@ -1954,7 +1962,10 @@
if newmarkers:
repo.ui.status(_(b'%i new obsolescence markers\n') % newmarkers)
if obsoleted:
- repo.ui.status(_(b'obsoleted %i changesets\n') % len(obsoleted))
+ msg = _(b'obsoleted %i changesets\n')
+ if as_validator:
+ msg = _(b'obsoleting %i changesets\n')
+ repo.ui.status(msg % len(obsoleted))
if obsolete.isenabled(
repo, obsolete.createmarkersopt
@@ -2047,19 +2058,17 @@
pull/unbundle.
"""
origrepolen = tr.changes.get(b'origrepolen', len(repo))
- phasetracking = tr.changes.get(b'phases', {})
- if not phasetracking:
- return
- published = [
- rev
- for rev, (old, new) in pycompat.iteritems(phasetracking)
- if new == phases.public and rev < origrepolen
- ]
+ published = []
+ for revs, (old, new) in tr.changes.get(b'phases', []):
+ if new != phases.public:
+ continue
+ published.extend(rev for rev in revs if rev < origrepolen)
if not published:
return
- repo.ui.status(
- _(b'%d local changesets published\n') % len(published)
- )
+ msg = _(b'%d local changesets published\n')
+ if as_validator:
+ msg = _(b'%d local changesets will be published\n')
+ repo.ui.status(msg % len(published))
def getinstabilitymessage(delta, instability):
--- a/mercurial/shelve.py Mon Apr 13 16:30:13 2020 +0300
+++ b/mercurial/shelve.py Thu Apr 16 22:51:09 2020 +0530
@@ -745,7 +745,7 @@
try:
checkparents(repo, state)
- merge.update(repo, state.pendingctx, branchmerge=False, force=True)
+ merge.clean_update(state.pendingctx)
if state.activebookmark and state.activebookmark in repo._bookmarks:
bookmarks.activate(repo, state.activebookmark)
mergefiles(ui, repo, state.wctx, state.pendingctx)
@@ -827,10 +827,6 @@
)
if newnode is None:
- # If it ended up being a no-op commit, then the normal
- # merge state clean-up path doesn't happen, so do it
- # here. Fix issue5494
- merge.mergestate.clean(repo)
shelvectx = state.pendingctx
msg = _(
b'note: unshelved changes already existed '
@@ -996,7 +992,6 @@
stats = merge.graft(
repo,
shelvectx,
- shelvectx.p1(),
labels=[b'working-copy', b'shelve'],
keepconflictparent=True,
)
@@ -1032,10 +1027,6 @@
)
if newnode is None:
- # If it ended up being a no-op commit, then the normal
- # merge state clean-up path doesn't happen, so do it
- # here. Fix issue5494
- merge.mergestate.clean(repo)
shelvectx = tmpwctx
msg = _(
b'note: unshelved changes already existed '
@@ -1083,7 +1074,7 @@
raise error.Abort(m, hint=hint)
-def dounshelve(ui, repo, *shelved, **opts):
+def unshelvecmd(ui, repo, *shelved, **opts):
opts = pycompat.byteskwargs(opts)
abortf = opts.get(b'abort')
continuef = opts.get(b'continue')
@@ -1130,6 +1121,10 @@
if not shelvedfile(repo, basename, patchextension).exists():
raise error.Abort(_(b"shelved change '%s' not found") % basename)
+ return _dounshelve(ui, repo, basename, opts)
+
+
+def _dounshelve(ui, repo, basename, opts):
repo = repo.unfiltered()
lock = tr = None
try:
--- a/mercurial/store.py Mon Apr 13 16:30:13 2020 +0300
+++ b/mercurial/store.py Thu Apr 16 22:51:09 2020 +0530
@@ -137,7 +137,7 @@
asciistr = list(map(xchr, range(127)))
capitals = list(range(ord(b"A"), ord(b"Z") + 1))
- cmap = dict((x, x) for x in asciistr)
+ cmap = {x: x for x in asciistr}
for x in _reserved():
cmap[xchr(x)] = b"~%02x" % x
for x in capitals + [ord(e)]:
@@ -200,7 +200,7 @@
'the~07quick~adshot'
'''
xchr = pycompat.bytechr
- cmap = dict([(xchr(x), xchr(x)) for x in pycompat.xrange(127)])
+ cmap = {xchr(x): xchr(x) for x in pycompat.xrange(127)}
for x in _reserved():
cmap[xchr(x)] = b"~%02x" % x
for x in range(ord(b"A"), ord(b"Z") + 1):
--- a/mercurial/subrepo.py Mon Apr 13 16:30:13 2020 +0300
+++ b/mercurial/subrepo.py Thu Apr 16 22:51:09 2020 +0530
@@ -806,7 +806,7 @@
self.ui.debug(
b'merging subrepository "%s"\n' % subrelpath(self)
)
- hg.merge(self._repo, state[1], remind=False)
+ hg.merge(dst, remind=False)
wctx = self._repo[None]
if self.dirty():
--- a/mercurial/tags.py Mon Apr 13 16:30:13 2020 +0300
+++ b/mercurial/tags.py Thu Apr 16 22:51:09 2020 +0530
@@ -720,15 +720,20 @@
self._dirtyoffset = None
- if rawlen < wantedlen:
- self._dirtyoffset = rawlen
- self._raw.extend(b'\xff' * (wantedlen - rawlen))
- elif rawlen > wantedlen:
+ rawlentokeep = min(
+ wantedlen, (rawlen // _fnodesrecsize) * _fnodesrecsize
+ )
+ if rawlen > rawlentokeep:
# There's no easy way to truncate array instances. This seems
# slightly less evil than copying a potentially large array slice.
- for i in range(rawlen - wantedlen):
+ for i in range(rawlen - rawlentokeep):
self._raw.pop()
- self._dirtyoffset = len(self._raw)
+ rawlen = len(self._raw)
+ self._dirtyoffset = rawlen
+ if rawlen < wantedlen:
+ if self._dirtyoffset is None:
+ self._dirtyoffset = rawlen
+ self._raw.extend(b'\xff' * (wantedlen - rawlen))
def getfnode(self, node, computemissing=True):
"""Obtain the filenode of the .hgtags file at a specified revision.
--- a/mercurial/templatefilters.py Mon Apr 13 16:30:13 2020 +0300
+++ b/mercurial/templatefilters.py Thu Apr 16 22:51:09 2020 +0530
@@ -18,6 +18,7 @@
node,
pycompat,
registrar,
+ smartset,
templateutil,
url,
util,
@@ -105,9 +106,17 @@
return os.path.basename(path)
+def _tocborencodable(obj):
+ if isinstance(obj, smartset.abstractsmartset):
+ return list(obj)
+ return obj
+
+
@templatefilter(b'cbor')
def cbor(obj):
"""Any object. Serializes the object to CBOR bytes."""
+ # cborutil is stricter about type than json() filter
+ obj = pycompat.rapply(_tocborencodable, obj)
return b''.join(cborutil.streamencode(obj))
--- a/mercurial/templatefuncs.py Mon Apr 13 16:30:13 2020 +0300
+++ b/mercurial/templatefuncs.py Thu Apr 16 22:51:09 2020 +0530
@@ -16,6 +16,7 @@
)
from . import (
color,
+ dagop,
diffutil,
encoding,
error,
@@ -658,17 +659,19 @@
return m(repo)
if len(args) > 1:
+ key = None # dynamically-created revs shouldn't be cached
formatargs = [evalfuncarg(context, mapping, a) for a in args[1:]]
revs = query(revsetlang.formatspec(raw, *formatargs))
else:
cache = context.resource(mapping, b'cache')
revsetcache = cache.setdefault(b"revsetcache", {})
- if raw in revsetcache:
- revs = revsetcache[raw]
+ key = raw
+ if key in revsetcache:
+ revs = revsetcache[key]
else:
revs = query(raw)
- revsetcache[raw] = revs
- return templatekw.showrevslist(context, mapping, b"revision", revs)
+ revsetcache[key] = revs
+ return templateutil.revslist(repo, revs, name=b'revision', cachekey=key)
@templatefunc(b'rstdoc(text, style)')
@@ -840,6 +843,45 @@
return b''
+@templatefunc(
+ b'subsetparents(rev, revset)',
+ argspec=b'rev revset',
+ requires={b'repo', b'cache'},
+)
+def subsetparents(context, mapping, args):
+ """Look up parents of the rev in the sub graph given by the revset."""
+ if b'rev' not in args or b'revset' not in args:
+ # i18n: "subsetparents" is a keyword
+ raise error.ParseError(_(b"subsetparents expects two arguments"))
+
+ repo = context.resource(mapping, b'repo')
+
+ rev = templateutil.evalinteger(context, mapping, args[b'rev'])
+
+ # TODO: maybe subsetparents(rev) should be allowed. the default revset
+ # will be the revisions specified by -rREV argument.
+ q = templateutil.evalwrapped(context, mapping, args[b'revset'])
+ if not isinstance(q, templateutil.revslist):
+ # i18n: "subsetparents" is a keyword
+ raise error.ParseError(_(b"subsetparents expects a queried revset"))
+ subset = q.tovalue(context, mapping)
+ key = q.cachekey
+
+ if key:
+ # cache only if revset query isn't dynamic
+ cache = context.resource(mapping, b'cache')
+ walkercache = cache.setdefault(b'subsetparentswalker', {})
+ if key in walkercache:
+ walker = walkercache[key]
+ else:
+ walker = dagop.subsetparentswalker(repo, subset)
+ walkercache[key] = walker
+ else:
+ # for one-shot use, specify startrev to limit the search space
+ walker = dagop.subsetparentswalker(repo, subset, startrev=rev)
+ return templateutil.revslist(repo, walker.parentsset(rev))
+
+
@templatefunc(b'word(number, text[, separator])')
def word(context, mapping, args):
"""Return the nth word from a string."""
--- a/mercurial/templatekw.py Mon Apr 13 16:30:13 2020 +0300
+++ b/mercurial/templatekw.py Thu Apr 16 22:51:09 2020 +0530
@@ -396,26 +396,40 @@
return templateutil.compatfileslist(context, mapping, b'file', ctx.files())
-@templatekeyword(b'graphnode', requires={b'repo', b'ctx'})
+@templatekeyword(b'graphnode', requires={b'repo', b'ctx', b'cache'})
def showgraphnode(context, mapping):
"""String. The character representing the changeset node in an ASCII
revision graph."""
repo = context.resource(mapping, b'repo')
ctx = context.resource(mapping, b'ctx')
- return getgraphnode(repo, ctx)
+ cache = context.resource(mapping, b'cache')
+ return getgraphnode(repo, ctx, cache)
-def getgraphnode(repo, ctx):
- return getgraphnodecurrent(repo, ctx) or getgraphnodesymbol(ctx)
+def getgraphnode(repo, ctx, cache):
+ return getgraphnodecurrent(repo, ctx, cache) or getgraphnodesymbol(ctx)
-def getgraphnodecurrent(repo, ctx):
+def getgraphnodecurrent(repo, ctx, cache):
wpnodes = repo.dirstate.parents()
if wpnodes[1] == nullid:
wpnodes = wpnodes[:1]
if ctx.node() in wpnodes:
return b'@'
else:
+ merge_nodes = cache.get(b'merge_nodes')
+ if merge_nodes is None:
+ from . import merge
+
+ mergestate = merge.mergestate.read(repo)
+ if mergestate.active():
+ merge_nodes = (mergestate.local, mergestate.other)
+ else:
+ merge_nodes = ()
+ cache[b'merge_nodes'] = merge_nodes
+
+ if ctx.node() in merge_nodes:
+ return b'%'
return b''
@@ -548,7 +562,11 @@
"""helper method to generate a template keyword for a namespace"""
repo = context.resource(mapping, b'repo')
ctx = context.resource(mapping, b'ctx')
- ns = repo.names[namespace]
+ ns = repo.names.get(namespace)
+ if ns is None:
+ # namespaces.addnamespace() registers new template keyword, but
+ # the registered namespace might not exist in the current repo.
+ return
names = ns.names(repo, ctx.node())
return compatlist(
context, mapping, ns.templatename, names, plural=namespace
@@ -861,24 +879,6 @@
return scmutil.intrev(ctx)
-def showrevslist(context, mapping, name, revs):
- """helper to generate a list of revisions in which a mapped template will
- be evaluated"""
- repo = context.resource(mapping, b'repo')
- # revs may be a smartset; don't compute it until f() has to be evaluated
- def f():
- srevs = [b'%d' % r for r in revs]
- return _showcompatlist(context, mapping, name, srevs)
-
- return _hybrid(
- f,
- revs,
- lambda x: {name: x, b'ctx': repo[x]},
- pycompat.identity,
- keytype=int,
- )
-
-
@templatekeyword(b'subrepos', requires={b'ctx'})
def showsubrepos(context, mapping):
"""List of strings. Updated subrepositories in the changeset."""
--- a/mercurial/templater.py Mon Apr 13 16:30:13 2020 +0300
+++ b/mercurial/templater.py Thu Apr 16 22:51:09 2020 +0530
@@ -45,6 +45,9 @@
hybriditem
represents a scalar printable value, also supports % operator.
+revslist
+ represents a list of revision numbers.
+
mappinggenerator, mappinglist
represents mappings (i.e. a list of dicts), which may have default
output format.
--- a/mercurial/templateutil.py Mon Apr 13 16:30:13 2020 +0300
+++ b/mercurial/templateutil.py Thu Apr 16 22:51:09 2020 +0530
@@ -15,6 +15,7 @@
from . import (
error,
pycompat,
+ smartset,
util,
)
from .utils import (
@@ -408,6 +409,79 @@
return _unthunk(context, mapping, self._value)
+class revslist(wrapped):
+ """Wrapper for a smartset (a list/set of revision numbers)
+
+ If name specified, the revs will be rendered with the old-style list
+ template of the given name by default.
+
+ The cachekey provides a hint to cache further computation on this
+ smartset. If the underlying smartset is dynamically created, the cachekey
+ should be None.
+ """
+
+ def __init__(self, repo, revs, name=None, cachekey=None):
+ assert isinstance(revs, smartset.abstractsmartset)
+ self._repo = repo
+ self._revs = revs
+ self._name = name
+ self.cachekey = cachekey
+
+ def contains(self, context, mapping, item):
+ rev = unwrapinteger(context, mapping, item)
+ return rev in self._revs
+
+ def getmember(self, context, mapping, key):
+ raise error.ParseError(_(b'not a dictionary'))
+
+ def getmin(self, context, mapping):
+ makehybriditem = self._makehybriditemfunc()
+ return makehybriditem(self._revs.min())
+
+ def getmax(self, context, mapping):
+ makehybriditem = self._makehybriditemfunc()
+ return makehybriditem(self._revs.max())
+
+ def filter(self, context, mapping, select):
+ makehybriditem = self._makehybriditemfunc()
+ frevs = self._revs.filter(lambda r: select(makehybriditem(r)))
+ # once filtered, no need to support old-style list template
+ return revslist(self._repo, frevs, name=None)
+
+ def itermaps(self, context):
+ makemap = self._makemapfunc()
+ for r in self._revs:
+ yield makemap(r)
+
+ def _makehybriditemfunc(self):
+ makemap = self._makemapfunc()
+ return lambda r: hybriditem(None, r, r, makemap)
+
+ def _makemapfunc(self):
+ repo = self._repo
+ name = self._name
+ if name:
+ return lambda r: {name: r, b'ctx': repo[r]}
+ else:
+ return lambda r: {b'ctx': repo[r]}
+
+ def join(self, context, mapping, sep):
+ return joinitems(self._revs, sep)
+
+ def show(self, context, mapping):
+ if self._name:
+ srevs = [b'%d' % r for r in self._revs]
+ return _showcompatlist(context, mapping, self._name, srevs)
+ else:
+ return self.join(context, mapping, b' ')
+
+ def tobool(self, context, mapping):
+ return bool(self._revs)
+
+ def tovalue(self, context, mapping):
+ return self._revs
+
+
class _mappingsequence(wrapped):
"""Wrapper for sequence of template mappings
--- a/mercurial/transaction.py Mon Apr 13 16:30:13 2020 +0300
+++ b/mercurial/transaction.py Thu Apr 16 22:51:09 2020 +0530
@@ -30,9 +30,9 @@
# the changelog having been written).
postfinalizegenerators = {b'bookmarks', b'dirstate'}
-gengroupall = b'all'
-gengroupprefinalize = b'prefinalize'
-gengrouppostfinalize = b'postfinalize'
+GEN_GROUP_ALL = b'all'
+GEN_GROUP_PRE_FINALIZE = b'prefinalize'
+GEN_GROUP_POST_FINALIZE = b'postfinalize'
def active(func):
@@ -165,12 +165,6 @@
self._journal = journalname
self._undoname = undoname
self._queue = []
- # A callback to validate transaction content before closing it.
- # should raise exception is anything is wrong.
- # target user is repository hooks.
- if validator is None:
- validator = lambda tr: None
- self._validator = validator
# A callback to do something just after releasing transaction.
if releasefn is None:
releasefn = lambda tr, success: None
@@ -214,6 +208,11 @@
self._anypending = False
# holds callback to call when writing the transaction
self._finalizecallback = {}
+ # holds callback to call when validating the transaction
+ # should raise exception if anything is wrong
+ self._validatecallback = {}
+ if validator is not None:
+ self._validatecallback[b'001-userhooks'] = validator
# hold callback for post transaction close
self._postclosecallback = {}
# holds callbacks to call during abort
@@ -352,19 +351,25 @@
if genid in self._filegenerators:
del self._filegenerators[genid]
- def _generatefiles(self, suffix=b'', group=gengroupall):
+ def _generatefiles(self, suffix=b'', group=GEN_GROUP_ALL):
# write files registered for generation
any = False
+
+ if group == GEN_GROUP_ALL:
+ skip_post = skip_pre = False
+ else:
+ skip_pre = group == GEN_GROUP_POST_FINALIZE
+ skip_post = group == GEN_GROUP_PRE_FINALIZE
+
for id, entry in sorted(pycompat.iteritems(self._filegenerators)):
any = True
order, filenames, genfunc, location = entry
# for generation at closing, check if it's before or after finalize
- postfinalize = group == gengrouppostfinalize
- if (
- group != gengroupall
- and (id in postfinalizegenerators) != postfinalize
- ):
+ is_post = id in postfinalizegenerators
+ if skip_post and is_post:
+ continue
+ elif skip_pre and not is_post:
continue
vfs = self._vfsmap[location]
@@ -500,12 +505,22 @@
self._abortcallback[category] = callback
@active
+ def addvalidator(self, category, callback):
+ """ adds a callback to be called when validating the transaction.
+
+ The transaction will be given as the first argument to the callback.
+
+ The callback should raise an exception to abort the transaction """
+ self._validatecallback[category] = callback
+
+ @active
def close(self):
'''commit the transaction'''
if self._count == 1:
- self._validator(self) # will raise exception if needed
- self._validator = None # Help prevent cycles.
- self._generatefiles(group=gengroupprefinalize)
+ for category in sorted(self._validatecallback):
+ self._validatecallback[category](self)
+ self._validatecallback = None # Help prevent cycles.
+ self._generatefiles(group=GEN_GROUP_PRE_FINALIZE)
while self._finalizecallback:
callbacks = self._finalizecallback
self._finalizecallback = {}
@@ -514,7 +529,7 @@
callbacks[cat](self)
# Prevent double usage and help clear cycles.
self._finalizecallback = None
- self._generatefiles(group=gengrouppostfinalize)
+ self._generatefiles(group=GEN_GROUP_POST_FINALIZE)
self._count -= 1
if self._count != 0:
--- a/mercurial/ui.py Mon Apr 13 16:30:13 2020 +0300
+++ b/mercurial/ui.py Thu Apr 16 22:51:09 2020 +0530
@@ -1868,7 +1868,7 @@
rc = self._runsystem(cmd, environ=environ, cwd=cwd, out=out)
if rc and onerr:
errmsg = b'%s %s' % (
- os.path.basename(cmd.split(None, 1)[0]),
+ procutil.shellsplit(cmd)[0],
procutil.explainexit(rc),
)
if errprefix:
@@ -1945,30 +1945,6 @@
if self._progbar is not None and self._progbar.printed:
self._progbar.clear()
- def progress(self, topic, pos, item=b"", unit=b"", total=None):
- '''show a progress message
-
- By default a textual progress bar will be displayed if an operation
- takes too long. 'topic' is the current operation, 'item' is a
- non-numeric marker of the current position (i.e. the currently
- in-process file), 'pos' is the current numeric position (i.e.
- revision, bytes, etc.), unit is a corresponding unit label,
- and total is the highest expected pos.
-
- Multiple nested topics may be active at a time.
-
- All topics should be marked closed by setting pos to None at
- termination.
- '''
- self.deprecwarn(
- b"use ui.makeprogress() instead of ui.progress()", b"5.1"
- )
- progress = self.makeprogress(topic, unit, total)
- if pos is not None:
- progress.update(pos, item=item)
- else:
- progress.complete()
-
def makeprogress(self, topic, unit=b"", total=None):
"""Create a progress helper for the specified topic"""
if getattr(self._fmsgerr, 'structured', False):
--- a/mercurial/upgrade.py Mon Apr 13 16:30:13 2020 +0300
+++ b/mercurial/upgrade.py Thu Apr 16 22:51:09 2020 +0530
@@ -449,7 +449,14 @@
@classmethod
def fromconfig(cls, repo):
- return repo.ui.config(b'format', b'revlog-compression')
+ compengines = repo.ui.configlist(b'format', b'revlog-compression')
+ # return the first valid value as the selection code would do
+ for comp in compengines:
+ if comp in util.compengines:
+ return comp
+
+ # no valid compression engine found; let's display them all for clarity
+ return b','.join(compengines)
@registerformatvariant
@@ -1122,7 +1129,7 @@
"""Upgrade a repository in place."""
if optimize is None:
optimize = []
- optimize = set(legacy_opts_map.get(o, o) for o in optimize)
+ optimize = {legacy_opts_map.get(o, o) for o in optimize}
repo = repo.unfiltered()
revlogs = set(UPGRADE_ALL_REVLOGS)
--- a/mercurial/url.py Mon Apr 13 16:30:13 2020 +0300
+++ b/mercurial/url.py Thu Apr 16 22:51:09 2020 +0530
@@ -224,13 +224,11 @@
def _generic_proxytunnel(self):
- proxyheaders = dict(
- [
- (x, self.headers[x])
- for x in self.headers
- if x.lower().startswith('proxy-')
- ]
- )
+ proxyheaders = {
+ x: self.headers[x]
+ for x in self.headers
+ if x.lower().startswith('proxy-')
+ }
self.send(b'CONNECT %s HTTP/1.0\r\n' % self.realhostport)
for header in pycompat.iteritems(proxyheaders):
self.send(b'%s: %s\r\n' % header)
@@ -676,7 +674,9 @@
url_, authinfo = u.authinfo()
else:
path = util.normpath(os.path.abspath(url_))
- url_ = b'file://' + pycompat.bytesurl(urlreq.pathname2url(path))
+ url_ = b'file://' + pycompat.bytesurl(
+ urlreq.pathname2url(pycompat.fsdecode(path))
+ )
authinfo = None
return opener(ui, authinfo, sendaccept=sendaccept).open(
pycompat.strurl(url_), data
--- a/mercurial/util.py Mon Apr 13 16:30:13 2020 +0300
+++ b/mercurial/util.py Thu Apr 16 22:51:09 2020 +0530
@@ -130,6 +130,16 @@
unlink = platform.unlink
username = platform.username
+
+def setumask(val):
+ ''' updates the umask. used by chg server '''
+ if pycompat.iswindows:
+ return
+ os.umask(val)
+ global umask
+ platform.umask = umask = val & 0o777
+
+
# small compat layer
compengines = compression.compengines
SERVERROLE = compression.SERVERROLE
@@ -1846,14 +1856,14 @@
return pycompat.ossep.join(([b'..'] * len(a)) + b) or b'.'
-def checksignature(func):
+def checksignature(func, depth=1):
'''wrap a function with code to check for calling errors'''
def check(*args, **kwargs):
try:
return func(*args, **kwargs)
except TypeError:
- if len(traceback.extract_tb(sys.exc_info()[2])) == 1:
+ if len(traceback.extract_tb(sys.exc_info()[2])) == depth:
raise error.SignatureError
raise
@@ -2213,7 +2223,7 @@
'''
def _makefspathcacheentry(dir):
- return dict((normcase(n), n) for n in os.listdir(dir))
+ return {normcase(n): n for n in os.listdir(dir)}
seps = pycompat.ossep
if pycompat.osaltsep:
--- a/mercurial/utils/storageutil.py Mon Apr 13 16:30:13 2020 +0300
+++ b/mercurial/utils/storageutil.py Thu Apr 16 22:51:09 2020 +0530
@@ -364,7 +364,7 @@
if nodesorder == b'nodes':
revs = [frev(n) for n in nodes]
elif nodesorder == b'linear':
- revs = set(frev(n) for n in nodes)
+ revs = {frev(n) for n in nodes}
revs = dagop.linearize(revs, store.parentrevs)
else: # storage and default
revs = sorted(frev(n) for n in nodes)
--- a/mercurial/vfs.py Mon Apr 13 16:30:13 2020 +0300
+++ b/mercurial/vfs.py Thu Apr 16 22:51:09 2020 +0530
@@ -417,7 +417,7 @@
file were opened multiple times, there could be unflushed data
because the original file handle hasn't been flushed/closed yet.)
- ``checkambig`` argument is passed to atomictemplfile (valid
+ ``checkambig`` argument is passed to atomictempfile (valid
only for writing), and is useful only if target file is
guarded by any lock (e.g. repo.lock or repo.wlock).
--- a/mercurial/windows.py Mon Apr 13 16:30:13 2020 +0300
+++ b/mercurial/windows.py Thu Apr 16 22:51:09 2020 +0530
@@ -535,13 +535,11 @@
cache = dircache.get(dir, None)
if cache is None:
try:
- dmap = dict(
- [
- (normcase(n), s)
- for n, k, s in listdir(dir, True)
- if getkind(s.st_mode) in _wantedkinds
- ]
- )
+ dmap = {
+ normcase(n): s
+ for n, k, s in listdir(dir, True)
+ if getkind(s.st_mode) in _wantedkinds
+ }
except OSError as err:
# Python >= 2.5 returns ENOENT and adds winerror field
# EINVAL is raised if dir is not a directory.
--- a/mercurial/wireprototypes.py Mon Apr 13 16:30:13 2020 +0300
+++ b/mercurial/wireprototypes.py Thu Apr 16 22:51:09 2020 +0530
@@ -383,8 +383,8 @@
# reason for it (like server operators wanting to achieve specific
# performance characteristics). So fail fast if the config references
# unusable compression engines.
- validnames = set(e.name() for e in compengines)
- invalidnames = set(e for e in configengines if e not in validnames)
+ validnames = {e.name() for e in compengines}
+ invalidnames = {e for e in configengines if e not in validnames}
if invalidnames:
raise error.Abort(
_(b'invalid compression engine defined in %s: %s')
--- a/relnotes/next Mon Apr 13 16:30:13 2020 +0300
+++ b/relnotes/next Thu Apr 16 22:51:09 2020 +0530
@@ -1,14 +1,93 @@
== New Features ==
+ * `hg purge`/`hg clean` can now delete ignored files instead of
+ untracked files, with the new -i flag.
+
+ * `hg pull` now has a `--confirm` flag to prompt before applying changes.
+ Config option `pull.confirm` is also added for that.
+
+ * `hg log` now defaults to using an '%' symbol for commits involved
+ in unresolved merge conflicts. That includes unresolved conflicts
+ caused by e.g. `hg update --merge` and `hg graft`. '@' still takes
+ precedence, so what used to be marked '@' still is.
+
+ * New `conflictlocal()` and `conflictother()` revsets return the
+ commits that are being merged, when there are conflicts. Also works
+ for conflicts caused by e.g. `hg graft`.
+
+ * `hg copy --forget` can be used to unmark a file as copied.
+
+ * The `format.revlog-compression` configuration entry now accepts a list. The
+   first available option will be used. For example, setting::
+
+ [format]
+ revlog-compression=zstd, zlib
+
+   Will use `zstd` compression for new repositories if available, and will
+   simply fall back to `zlib` if not.
+
+ * `hg debugmergestate` output is now templated, which may be useful
+ e.g. for IDEs that want to help the user resolve merge conflicts.
+
== New Experimental Features ==
+ * `hg copy` now supports a `--at-rev` argument to mark files as
+ copied in the specified commit. It only works with `--after` for
+ now (i.e., it's only useful for marking files copied using non-hg
+ `cp` as copied).
+
+ * Use `hg copy --forget --at-rev REV` to unmark already committed
+ copies.
== Bug Fixes ==
+ * Fix server exception when concurrent pushes delete the same bookmark
+
+ * Prevent pushes of divergent bookmarks (foo@remote)
+
+ * The push error "remote repository changed while pushing - please
+   try again" now only happens when a concurrent push changed related
+   heads (instead of when a concurrent push changed any revision).
+
== Backwards Compatibility Changes ==
+ * When `hg rebase` pauses for merge conflict resolution, the working
+ copy will no longer have the rebased node as a second parent. You
+ can use the new `conflictparents()` revset for finding the other
+ parent during a conflict.
+
+ * `hg rebase` now accepts repeated `--source` and `--base`
+ arguments. For example, `hg rebase --source 'A + B'` is equivalent
+ to `hg rebase --source A --source B`. This is a
+ backwards-incompatible change because it will break overriding an
+ alias `myrebase = rebase --source A` by `hg myrebase --source B`
+ (it will now rebase `(A + B)::` instead of `B::`).
+
+ * `hg recover` does not verify the validity of the whole repository
+ anymore. You can pass `--verify` or call `hg verify` if necessary.
+
+ * `hg debugmergestate` output format changed. Let us know if that is
+ causing you problems and we'll roll it back.
+
+ * Resolved merge conflicts are now cleared by `hg commit` even if the
+ working copy has no changes.
+
== Internal API Changes ==
+ * The deprecated `ui.progress()` has now been deleted. Please use
+ `ui.makeprogress()` instead.
+
+ * `hg.merge()` now takes a `ctx` instead of the previous `repo` and
+ `node` arguments.
+
+ * `hg.merge()` has lost its `abort` argument. Please call
+ `hg.abortmerge()` directly instead.
+
+ * `hg.merge()` has lost its `mergeforce` argument. It should have
+ only ever been called with the same value as the `force` argument.
+
+ * The `*others` argument of `cmdutil.check_incompatible_arguments()`
+   changed from being a varargs argument to being a single collection.
--- a/rust/Cargo.lock Mon Apr 13 16:30:13 2020 +0300
+++ b/rust/Cargo.lock Thu Apr 16 22:51:09 2020 +0530
@@ -2,23 +2,33 @@
# It is not intended for manual editing.
[[package]]
name = "aho-corasick"
-version = "0.7.6"
+version = "0.7.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
- "memchr 2.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "memchr 2.3.3 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
-name = "arrayvec"
-version = "0.4.12"
+name = "ansi_term"
+version = "0.11.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
- "nodrop 0.1.14 (registry+https://github.com/rust-lang/crates.io-index)",
+ "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "atty"
+version = "0.2.14"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "hermit-abi 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)",
+ "libc 0.2.67 (registry+https://github.com/rust-lang/crates.io-index)",
+ "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "autocfg"
-version = "0.1.6"
+version = "1.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
@@ -28,17 +38,13 @@
[[package]]
name = "byteorder"
-version = "1.3.2"
+version = "1.3.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
-name = "c2-chacha"
-version = "0.2.2"
+name = "cc"
+version = "1.0.50"
source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
- "lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
- "ppv-lite86 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)",
-]
[[package]]
name = "cfg-if"
@@ -46,94 +52,180 @@
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
-name = "cloudabi"
-version = "0.0.3"
+name = "chrono"
+version = "0.4.11"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "num-integer 0.1.42 (registry+https://github.com/rust-lang/crates.io-index)",
+ "num-traits 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)",
+ "time 0.1.42 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "clap"
+version = "2.33.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
+ "ansi_term 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "atty 0.2.14 (registry+https://github.com/rust-lang/crates.io-index)",
"bitflags 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "strsim 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "textwrap 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "unicode-width 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)",
+ "vec_map 0.8.1 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "colored"
+version = "1.9.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "atty 0.2.14 (registry+https://github.com/rust-lang/crates.io-index)",
+ "lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "cpython"
-version = "0.3.0"
+version = "0.4.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "libc 0.2.67 (registry+https://github.com/rust-lang/crates.io-index)",
+ "num-traits 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)",
+ "python27-sys 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "python3-sys 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "crossbeam"
+version = "0.7.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
- "libc 0.2.64 (registry+https://github.com/rust-lang/crates.io-index)",
- "num-traits 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
- "python27-sys 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
- "python3-sys 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)",
+ "crossbeam-channel 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)",
+ "crossbeam-deque 0.7.3 (registry+https://github.com/rust-lang/crates.io-index)",
+ "crossbeam-epoch 0.8.2 (registry+https://github.com/rust-lang/crates.io-index)",
+ "crossbeam-queue 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "crossbeam-utils 0.7.2 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "crossbeam-channel"
+version = "0.4.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "crossbeam-utils 0.7.2 (registry+https://github.com/rust-lang/crates.io-index)",
+ "maybe-uninit 2.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "crossbeam-deque"
-version = "0.7.1"
+version = "0.7.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
- "crossbeam-epoch 0.7.2 (registry+https://github.com/rust-lang/crates.io-index)",
- "crossbeam-utils 0.6.6 (registry+https://github.com/rust-lang/crates.io-index)",
+ "crossbeam-epoch 0.8.2 (registry+https://github.com/rust-lang/crates.io-index)",
+ "crossbeam-utils 0.7.2 (registry+https://github.com/rust-lang/crates.io-index)",
+ "maybe-uninit 2.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "crossbeam-epoch"
-version = "0.7.2"
+version = "0.8.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
- "arrayvec 0.4.12 (registry+https://github.com/rust-lang/crates.io-index)",
+ "autocfg 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
"cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)",
- "crossbeam-utils 0.6.6 (registry+https://github.com/rust-lang/crates.io-index)",
+ "crossbeam-utils 0.7.2 (registry+https://github.com/rust-lang/crates.io-index)",
"lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
- "memoffset 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)",
- "scopeguard 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "maybe-uninit 2.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "memoffset 0.5.3 (registry+https://github.com/rust-lang/crates.io-index)",
+ "scopeguard 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "crossbeam-queue"
-version = "0.1.2"
+version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
- "crossbeam-utils 0.6.6 (registry+https://github.com/rust-lang/crates.io-index)",
+ "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)",
+ "crossbeam-utils 0.7.2 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "crossbeam-utils"
-version = "0.6.6"
+version = "0.7.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
+ "autocfg 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
"cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)",
"lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
+name = "ctor"
+version = "0.1.13"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "quote 1.0.3 (registry+https://github.com/rust-lang/crates.io-index)",
+ "syn 1.0.16 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "difference"
+version = "2.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
+[[package]]
name = "either"
version = "1.5.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
-name = "fuchsia-cprng"
-version = "0.1.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-
-[[package]]
name = "getrandom"
-version = "0.1.12"
+version = "0.1.14"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)",
- "libc 0.2.64 (registry+https://github.com/rust-lang/crates.io-index)",
- "wasi 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "libc 0.2.67 (registry+https://github.com/rust-lang/crates.io-index)",
+ "wasi 0.9.0+wasi-snapshot-preview1 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
+name = "hermit-abi"
+version = "0.1.8"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "libc 0.2.67 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "hex"
+version = "0.4.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
+[[package]]
name = "hg-core"
version = "0.1.0"
dependencies = [
- "byteorder 1.3.2 (registry+https://github.com/rust-lang/crates.io-index)",
+ "byteorder 1.3.4 (registry+https://github.com/rust-lang/crates.io-index)",
+ "cc 1.0.50 (registry+https://github.com/rust-lang/crates.io-index)",
+ "clap 2.33.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "crossbeam 0.7.3 (registry+https://github.com/rust-lang/crates.io-index)",
+ "hex 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)",
"lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
- "memchr 2.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
- "rand 0.6.5 (registry+https://github.com/rust-lang/crates.io-index)",
- "rand_pcg 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)",
- "rayon 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
- "regex 1.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "libc 0.2.67 (registry+https://github.com/rust-lang/crates.io-index)",
+ "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)",
+ "memchr 2.3.3 (registry+https://github.com/rust-lang/crates.io-index)",
+ "memmap 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "micro-timer 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "pretty_assertions 0.6.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "rand 0.7.3 (registry+https://github.com/rust-lang/crates.io-index)",
+ "rand_distr 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
+ "rand_pcg 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "rayon 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "regex 1.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
+ "same-file 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)",
+ "tempfile 3.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
"twox-hash 1.5.0 (registry+https://github.com/rust-lang/crates.io-index)",
]
@@ -141,17 +233,11 @@
name = "hg-cpython"
version = "0.1.0"
dependencies = [
- "cpython 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "cpython 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)",
"hg-core 0.1.0",
- "libc 0.2.64 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
-[[package]]
-name = "hgdirectffi"
-version = "0.1.0"
-dependencies = [
- "hg-core 0.1.0",
- "libc 0.2.64 (registry+https://github.com/rust-lang/crates.io-index)",
+ "libc 0.2.67 (registry+https://github.com/rust-lang/crates.io-index)",
+ "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)",
+ "simple_logger 1.6.0 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
@@ -161,141 +247,183 @@
[[package]]
name = "libc"
-version = "0.2.64"
+version = "0.2.67"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
+[[package]]
+name = "log"
+version = "0.4.8"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "maybe-uninit"
+version = "2.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "memchr"
-version = "2.2.1"
+version = "2.3.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
+name = "memmap"
+version = "0.7.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "libc 0.2.67 (registry+https://github.com/rust-lang/crates.io-index)",
+ "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
name = "memoffset"
-version = "0.5.1"
+version = "0.5.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"rustc_version 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
-name = "nodrop"
-version = "0.1.14"
+name = "micro-timer"
+version = "0.2.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "micro-timer-macros 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "scopeguard 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "micro-timer-macros"
+version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "proc-macro2 1.0.9 (registry+https://github.com/rust-lang/crates.io-index)",
+ "quote 1.0.3 (registry+https://github.com/rust-lang/crates.io-index)",
+ "scopeguard 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "syn 1.0.16 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "num-integer"
+version = "0.1.42"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "autocfg 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "num-traits 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)",
+]
[[package]]
name = "num-traits"
-version = "0.2.8"
+version = "0.2.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
- "autocfg 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)",
+ "autocfg 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "num_cpus"
-version = "1.10.1"
+version = "1.12.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
- "libc 0.2.64 (registry+https://github.com/rust-lang/crates.io-index)",
+ "hermit-abi 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)",
+ "libc 0.2.67 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "output_vt100"
+version = "0.1.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "ppv-lite86"
-version = "0.2.5"
+version = "0.2.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
+[[package]]
+name = "pretty_assertions"
+version = "0.6.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "ansi_term 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "ctor 0.1.13 (registry+https://github.com/rust-lang/crates.io-index)",
+ "difference 2.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "output_vt100 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "proc-macro2"
+version = "1.0.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "unicode-xid 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
+]
[[package]]
name = "python27-sys"
-version = "0.3.0"
+version = "0.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
- "libc 0.2.64 (registry+https://github.com/rust-lang/crates.io-index)",
- "regex 1.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "libc 0.2.67 (registry+https://github.com/rust-lang/crates.io-index)",
+ "regex 1.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "python3-sys"
-version = "0.3.0"
+version = "0.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
- "libc 0.2.64 (registry+https://github.com/rust-lang/crates.io-index)",
- "regex 1.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "libc 0.2.67 (registry+https://github.com/rust-lang/crates.io-index)",
+ "regex 1.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "quote"
+version = "1.0.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "proc-macro2 1.0.9 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "rand"
-version = "0.6.5"
+version = "0.7.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
- "autocfg 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)",
- "libc 0.2.64 (registry+https://github.com/rust-lang/crates.io-index)",
- "rand_chacha 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
- "rand_core 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)",
- "rand_hc 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
- "rand_isaac 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
- "rand_jitter 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)",
- "rand_os 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)",
- "rand_pcg 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)",
- "rand_xorshift 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
- "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
-[[package]]
-name = "rand"
-version = "0.7.2"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
- "getrandom 0.1.12 (registry+https://github.com/rust-lang/crates.io-index)",
- "libc 0.2.64 (registry+https://github.com/rust-lang/crates.io-index)",
- "rand_chacha 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "getrandom 0.1.14 (registry+https://github.com/rust-lang/crates.io-index)",
+ "libc 0.2.67 (registry+https://github.com/rust-lang/crates.io-index)",
+ "rand_chacha 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
"rand_core 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)",
"rand_hc 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "rand_chacha"
-version = "0.1.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
- "autocfg 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)",
- "rand_core 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
-[[package]]
-name = "rand_chacha"
-version = "0.2.1"
+version = "0.2.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
- "c2-chacha 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
+ "ppv-lite86 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)",
"rand_core 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "rand_core"
-version = "0.3.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
- "rand_core 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
-[[package]]
-name = "rand_core"
-version = "0.4.2"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-
-[[package]]
-name = "rand_core"
version = "0.5.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
- "getrandom 0.1.12 (registry+https://github.com/rust-lang/crates.io-index)",
+ "getrandom 0.1.14 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
-name = "rand_hc"
-version = "0.1.0"
+name = "rand_distr"
+version = "0.2.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
- "rand_core 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "rand 0.7.3 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
@@ -307,100 +435,65 @@
]
[[package]]
-name = "rand_isaac"
-version = "0.1.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
- "rand_core 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
-[[package]]
-name = "rand_jitter"
-version = "0.1.4"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
- "libc 0.2.64 (registry+https://github.com/rust-lang/crates.io-index)",
- "rand_core 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)",
- "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
-[[package]]
-name = "rand_os"
-version = "0.1.3"
+name = "rand_pcg"
+version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
- "cloudabi 0.0.3 (registry+https://github.com/rust-lang/crates.io-index)",
- "fuchsia-cprng 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
- "libc 0.2.64 (registry+https://github.com/rust-lang/crates.io-index)",
- "rand_core 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)",
- "rdrand 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
- "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
-[[package]]
-name = "rand_pcg"
-version = "0.1.2"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
- "autocfg 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)",
- "rand_core 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
-[[package]]
-name = "rand_xorshift"
-version = "0.1.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
- "rand_core 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "rand_core 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "rayon"
-version = "1.2.0"
+version = "1.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
- "crossbeam-deque 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "crossbeam-deque 0.7.3 (registry+https://github.com/rust-lang/crates.io-index)",
"either 1.5.3 (registry+https://github.com/rust-lang/crates.io-index)",
- "rayon-core 1.6.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "rayon-core 1.7.0 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "rayon-core"
-version = "1.6.0"
+version = "1.7.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
- "crossbeam-deque 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)",
- "crossbeam-queue 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)",
- "crossbeam-utils 0.6.6 (registry+https://github.com/rust-lang/crates.io-index)",
+ "crossbeam-deque 0.7.3 (registry+https://github.com/rust-lang/crates.io-index)",
+ "crossbeam-queue 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "crossbeam-utils 0.7.2 (registry+https://github.com/rust-lang/crates.io-index)",
"lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
- "num_cpus 1.10.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "num_cpus 1.12.0 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
-name = "rdrand"
-version = "0.4.0"
+name = "redox_syscall"
+version = "0.1.56"
source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
- "rand_core 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
-]
[[package]]
name = "regex"
-version = "1.3.1"
+version = "1.3.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
- "aho-corasick 0.7.6 (registry+https://github.com/rust-lang/crates.io-index)",
- "memchr 2.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
- "regex-syntax 0.6.12 (registry+https://github.com/rust-lang/crates.io-index)",
- "thread_local 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
+ "aho-corasick 0.7.10 (registry+https://github.com/rust-lang/crates.io-index)",
+ "memchr 2.3.3 (registry+https://github.com/rust-lang/crates.io-index)",
+ "regex-syntax 0.6.17 (registry+https://github.com/rust-lang/crates.io-index)",
+ "thread_local 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "regex-syntax"
-version = "0.6.12"
+version = "0.6.17"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
+name = "remove_dir_all"
+version = "0.5.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
name = "rustc_version"
version = "0.2.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -409,8 +502,16 @@
]
[[package]]
+name = "same-file"
+version = "1.0.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "winapi-util 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
name = "scopeguard"
-version = "1.0.0"
+version = "1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
@@ -427,24 +528,97 @@
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
+name = "simple_logger"
+version = "1.6.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "atty 0.2.14 (registry+https://github.com/rust-lang/crates.io-index)",
+ "chrono 0.4.11 (registry+https://github.com/rust-lang/crates.io-index)",
+ "colored 1.9.3 (registry+https://github.com/rust-lang/crates.io-index)",
+ "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)",
+ "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "strsim"
+version = "0.8.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
+[[package]]
+name = "syn"
+version = "1.0.16"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "proc-macro2 1.0.9 (registry+https://github.com/rust-lang/crates.io-index)",
+ "quote 1.0.3 (registry+https://github.com/rust-lang/crates.io-index)",
+ "unicode-xid 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "tempfile"
+version = "3.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)",
+ "libc 0.2.67 (registry+https://github.com/rust-lang/crates.io-index)",
+ "rand 0.7.3 (registry+https://github.com/rust-lang/crates.io-index)",
+ "redox_syscall 0.1.56 (registry+https://github.com/rust-lang/crates.io-index)",
+ "remove_dir_all 0.5.2 (registry+https://github.com/rust-lang/crates.io-index)",
+ "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "textwrap"
+version = "0.11.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "unicode-width 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
name = "thread_local"
-version = "0.3.6"
+version = "1.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
+name = "time"
+version = "0.1.42"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "libc 0.2.67 (registry+https://github.com/rust-lang/crates.io-index)",
+ "redox_syscall 0.1.56 (registry+https://github.com/rust-lang/crates.io-index)",
+ "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
name = "twox-hash"
version = "1.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
- "rand 0.7.2 (registry+https://github.com/rust-lang/crates.io-index)",
+ "rand 0.7.3 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
+name = "unicode-width"
+version = "0.1.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
+[[package]]
+name = "unicode-xid"
+version = "0.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
+[[package]]
+name = "vec_map"
+version = "0.8.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
+[[package]]
name = "wasi"
-version = "0.7.0"
+version = "0.9.0+wasi-snapshot-preview1"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
@@ -462,63 +636,92 @@
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
+name = "winapi-util"
+version = "0.1.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
name = "winapi-x86_64-pc-windows-gnu"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
[metadata]
-"checksum aho-corasick 0.7.6 (registry+https://github.com/rust-lang/crates.io-index)" = "58fb5e95d83b38284460a5fda7d6470aa0b8844d283a0b614b8535e880800d2d"
-"checksum arrayvec 0.4.12 (registry+https://github.com/rust-lang/crates.io-index)" = "cd9fd44efafa8690358b7408d253adf110036b88f55672a933f01d616ad9b1b9"
-"checksum autocfg 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)" = "b671c8fb71b457dd4ae18c4ba1e59aa81793daacc361d82fcd410cef0d491875"
+"checksum aho-corasick 0.7.10 (registry+https://github.com/rust-lang/crates.io-index)" = "8716408b8bc624ed7f65d223ddb9ac2d044c0547b6fa4b0d554f3a9540496ada"
+"checksum ansi_term 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ee49baf6cb617b853aa8d93bf420db2383fab46d314482ca2803b40d5fde979b"
+"checksum atty 0.2.14 (registry+https://github.com/rust-lang/crates.io-index)" = "d9b39be18770d11421cdb1b9947a45dd3f37e93092cbf377614828a319d5fee8"
+"checksum autocfg 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "f8aac770f1885fd7e387acedd76065302551364496e46b3dd00860b2f8359b9d"
"checksum bitflags 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "cf1de2fe8c75bc145a2f577add951f8134889b4795d47466a54a5c846d691693"
-"checksum byteorder 1.3.2 (registry+https://github.com/rust-lang/crates.io-index)" = "a7c3dd8985a7111efc5c80b44e23ecdd8c007de8ade3b96595387e812b957cf5"
-"checksum c2-chacha 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "7d64d04786e0f528460fc884753cf8dddcc466be308f6026f8e355c41a0e4101"
+"checksum byteorder 1.3.4 (registry+https://github.com/rust-lang/crates.io-index)" = "08c48aae112d48ed9f069b33538ea9e3e90aa263cfa3d1c24309612b1f7472de"
+"checksum cc 1.0.50 (registry+https://github.com/rust-lang/crates.io-index)" = "95e28fa049fda1c330bcf9d723be7663a899c4679724b34c81e9f5a326aab8cd"
"checksum cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)" = "4785bdd1c96b2a846b2bd7cc02e86b6b3dbf14e7e53446c4f54c92a361040822"
-"checksum cloudabi 0.0.3 (registry+https://github.com/rust-lang/crates.io-index)" = "ddfc5b9aa5d4507acaf872de71051dfd0e309860e88966e1051e462a077aac4f"
-"checksum cpython 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "85532c648315aeb0829ad216a6a29aa3212cf9319bc7f6daf1404aa0bdd1485f"
-"checksum crossbeam-deque 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)" = "b18cd2e169ad86297e6bc0ad9aa679aee9daa4f19e8163860faf7c164e4f5a71"
-"checksum crossbeam-epoch 0.7.2 (registry+https://github.com/rust-lang/crates.io-index)" = "fedcd6772e37f3da2a9af9bf12ebe046c0dfe657992377b4df982a2b54cd37a9"
-"checksum crossbeam-queue 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "7c979cd6cfe72335896575c6b5688da489e420d36a27a0b9eb0c73db574b4a4b"
-"checksum crossbeam-utils 0.6.6 (registry+https://github.com/rust-lang/crates.io-index)" = "04973fa96e96579258a5091af6003abde64af786b860f18622b82e026cca60e6"
+"checksum chrono 0.4.11 (registry+https://github.com/rust-lang/crates.io-index)" = "80094f509cf8b5ae86a4966a39b3ff66cd7e2a3e594accec3743ff3fabeab5b2"
+"checksum clap 2.33.0 (registry+https://github.com/rust-lang/crates.io-index)" = "5067f5bb2d80ef5d68b4c87db81601f0b75bca627bc2ef76b141d7b846a3c6d9"
+"checksum colored 1.9.3 (registry+https://github.com/rust-lang/crates.io-index)" = "f4ffc801dacf156c5854b9df4f425a626539c3a6ef7893cc0c5084a23f0b6c59"
+"checksum cpython 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)" = "bfaf3847ab963e40c4f6dd8d6be279bdf74007ae2413786a0dcbb28c52139a95"
+"checksum crossbeam 0.7.3 (registry+https://github.com/rust-lang/crates.io-index)" = "69323bff1fb41c635347b8ead484a5ca6c3f11914d784170b158d8449ab07f8e"
+"checksum crossbeam-channel 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)" = "cced8691919c02aac3cb0a1bc2e9b73d89e832bf9a06fc579d4e71b68a2da061"
+"checksum crossbeam-deque 0.7.3 (registry+https://github.com/rust-lang/crates.io-index)" = "9f02af974daeee82218205558e51ec8768b48cf524bd01d550abe5573a608285"
+"checksum crossbeam-epoch 0.8.2 (registry+https://github.com/rust-lang/crates.io-index)" = "058ed274caafc1f60c4997b5fc07bf7dc7cca454af7c6e81edffe5f33f70dace"
+"checksum crossbeam-queue 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "c695eeca1e7173472a32221542ae469b3e9aac3a4fc81f7696bcad82029493db"
+"checksum crossbeam-utils 0.7.2 (registry+https://github.com/rust-lang/crates.io-index)" = "c3c7c73a2d1e9fc0886a08b93e98eb643461230d5f1925e4036204d5f2e261a8"
+"checksum ctor 0.1.13 (registry+https://github.com/rust-lang/crates.io-index)" = "47c5e5ac752e18207b12e16b10631ae5f7f68f8805f335f9b817ead83d9ffce1"
+"checksum difference 2.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "524cbf6897b527295dff137cec09ecf3a05f4fddffd7dfcd1585403449e74198"
"checksum either 1.5.3 (registry+https://github.com/rust-lang/crates.io-index)" = "bb1f6b1ce1c140482ea30ddd3335fc0024ac7ee112895426e0a629a6c20adfe3"
-"checksum fuchsia-cprng 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "a06f77d526c1a601b7c4cdd98f54b5eaabffc14d5f2f0296febdc7f357c6d3ba"
-"checksum getrandom 0.1.12 (registry+https://github.com/rust-lang/crates.io-index)" = "473a1265acc8ff1e808cd0a1af8cee3c2ee5200916058a2ca113c29f2d903571"
+"checksum getrandom 0.1.14 (registry+https://github.com/rust-lang/crates.io-index)" = "7abc8dd8451921606d809ba32e95b6111925cd2906060d2dcc29c070220503eb"
+"checksum hermit-abi 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)" = "1010591b26bbfe835e9faeabeb11866061cc7dcebffd56ad7d0942d0e61aefd8"
+"checksum hex 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)" = "644f9158b2f133fd50f5fb3242878846d9eb792e445c893805ff0e3824006e35"
"checksum lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646"
-"checksum libc 0.2.64 (registry+https://github.com/rust-lang/crates.io-index)" = "74dfca3d9957906e8d1e6a0b641dc9a59848e793f1da2165889fd4f62d10d79c"
-"checksum memchr 2.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "88579771288728879b57485cc7d6b07d648c9f0141eb955f8ab7f9d45394468e"
-"checksum memoffset 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)" = "ce6075db033bbbb7ee5a0bbd3a3186bbae616f57fb001c485c7ff77955f8177f"
-"checksum nodrop 0.1.14 (registry+https://github.com/rust-lang/crates.io-index)" = "72ef4a56884ca558e5ddb05a1d1e7e1bfd9a68d9ed024c21704cc98872dae1bb"
-"checksum num-traits 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)" = "6ba9a427cfca2be13aa6f6403b0b7e7368fe982bfa16fccc450ce74c46cd9b32"
-"checksum num_cpus 1.10.1 (registry+https://github.com/rust-lang/crates.io-index)" = "bcef43580c035376c0705c42792c294b66974abbfd2789b511784023f71f3273"
-"checksum ppv-lite86 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)" = "e3cbf9f658cdb5000fcf6f362b8ea2ba154b9f146a61c7a20d647034c6b6561b"
-"checksum python27-sys 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "372555e88a6bc8109eb641380240dc8d25a128fc48363ec9075664daadffdd5b"
-"checksum python3-sys 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "f3a8ebed3f1201fda179f3960609dbbc10cd8c75e9f2afcb03788278f367d8ea"
-"checksum rand 0.6.5 (registry+https://github.com/rust-lang/crates.io-index)" = "6d71dacdc3c88c1fde3885a3be3fbab9f35724e6ce99467f7d9c5026132184ca"
-"checksum rand 0.7.2 (registry+https://github.com/rust-lang/crates.io-index)" = "3ae1b169243eaf61759b8475a998f0a385e42042370f3a7dbaf35246eacc8412"
-"checksum rand_chacha 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "556d3a1ca6600bfcbab7c7c91ccb085ac7fbbcd70e008a98742e7847f4f7bcef"
-"checksum rand_chacha 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "03a2a90da8c7523f554344f921aa97283eadf6ac484a6d2a7d0212fa7f8d6853"
-"checksum rand_core 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "7a6fdeb83b075e8266dcc8762c22776f6877a63111121f5f8c7411e5be7eed4b"
-"checksum rand_core 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)" = "9c33a3c44ca05fa6f1807d8e6743f3824e8509beca625669633be0acbdf509dc"
+"checksum libc 0.2.67 (registry+https://github.com/rust-lang/crates.io-index)" = "eb147597cdf94ed43ab7a9038716637d2d1bf2bc571da995d0028dec06bd3018"
+"checksum log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)" = "14b6052be84e6b71ab17edffc2eeabf5c2c3ae1fdb464aae35ac50c67a44e1f7"
+"checksum maybe-uninit 2.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "60302e4db3a61da70c0cb7991976248362f30319e88850c487b9b95bbf059e00"
+"checksum memchr 2.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "3728d817d99e5ac407411fa471ff9800a778d88a24685968b36824eaf4bee400"
+"checksum memmap 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "6585fd95e7bb50d6cc31e20d4cf9afb4e2ba16c5846fc76793f11218da9c475b"
+"checksum memoffset 0.5.3 (registry+https://github.com/rust-lang/crates.io-index)" = "75189eb85871ea5c2e2c15abbdd541185f63b408415e5051f5cac122d8c774b9"
+"checksum micro-timer 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "987429cd6162a80ed5ff44fc790f5090b1c6d617ac73a2e272965ed91201d79b"
+"checksum micro-timer-macros 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "43cec5c0b38783eb33ef7bccf4b250b7a085703e11f5f2238fa31969e629388a"
+"checksum num-integer 0.1.42 (registry+https://github.com/rust-lang/crates.io-index)" = "3f6ea62e9d81a77cd3ee9a2a5b9b609447857f3d358704331e4ef39eb247fcba"
+"checksum num-traits 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)" = "c62be47e61d1842b9170f0fdeec8eba98e60e90e5446449a0545e5152acd7096"
+"checksum num_cpus 1.12.0 (registry+https://github.com/rust-lang/crates.io-index)" = "46203554f085ff89c235cd12f7075f3233af9b11ed7c9e16dfe2560d03313ce6"
+"checksum output_vt100 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "53cdc5b785b7a58c5aad8216b3dfa114df64b0b06ae6e1501cef91df2fbdf8f9"
+"checksum ppv-lite86 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)" = "74490b50b9fbe561ac330df47c08f3f33073d2d00c150f719147d7c54522fa1b"
+"checksum pretty_assertions 0.6.1 (registry+https://github.com/rust-lang/crates.io-index)" = "3f81e1644e1b54f5a68959a29aa86cde704219254669da328ecfdf6a1f09d427"
+"checksum proc-macro2 1.0.9 (registry+https://github.com/rust-lang/crates.io-index)" = "6c09721c6781493a2a492a96b5a5bf19b65917fe6728884e7c44dd0c60ca3435"
+"checksum python27-sys 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)" = "67cb041de8615111bf224dd75667af5f25c6e032118251426fed7f1b70ce4c8c"
+"checksum python3-sys 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)" = "90af11779515a1e530af60782d273b59ac79d33b0e253c071a728563957c76d4"
+"checksum quote 1.0.3 (registry+https://github.com/rust-lang/crates.io-index)" = "2bdc6c187c65bca4260c9011c9e3132efe4909da44726bad24cf7572ae338d7f"
+"checksum rand 0.7.3 (registry+https://github.com/rust-lang/crates.io-index)" = "6a6b1679d49b24bbfe0c803429aa1874472f50d9b363131f0e89fc356b544d03"
+"checksum rand_chacha 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "f4c8ed856279c9737206bf725bf36935d8666ead7aa69b52be55af369d193402"
"checksum rand_core 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)" = "90bde5296fc891b0cef12a6d03ddccc162ce7b2aff54160af9338f8d40df6d19"
-"checksum rand_hc 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "7b40677c7be09ae76218dc623efbf7b18e34bced3f38883af07bb75630a21bc4"
+"checksum rand_distr 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "96977acbdd3a6576fb1d27391900035bf3863d4a16422973a409b488cf29ffb2"
"checksum rand_hc 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ca3129af7b92a17112d59ad498c6f81eaf463253766b90396d39ea7a39d6613c"
-"checksum rand_isaac 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "ded997c9d5f13925be2a6fd7e66bf1872597f759fd9dd93513dd7e92e5a5ee08"
-"checksum rand_jitter 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)" = "1166d5c91dc97b88d1decc3285bb0a99ed84b05cfd0bc2341bdf2d43fc41e39b"
-"checksum rand_os 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "7b75f676a1e053fc562eafbb47838d67c84801e38fc1ba459e8f180deabd5071"
-"checksum rand_pcg 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "abf9b09b01790cfe0364f52bf32995ea3c39f4d2dd011eac241d2914146d0b44"
-"checksum rand_xorshift 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "cbf7e9e623549b0e21f6e97cf8ecf247c1a8fd2e8a992ae265314300b2455d5c"
-"checksum rayon 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "83a27732a533a1be0a0035a111fe76db89ad312f6f0347004c220c57f209a123"
-"checksum rayon-core 1.6.0 (registry+https://github.com/rust-lang/crates.io-index)" = "98dcf634205083b17d0861252431eb2acbfb698ab7478a2d20de07954f47ec7b"
-"checksum rdrand 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "678054eb77286b51581ba43620cc911abf02758c91f93f479767aed0f90458b2"
-"checksum regex 1.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "dc220bd33bdce8f093101afe22a037b8eb0e5af33592e6a9caafff0d4cb81cbd"
-"checksum regex-syntax 0.6.12 (registry+https://github.com/rust-lang/crates.io-index)" = "11a7e20d1cce64ef2fed88b66d347f88bd9babb82845b2b858f3edbf59a4f716"
+"checksum rand_pcg 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "16abd0c1b639e9eb4d7c50c0b8100b0d0f849be2349829c740fe8e6eb4816429"
+"checksum rayon 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "db6ce3297f9c85e16621bb8cca38a06779ffc31bb8184e1be4bed2be4678a098"
+"checksum rayon-core 1.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "08a89b46efaf957e52b18062fb2f4660f8b8a4dde1807ca002690868ef2c85a9"
+"checksum redox_syscall 0.1.56 (registry+https://github.com/rust-lang/crates.io-index)" = "2439c63f3f6139d1b57529d16bc3b8bb855230c8efcc5d3a896c8bea7c3b1e84"
+"checksum regex 1.3.6 (registry+https://github.com/rust-lang/crates.io-index)" = "7f6946991529684867e47d86474e3a6d0c0ab9b82d5821e314b1ede31fa3a4b3"
+"checksum regex-syntax 0.6.17 (registry+https://github.com/rust-lang/crates.io-index)" = "7fe5bd57d1d7414c6b5ed48563a2c855d995ff777729dcd91c369ec7fea395ae"
+"checksum remove_dir_all 0.5.2 (registry+https://github.com/rust-lang/crates.io-index)" = "4a83fa3702a688b9359eccba92d153ac33fd2e8462f9e0e3fdf155239ea7792e"
"checksum rustc_version 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)" = "138e3e0acb6c9fb258b19b67cb8abd63c00679d2851805ea151465464fe9030a"
-"checksum scopeguard 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "b42e15e59b18a828bbf5c58ea01debb36b9b096346de35d941dcb89009f24a0d"
+"checksum same-file 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)" = "93fc1dc3aaa9bfed95e02e6eadabb4baf7e3078b0bd1b4d7b6b0b68378900502"
+"checksum scopeguard 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd"
"checksum semver 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)" = "1d7eb9ef2c18661902cc47e535f9bc51b78acd254da71d375c2f6720d9a40403"
"checksum semver-parser 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "388a1df253eca08550bef6c72392cfe7c30914bf41df5269b68cbd6ff8f570a3"
-"checksum thread_local 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)" = "c6b53e329000edc2b34dbe8545fd20e55a333362d0a321909685a19bd28c3f1b"
+"checksum simple_logger 1.6.0 (registry+https://github.com/rust-lang/crates.io-index)" = "fea0c4611f32f4c2bac73754f22dca1f57e6c1945e0590dae4e5f2a077b92367"
+"checksum strsim 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)" = "8ea5119cdb4c55b55d432abb513a0429384878c15dde60cc77b1c99de1a95a6a"
+"checksum syn 1.0.16 (registry+https://github.com/rust-lang/crates.io-index)" = "123bd9499cfb380418d509322d7a6d52e5315f064fe4b3ad18a53d6b92c07859"
+"checksum tempfile 3.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "7a6e24d9338a0a5be79593e2fa15a648add6138caa803e2d5bc782c371732ca9"
+"checksum textwrap 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)" = "d326610f408c7a4eb6f51c37c330e496b08506c9457c9d34287ecc38809fb060"
+"checksum thread_local 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "d40c6d1b69745a6ec6fb1ca717914848da4b44ae29d9b3080cbee91d72a69b14"
+"checksum time 0.1.42 (registry+https://github.com/rust-lang/crates.io-index)" = "db8dcfca086c1143c9270ac42a2bbd8a7ee477b78ac8e45b19abfb0cbede4b6f"
"checksum twox-hash 1.5.0 (registry+https://github.com/rust-lang/crates.io-index)" = "3bfd5b7557925ce778ff9b9ef90e3ade34c524b5ff10e239c69a42d546d2af56"
-"checksum wasi 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "b89c3ce4ce14bdc6fb6beaf9ec7928ca331de5df7e5ea278375642a2f478570d"
+"checksum unicode-width 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)" = "caaa9d531767d1ff2150b9332433f32a24622147e5ebb1f26409d5da67afd479"
+"checksum unicode-xid 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "826e7639553986605ec5979c7dd957c7895e93eabed50ab2ffa7f6128a75097c"
+"checksum vec_map 0.8.1 (registry+https://github.com/rust-lang/crates.io-index)" = "05c78687fb1a80548ae3250346c3db86a80a7cdd77bda190189f2d0a0987c81a"
+"checksum wasi 0.9.0+wasi-snapshot-preview1 (registry+https://github.com/rust-lang/crates.io-index)" = "cccddf32554fecc6acb585f82a32a72e28b48f8c4c1883ddfeeeaa96f7d8e519"
"checksum winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)" = "8093091eeb260906a183e6ae1abdba2ef5ef2257a21801128899c3fc699229c6"
"checksum winapi-i686-pc-windows-gnu 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6"
+"checksum winapi-util 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "4ccfbf554c6ad11084fb7517daca16cfdcaccbdadba4fc336f032a8b12c2ad80"
"checksum winapi-x86_64-pc-windows-gnu 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f"
--- a/rust/Cargo.toml Mon Apr 13 16:30:13 2020 +0300
+++ b/rust/Cargo.toml Thu Apr 16 22:51:09 2020 +0530
@@ -1,3 +1,3 @@
[workspace]
-members = ["hg-core", "hg-direct-ffi", "hg-cpython"]
+members = ["hg-core", "hg-cpython"]
exclude = ["chg", "hgcli"]
--- a/rust/README.rst Mon Apr 13 16:30:13 2020 +0300
+++ b/rust/README.rst Thu Apr 16 22:51:09 2020 +0530
@@ -11,28 +11,75 @@
- hgcli. A experiment for starting hg in rust rather than in python,
by linking with the python runtime. Probably meant to be replaced by
PyOxidizer at some point.
-- hg-core (and hg-cpython/hg-directffi): implementation of some
+- hg-core (and hg-cpython): implementation of some
functionality of mercurial in rust, e.g. ancestry computations in
- revision graphs or pull discovery. The top-level ``Cargo.toml`` file
+ revision graphs, status or pull discovery. The top-level ``Cargo.toml`` file
defines a workspace containing these crates.
-Using hg-core
-=============
+Using Rust code
+===============
Local use (you need to clean previous build artifacts if you have
built without rust previously)::
- $ HGWITHRUSTEXT=cpython make local # to use ./hg
- $ HGWITHRUSTEXT=cpython make tests # to run all tests
- $ (cd tests; HGWITHRUSTEXT=cpython ./run-tests.py) # only the .t
- $ ./hg debuginstall | grep rust # to validate rust is in use
+ $ make PURE=--rust local # to use ./hg
+ $ ./tests/run-tests.py --rust # to run all tests
+ $ ./hg debuginstall | grep -i rust # to validate rust is in use
+ checking Rust extensions (installed)
checking module policy (rust+c-allow)
+ checking "re2" regexp engine Rust bindings (installed)
+
+
+If the environment variable ``HGWITHRUSTEXT=cpython`` is set, the Rust
+extension will be used by default unless ``--no-rust``.
+
+One day we may use this environment variable to switch to new experimental
+binding crates like a hypothetical ``HGWITHRUSTEXT=hpy``.
+
+Using the fastest ``hg status``
+-------------------------------
+
+The code for ``hg status`` needs to conform to ``.hgignore`` rules, which are
+all translated into regex.
+
+In the first version, for compatibility and ease of development reasons, the
+Re2 regex engine was chosen until we figured out if the ``regex`` crate had
+similar enough behavior.
+
+Now that this work has been done, the default behavior is to use the ``regex``
+crate, which provides a significant performance boost compared to the standard
+Python + C path in many commands such as ``status``, ``diff`` and ``commit``.
-Setting ``HGWITHRUSTEXT`` to other values like ``true`` is deprecated
-and enables only a fraction of the rust code.
+However, the ``Re2`` path remains slightly faster for our use cases and is
+a better option for getting the most speed out of your Mercurial.
+
+If you want to use ``Re2``, you need to install ``Re2`` following Google's
+guidelines: https://github.com/google/re2/wiki/Install.
+Then, use ``HG_RUST_FEATURES=with-re2`` and
+``HG_RE2_PATH=system|<path to your re2 install>`` when building ``hg`` to
+signal the use of Re2. Using the local path instead of the "system" RE2 links
+it statically.
+
+For example::
-Developing hg-core
-==================
+ $ HG_RUST_FEATURES=with-re2 HG_RE2_PATH=system make PURE=--rust
+ $ # OR
+ $ HG_RUST_FEATURES=with-re2 HG_RE2_PATH=/path/to/re2 make PURE=--rust
+
+Developing Rust
+===============
+
+The current version of Rust in use is ``1.34.2``, because it's what Debian
+stable has. You can use ``rustup override set 1.34.2`` at the root of the repo
+to make it easier on you.
+
+Go to the ``hg-cpython`` folder::
+
+ $ cd rust/hg-cpython
+
+Or, go only to the ``hg-core`` folder. Be careful not to break compatibility::
+
+ $ cd rust/hg-core
Simply run::
@@ -46,7 +93,35 @@
$ cargo check
+For even faster typing::
+
+ $ cargo c
+
You can run only the rust-specific tests (as opposed to tests of
mercurial as a whole) with::
$ cargo test --all
+
+Formatting the code
+-------------------
+
+We use ``rustfmt`` to keep the code formatted at all times. For now, we are
+using the nightly version because it has been stable enough and provides
+comment folding.
+
+To format the entire Rust workspace::
+
+ $ cargo +nightly fmt
+
+This requires you to have the nightly toolchain installed.
+
+Additional features
+-------------------
+
+As mentioned in the section about ``hg status``, code paths using ``re2`` are
+opt-in.
+
+For example::
+
+ $ cargo check --features with-re2
+
--- a/rust/chg/Cargo.lock Mon Apr 13 16:30:13 2020 +0300
+++ b/rust/chg/Cargo.lock Thu Apr 16 22:51:09 2020 +0530
@@ -1,53 +1,57 @@
+# This file is automatically @generated by Cargo.
+# It is not intended for manual editing.
[[package]]
-name = "arrayvec"
-version = "0.4.7"
+name = "arc-swap"
+version = "0.4.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
- "nodrop 0.1.12 (registry+https://github.com/rust-lang/crates.io-index)",
-]
+
+[[package]]
+name = "autocfg"
+version = "1.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "bitflags"
-version = "1.0.4"
+version = "1.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "byteorder"
-version = "1.2.6"
+version = "1.3.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "bytes"
-version = "0.4.10"
+version = "0.4.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
- "byteorder 1.2.6 (registry+https://github.com/rust-lang/crates.io-index)",
- "iovec 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)",
+ "byteorder 1.3.4 (registry+https://github.com/rust-lang/crates.io-index)",
+ "iovec 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "cc"
-version = "1.0.25"
+version = "1.0.50"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "cfg-if"
-version = "0.1.5"
+version = "0.1.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "chg"
version = "0.1.0"
dependencies = [
- "bytes 0.4.10 (registry+https://github.com/rust-lang/crates.io-index)",
- "cc 1.0.25 (registry+https://github.com/rust-lang/crates.io-index)",
- "futures 0.1.24 (registry+https://github.com/rust-lang/crates.io-index)",
- "libc 0.2.43 (registry+https://github.com/rust-lang/crates.io-index)",
- "log 0.4.5 (registry+https://github.com/rust-lang/crates.io-index)",
- "tokio 0.1.11 (registry+https://github.com/rust-lang/crates.io-index)",
+ "bytes 0.4.12 (registry+https://github.com/rust-lang/crates.io-index)",
+ "cc 1.0.50 (registry+https://github.com/rust-lang/crates.io-index)",
+ "futures 0.1.29 (registry+https://github.com/rust-lang/crates.io-index)",
+ "libc 0.2.68 (registry+https://github.com/rust-lang/crates.io-index)",
+ "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)",
+ "tokio 0.1.22 (registry+https://github.com/rust-lang/crates.io-index)",
"tokio-hglib 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
- "tokio-process 0.2.2 (git+https://github.com/alexcrichton/tokio-process)",
- "tokio-timer 0.2.7 (registry+https://github.com/rust-lang/crates.io-index)",
+ "tokio-process 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)",
+ "tokio-timer 0.2.13 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
@@ -55,34 +59,72 @@
version = "0.0.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
- "bitflags 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)",
+ "bitflags 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "crossbeam-deque"
-version = "0.6.1"
+version = "0.7.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
- "crossbeam-epoch 0.5.2 (registry+https://github.com/rust-lang/crates.io-index)",
- "crossbeam-utils 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "crossbeam-epoch 0.8.2 (registry+https://github.com/rust-lang/crates.io-index)",
+ "crossbeam-utils 0.7.2 (registry+https://github.com/rust-lang/crates.io-index)",
+ "maybe-uninit 2.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "crossbeam-epoch"
-version = "0.5.2"
+version = "0.8.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
- "arrayvec 0.4.7 (registry+https://github.com/rust-lang/crates.io-index)",
- "cfg-if 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)",
- "crossbeam-utils 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)",
- "lazy_static 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
- "memoffset 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
- "scopeguard 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)",
+ "autocfg 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)",
+ "crossbeam-utils 0.7.2 (registry+https://github.com/rust-lang/crates.io-index)",
+ "lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "maybe-uninit 2.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "memoffset 0.5.4 (registry+https://github.com/rust-lang/crates.io-index)",
+ "scopeguard 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "crossbeam-queue"
+version = "0.1.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "crossbeam-utils 0.6.6 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "crossbeam-queue"
+version = "0.2.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)",
+ "crossbeam-utils 0.7.2 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "crossbeam-utils"
-version = "0.5.0"
+version = "0.6.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)",
+ "lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "crossbeam-utils"
+version = "0.7.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "autocfg 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)",
+ "lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "fnv"
+version = "1.0.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
@@ -90,7 +132,7 @@
version = "0.3.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
- "bitflags 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)",
+ "bitflags 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
"fuchsia-zircon-sys 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)",
]
@@ -101,16 +143,23 @@
[[package]]
name = "futures"
-version = "0.1.24"
+version = "0.1.29"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
+name = "hermit-abi"
+version = "0.1.10"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "libc 0.2.68 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
name = "iovec"
-version = "0.1.2"
+version = "0.1.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
- "libc 0.2.43 (registry+https://github.com/rust-lang/crates.io-index)",
- "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
+ "libc 0.2.68 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
@@ -124,59 +173,58 @@
[[package]]
name = "lazy_static"
-version = "1.1.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
- "version_check 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
-[[package]]
-name = "lazycell"
-version = "1.2.0"
+version = "1.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "libc"
-version = "0.2.43"
+version = "0.2.68"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "lock_api"
-version = "0.1.4"
+version = "0.3.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
- "owning_ref 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)",
- "scopeguard 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)",
+ "scopeguard 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "log"
-version = "0.4.5"
+version = "0.4.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
- "cfg-if 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)",
+ "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
+name = "maybe-uninit"
+version = "2.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
+[[package]]
name = "memoffset"
-version = "0.2.1"
+version = "0.5.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "autocfg 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
+]
[[package]]
name = "mio"
-version = "0.6.16"
+version = "0.6.21"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
+ "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)",
"fuchsia-zircon 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)",
"fuchsia-zircon-sys 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)",
- "iovec 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)",
+ "iovec 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)",
"kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
- "lazycell 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
- "libc 0.2.43 (registry+https://github.com/rust-lang/crates.io-index)",
- "log 0.4.5 (registry+https://github.com/rust-lang/crates.io-index)",
+ "libc 0.2.68 (registry+https://github.com/rust-lang/crates.io-index)",
+ "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)",
"miow 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
"net2 0.2.33 (registry+https://github.com/rust-lang/crates.io-index)",
- "slab 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "slab 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)",
"winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
]
@@ -185,10 +233,10 @@
version = "0.1.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
- "log 0.4.5 (registry+https://github.com/rust-lang/crates.io-index)",
- "mio 0.6.16 (registry+https://github.com/rust-lang/crates.io-index)",
+ "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)",
+ "mio 0.6.21 (registry+https://github.com/rust-lang/crates.io-index)",
"miow 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)",
- "winapi 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
+ "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
@@ -196,9 +244,9 @@
version = "0.6.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
- "iovec 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)",
- "libc 0.2.43 (registry+https://github.com/rust-lang/crates.io-index)",
- "mio 0.6.16 (registry+https://github.com/rust-lang/crates.io-index)",
+ "iovec 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)",
+ "libc 0.2.68 (registry+https://github.com/rust-lang/crates.io-index)",
+ "mio 0.6.21 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
@@ -217,8 +265,8 @@
version = "0.3.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
- "socket2 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)",
- "winapi 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
+ "socket2 0.3.11 (registry+https://github.com/rust-lang/crates.io-index)",
+ "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
@@ -226,73 +274,47 @@
version = "0.2.33"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
- "cfg-if 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)",
- "libc 0.2.43 (registry+https://github.com/rust-lang/crates.io-index)",
- "winapi 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
+ "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)",
+ "libc 0.2.68 (registry+https://github.com/rust-lang/crates.io-index)",
+ "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
-name = "nodrop"
-version = "0.1.12"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-
-[[package]]
name = "num_cpus"
-version = "1.8.0"
+version = "1.12.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
- "libc 0.2.43 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
-[[package]]
-name = "owning_ref"
-version = "0.3.3"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
- "stable_deref_trait 1.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "hermit-abi 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)",
+ "libc 0.2.68 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "parking_lot"
-version = "0.6.4"
+version = "0.9.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
- "lock_api 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)",
- "parking_lot_core 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "lock_api 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)",
+ "parking_lot_core 0.6.2 (registry+https://github.com/rust-lang/crates.io-index)",
+ "rustc_version 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "parking_lot_core"
-version = "0.3.1"
+version = "0.6.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
- "libc 0.2.43 (registry+https://github.com/rust-lang/crates.io-index)",
- "rand 0.5.5 (registry+https://github.com/rust-lang/crates.io-index)",
+ "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)",
+ "cloudabi 0.0.3 (registry+https://github.com/rust-lang/crates.io-index)",
+ "libc 0.2.68 (registry+https://github.com/rust-lang/crates.io-index)",
+ "redox_syscall 0.1.56 (registry+https://github.com/rust-lang/crates.io-index)",
"rustc_version 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)",
- "smallvec 0.6.5 (registry+https://github.com/rust-lang/crates.io-index)",
- "winapi 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
+ "smallvec 0.6.13 (registry+https://github.com/rust-lang/crates.io-index)",
+ "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
-name = "rand"
-version = "0.5.5"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
- "cloudabi 0.0.3 (registry+https://github.com/rust-lang/crates.io-index)",
- "fuchsia-zircon 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)",
- "libc 0.2.43 (registry+https://github.com/rust-lang/crates.io-index)",
- "rand_core 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
- "winapi 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
-[[package]]
-name = "rand_core"
-version = "0.2.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-
-[[package]]
name = "redox_syscall"
-version = "0.1.40"
+version = "0.1.56"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
@@ -305,7 +327,7 @@
[[package]]
name = "scopeguard"
-version = "0.3.3"
+version = "1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
@@ -322,90 +344,97 @@
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
+name = "signal-hook-registry"
+version = "1.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "arc-swap 0.4.5 (registry+https://github.com/rust-lang/crates.io-index)",
+ "libc 0.2.68 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
name = "slab"
-version = "0.4.1"
+version = "0.4.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "smallvec"
-version = "0.6.5"
+version = "0.6.13"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
- "unreachable 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "maybe-uninit 2.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "socket2"
-version = "0.3.8"
+version = "0.3.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
- "cfg-if 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)",
- "libc 0.2.43 (registry+https://github.com/rust-lang/crates.io-index)",
- "redox_syscall 0.1.40 (registry+https://github.com/rust-lang/crates.io-index)",
- "winapi 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
+ "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)",
+ "libc 0.2.68 (registry+https://github.com/rust-lang/crates.io-index)",
+ "redox_syscall 0.1.56 (registry+https://github.com/rust-lang/crates.io-index)",
+ "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
-name = "stable_deref_trait"
-version = "1.1.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-
-[[package]]
name = "tokio"
-version = "0.1.11"
+version = "0.1.22"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
- "bytes 0.4.10 (registry+https://github.com/rust-lang/crates.io-index)",
- "futures 0.1.24 (registry+https://github.com/rust-lang/crates.io-index)",
- "mio 0.6.16 (registry+https://github.com/rust-lang/crates.io-index)",
- "tokio-codec 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
- "tokio-current-thread 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)",
- "tokio-executor 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)",
- "tokio-fs 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)",
- "tokio-io 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)",
- "tokio-reactor 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)",
- "tokio-tcp 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)",
- "tokio-threadpool 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)",
- "tokio-timer 0.2.7 (registry+https://github.com/rust-lang/crates.io-index)",
- "tokio-udp 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)",
- "tokio-uds 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
+ "bytes 0.4.12 (registry+https://github.com/rust-lang/crates.io-index)",
+ "futures 0.1.29 (registry+https://github.com/rust-lang/crates.io-index)",
+ "mio 0.6.21 (registry+https://github.com/rust-lang/crates.io-index)",
+ "num_cpus 1.12.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "tokio-codec 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)",
+ "tokio-current-thread 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)",
+ "tokio-executor 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)",
+ "tokio-fs 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)",
+ "tokio-io 0.1.13 (registry+https://github.com/rust-lang/crates.io-index)",
+ "tokio-reactor 0.1.12 (registry+https://github.com/rust-lang/crates.io-index)",
+ "tokio-sync 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)",
+ "tokio-tcp 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)",
+ "tokio-threadpool 0.1.18 (registry+https://github.com/rust-lang/crates.io-index)",
+ "tokio-timer 0.2.13 (registry+https://github.com/rust-lang/crates.io-index)",
+ "tokio-udp 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)",
+ "tokio-uds 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "tokio-codec"
-version = "0.1.1"
+version = "0.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
- "bytes 0.4.10 (registry+https://github.com/rust-lang/crates.io-index)",
- "futures 0.1.24 (registry+https://github.com/rust-lang/crates.io-index)",
- "tokio-io 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)",
+ "bytes 0.4.12 (registry+https://github.com/rust-lang/crates.io-index)",
+ "futures 0.1.29 (registry+https://github.com/rust-lang/crates.io-index)",
+ "tokio-io 0.1.13 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "tokio-current-thread"
-version = "0.1.3"
+version = "0.1.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
- "futures 0.1.24 (registry+https://github.com/rust-lang/crates.io-index)",
- "tokio-executor 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)",
+ "futures 0.1.29 (registry+https://github.com/rust-lang/crates.io-index)",
+ "tokio-executor 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "tokio-executor"
-version = "0.1.5"
+version = "0.1.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
- "futures 0.1.24 (registry+https://github.com/rust-lang/crates.io-index)",
+ "crossbeam-utils 0.7.2 (registry+https://github.com/rust-lang/crates.io-index)",
+ "futures 0.1.29 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "tokio-fs"
-version = "0.1.3"
+version = "0.1.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
- "futures 0.1.24 (registry+https://github.com/rust-lang/crates.io-index)",
- "tokio-io 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)",
- "tokio-threadpool 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)",
+ "futures 0.1.29 (registry+https://github.com/rust-lang/crates.io-index)",
+ "tokio-io 0.1.13 (registry+https://github.com/rust-lang/crates.io-index)",
+ "tokio-threadpool 0.1.18 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
@@ -413,181 +442,165 @@
version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
- "bytes 0.4.10 (registry+https://github.com/rust-lang/crates.io-index)",
- "futures 0.1.24 (registry+https://github.com/rust-lang/crates.io-index)",
- "tokio 0.1.11 (registry+https://github.com/rust-lang/crates.io-index)",
- "tokio-codec 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
- "tokio-io 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)",
- "tokio-process 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
- "tokio-uds 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
+ "bytes 0.4.12 (registry+https://github.com/rust-lang/crates.io-index)",
+ "futures 0.1.29 (registry+https://github.com/rust-lang/crates.io-index)",
+ "tokio 0.1.22 (registry+https://github.com/rust-lang/crates.io-index)",
+ "tokio-codec 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)",
+ "tokio-io 0.1.13 (registry+https://github.com/rust-lang/crates.io-index)",
+ "tokio-process 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)",
+ "tokio-uds 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "tokio-io"
-version = "0.1.9"
+version = "0.1.13"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
- "bytes 0.4.10 (registry+https://github.com/rust-lang/crates.io-index)",
- "futures 0.1.24 (registry+https://github.com/rust-lang/crates.io-index)",
- "log 0.4.5 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
-[[package]]
-name = "tokio-process"
-version = "0.2.2"
-source = "git+https://github.com/alexcrichton/tokio-process#2e805aad57e2639246cbf7394899bf7a27c18ebd"
-dependencies = [
- "futures 0.1.24 (registry+https://github.com/rust-lang/crates.io-index)",
- "libc 0.2.43 (registry+https://github.com/rust-lang/crates.io-index)",
- "mio 0.6.16 (registry+https://github.com/rust-lang/crates.io-index)",
- "mio-named-pipes 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)",
- "tokio-io 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)",
- "tokio-reactor 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)",
- "tokio-signal 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)",
- "winapi 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
+ "bytes 0.4.12 (registry+https://github.com/rust-lang/crates.io-index)",
+ "futures 0.1.29 (registry+https://github.com/rust-lang/crates.io-index)",
+ "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "tokio-process"
-version = "0.2.2"
+version = "0.2.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
- "futures 0.1.24 (registry+https://github.com/rust-lang/crates.io-index)",
- "libc 0.2.43 (registry+https://github.com/rust-lang/crates.io-index)",
- "mio 0.6.16 (registry+https://github.com/rust-lang/crates.io-index)",
+ "crossbeam-queue 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)",
+ "futures 0.1.29 (registry+https://github.com/rust-lang/crates.io-index)",
+ "lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "libc 0.2.68 (registry+https://github.com/rust-lang/crates.io-index)",
+ "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)",
+ "mio 0.6.21 (registry+https://github.com/rust-lang/crates.io-index)",
"mio-named-pipes 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)",
- "tokio-io 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)",
- "tokio-reactor 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)",
- "tokio-signal 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)",
- "winapi 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
+ "tokio-io 0.1.13 (registry+https://github.com/rust-lang/crates.io-index)",
+ "tokio-reactor 0.1.12 (registry+https://github.com/rust-lang/crates.io-index)",
+ "tokio-signal 0.2.9 (registry+https://github.com/rust-lang/crates.io-index)",
+ "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "tokio-reactor"
-version = "0.1.6"
+version = "0.1.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
- "crossbeam-utils 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)",
- "futures 0.1.24 (registry+https://github.com/rust-lang/crates.io-index)",
- "lazy_static 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
- "log 0.4.5 (registry+https://github.com/rust-lang/crates.io-index)",
- "mio 0.6.16 (registry+https://github.com/rust-lang/crates.io-index)",
- "num_cpus 1.8.0 (registry+https://github.com/rust-lang/crates.io-index)",
- "parking_lot 0.6.4 (registry+https://github.com/rust-lang/crates.io-index)",
- "slab 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)",
- "tokio-executor 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)",
- "tokio-io 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)",
+ "crossbeam-utils 0.7.2 (registry+https://github.com/rust-lang/crates.io-index)",
+ "futures 0.1.29 (registry+https://github.com/rust-lang/crates.io-index)",
+ "lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)",
+ "mio 0.6.21 (registry+https://github.com/rust-lang/crates.io-index)",
+ "num_cpus 1.12.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "parking_lot 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "slab 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)",
+ "tokio-executor 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)",
+ "tokio-io 0.1.13 (registry+https://github.com/rust-lang/crates.io-index)",
+ "tokio-sync 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "tokio-signal"
-version = "0.2.5"
+version = "0.2.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
- "futures 0.1.24 (registry+https://github.com/rust-lang/crates.io-index)",
- "libc 0.2.43 (registry+https://github.com/rust-lang/crates.io-index)",
- "mio 0.6.16 (registry+https://github.com/rust-lang/crates.io-index)",
+ "futures 0.1.29 (registry+https://github.com/rust-lang/crates.io-index)",
+ "libc 0.2.68 (registry+https://github.com/rust-lang/crates.io-index)",
+ "mio 0.6.21 (registry+https://github.com/rust-lang/crates.io-index)",
"mio-uds 0.6.7 (registry+https://github.com/rust-lang/crates.io-index)",
- "tokio-executor 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)",
- "tokio-io 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)",
- "tokio-reactor 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)",
- "winapi 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
+ "signal-hook-registry 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "tokio-executor 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)",
+ "tokio-io 0.1.13 (registry+https://github.com/rust-lang/crates.io-index)",
+ "tokio-reactor 0.1.12 (registry+https://github.com/rust-lang/crates.io-index)",
+ "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "tokio-sync"
+version = "0.1.8"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "fnv 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)",
+ "futures 0.1.29 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "tokio-tcp"
-version = "0.1.2"
+version = "0.1.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
- "bytes 0.4.10 (registry+https://github.com/rust-lang/crates.io-index)",
- "futures 0.1.24 (registry+https://github.com/rust-lang/crates.io-index)",
- "iovec 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)",
- "mio 0.6.16 (registry+https://github.com/rust-lang/crates.io-index)",
- "tokio-io 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)",
- "tokio-reactor 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)",
+ "bytes 0.4.12 (registry+https://github.com/rust-lang/crates.io-index)",
+ "futures 0.1.29 (registry+https://github.com/rust-lang/crates.io-index)",
+ "iovec 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)",
+ "mio 0.6.21 (registry+https://github.com/rust-lang/crates.io-index)",
+ "tokio-io 0.1.13 (registry+https://github.com/rust-lang/crates.io-index)",
+ "tokio-reactor 0.1.12 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "tokio-threadpool"
-version = "0.1.7"
+version = "0.1.18"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
- "crossbeam-deque 0.6.1 (registry+https://github.com/rust-lang/crates.io-index)",
- "crossbeam-utils 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)",
- "futures 0.1.24 (registry+https://github.com/rust-lang/crates.io-index)",
- "log 0.4.5 (registry+https://github.com/rust-lang/crates.io-index)",
- "num_cpus 1.8.0 (registry+https://github.com/rust-lang/crates.io-index)",
- "rand 0.5.5 (registry+https://github.com/rust-lang/crates.io-index)",
- "tokio-executor 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)",
+ "crossbeam-deque 0.7.3 (registry+https://github.com/rust-lang/crates.io-index)",
+ "crossbeam-queue 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "crossbeam-utils 0.7.2 (registry+https://github.com/rust-lang/crates.io-index)",
+ "futures 0.1.29 (registry+https://github.com/rust-lang/crates.io-index)",
+ "lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)",
+ "num_cpus 1.12.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "slab 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)",
+ "tokio-executor 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "tokio-timer"
-version = "0.2.7"
+version = "0.2.13"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
- "crossbeam-utils 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)",
- "futures 0.1.24 (registry+https://github.com/rust-lang/crates.io-index)",
- "slab 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)",
- "tokio-executor 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)",
+ "crossbeam-utils 0.7.2 (registry+https://github.com/rust-lang/crates.io-index)",
+ "futures 0.1.29 (registry+https://github.com/rust-lang/crates.io-index)",
+ "slab 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)",
+ "tokio-executor 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "tokio-udp"
-version = "0.1.2"
+version = "0.1.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
- "bytes 0.4.10 (registry+https://github.com/rust-lang/crates.io-index)",
- "futures 0.1.24 (registry+https://github.com/rust-lang/crates.io-index)",
- "log 0.4.5 (registry+https://github.com/rust-lang/crates.io-index)",
- "mio 0.6.16 (registry+https://github.com/rust-lang/crates.io-index)",
- "tokio-codec 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
- "tokio-io 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)",
- "tokio-reactor 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)",
+ "bytes 0.4.12 (registry+https://github.com/rust-lang/crates.io-index)",
+ "futures 0.1.29 (registry+https://github.com/rust-lang/crates.io-index)",
+ "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)",
+ "mio 0.6.21 (registry+https://github.com/rust-lang/crates.io-index)",
+ "tokio-codec 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)",
+ "tokio-io 0.1.13 (registry+https://github.com/rust-lang/crates.io-index)",
+ "tokio-reactor 0.1.12 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "tokio-uds"
-version = "0.2.2"
+version = "0.2.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
- "bytes 0.4.10 (registry+https://github.com/rust-lang/crates.io-index)",
- "futures 0.1.24 (registry+https://github.com/rust-lang/crates.io-index)",
- "iovec 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)",
- "libc 0.2.43 (registry+https://github.com/rust-lang/crates.io-index)",
- "log 0.4.5 (registry+https://github.com/rust-lang/crates.io-index)",
- "mio 0.6.16 (registry+https://github.com/rust-lang/crates.io-index)",
+ "bytes 0.4.12 (registry+https://github.com/rust-lang/crates.io-index)",
+ "futures 0.1.29 (registry+https://github.com/rust-lang/crates.io-index)",
+ "iovec 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)",
+ "libc 0.2.68 (registry+https://github.com/rust-lang/crates.io-index)",
+ "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)",
+ "mio 0.6.21 (registry+https://github.com/rust-lang/crates.io-index)",
"mio-uds 0.6.7 (registry+https://github.com/rust-lang/crates.io-index)",
- "tokio-io 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)",
- "tokio-reactor 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)",
+ "tokio-codec 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)",
+ "tokio-io 0.1.13 (registry+https://github.com/rust-lang/crates.io-index)",
+ "tokio-reactor 0.1.12 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
-name = "unreachable"
-version = "1.0.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
- "void 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
-[[package]]
-name = "version_check"
-version = "0.1.5"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-
-[[package]]
-name = "void"
-version = "1.0.2"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-
-[[package]]
name = "winapi"
version = "0.2.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "winapi"
-version = "0.3.6"
+version = "0.3.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"winapi-i686-pc-windows-gnu 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -619,70 +632,69 @@
]
[metadata]
-"checksum arrayvec 0.4.7 (registry+https://github.com/rust-lang/crates.io-index)" = "a1e964f9e24d588183fcb43503abda40d288c8657dfc27311516ce2f05675aef"
-"checksum bitflags 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)" = "228047a76f468627ca71776ecdebd732a3423081fcf5125585bcd7c49886ce12"
-"checksum byteorder 1.2.6 (registry+https://github.com/rust-lang/crates.io-index)" = "90492c5858dd7d2e78691cfb89f90d273a2800fc11d98f60786e5d87e2f83781"
-"checksum bytes 0.4.10 (registry+https://github.com/rust-lang/crates.io-index)" = "0ce55bd354b095246fc34caf4e9e242f5297a7fd938b090cadfea6eee614aa62"
-"checksum cc 1.0.25 (registry+https://github.com/rust-lang/crates.io-index)" = "f159dfd43363c4d08055a07703eb7a3406b0dac4d0584d96965a3262db3c9d16"
-"checksum cfg-if 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)" = "0c4e7bb64a8ebb0d856483e1e682ea3422f883c5f5615a90d51a2c82fe87fdd3"
+"checksum arc-swap 0.4.5 (registry+https://github.com/rust-lang/crates.io-index)" = "d663a8e9a99154b5fb793032533f6328da35e23aac63d5c152279aa8ba356825"
+"checksum autocfg 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "f8aac770f1885fd7e387acedd76065302551364496e46b3dd00860b2f8359b9d"
+"checksum bitflags 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "cf1de2fe8c75bc145a2f577add951f8134889b4795d47466a54a5c846d691693"
+"checksum byteorder 1.3.4 (registry+https://github.com/rust-lang/crates.io-index)" = "08c48aae112d48ed9f069b33538ea9e3e90aa263cfa3d1c24309612b1f7472de"
+"checksum bytes 0.4.12 (registry+https://github.com/rust-lang/crates.io-index)" = "206fdffcfa2df7cbe15601ef46c813fce0965eb3286db6b56c583b814b51c81c"
+"checksum cc 1.0.50 (registry+https://github.com/rust-lang/crates.io-index)" = "95e28fa049fda1c330bcf9d723be7663a899c4679724b34c81e9f5a326aab8cd"
+"checksum cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)" = "4785bdd1c96b2a846b2bd7cc02e86b6b3dbf14e7e53446c4f54c92a361040822"
"checksum cloudabi 0.0.3 (registry+https://github.com/rust-lang/crates.io-index)" = "ddfc5b9aa5d4507acaf872de71051dfd0e309860e88966e1051e462a077aac4f"
-"checksum crossbeam-deque 0.6.1 (registry+https://github.com/rust-lang/crates.io-index)" = "3486aefc4c0487b9cb52372c97df0a48b8c249514af1ee99703bf70d2f2ceda1"
-"checksum crossbeam-epoch 0.5.2 (registry+https://github.com/rust-lang/crates.io-index)" = "30fecfcac6abfef8771151f8be4abc9e4edc112c2bcb233314cafde2680536e9"
-"checksum crossbeam-utils 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)" = "677d453a17e8bd2b913fa38e8b9cf04bcdbb5be790aa294f2389661d72036015"
+"checksum crossbeam-deque 0.7.3 (registry+https://github.com/rust-lang/crates.io-index)" = "9f02af974daeee82218205558e51ec8768b48cf524bd01d550abe5573a608285"
+"checksum crossbeam-epoch 0.8.2 (registry+https://github.com/rust-lang/crates.io-index)" = "058ed274caafc1f60c4997b5fc07bf7dc7cca454af7c6e81edffe5f33f70dace"
+"checksum crossbeam-queue 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "7c979cd6cfe72335896575c6b5688da489e420d36a27a0b9eb0c73db574b4a4b"
+"checksum crossbeam-queue 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "c695eeca1e7173472a32221542ae469b3e9aac3a4fc81f7696bcad82029493db"
+"checksum crossbeam-utils 0.6.6 (registry+https://github.com/rust-lang/crates.io-index)" = "04973fa96e96579258a5091af6003abde64af786b860f18622b82e026cca60e6"
+"checksum crossbeam-utils 0.7.2 (registry+https://github.com/rust-lang/crates.io-index)" = "c3c7c73a2d1e9fc0886a08b93e98eb643461230d5f1925e4036204d5f2e261a8"
+"checksum fnv 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)" = "2fad85553e09a6f881f739c29f0b00b0f01357c743266d478b68951ce23285f3"
"checksum fuchsia-zircon 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "2e9763c69ebaae630ba35f74888db465e49e259ba1bc0eda7d06f4a067615d82"
"checksum fuchsia-zircon-sys 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "3dcaa9ae7725d12cdb85b3ad99a434db70b468c09ded17e012d86b5c1010f7a7"
-"checksum futures 0.1.24 (registry+https://github.com/rust-lang/crates.io-index)" = "0c84b40c7e2de99ffd70602db314a7a8c26b2b3d830e6f7f7a142a8860ab3ca4"
-"checksum iovec 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "dbe6e417e7d0975db6512b90796e8ce223145ac4e33c377e4a42882a0e88bb08"
+"checksum futures 0.1.29 (registry+https://github.com/rust-lang/crates.io-index)" = "1b980f2816d6ee8673b6517b52cb0e808a180efc92e5c19d02cdda79066703ef"
+"checksum hermit-abi 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)" = "725cf19794cf90aa94e65050cb4191ff5d8fa87a498383774c47b332e3af952e"
+"checksum iovec 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)" = "b2b3ea6ff95e175473f8ffe6a7eb7c00d054240321b84c57051175fe3c1e075e"
"checksum kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "7507624b29483431c0ba2d82aece8ca6cdba9382bff4ddd0f7490560c056098d"
-"checksum lazy_static 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ca488b89a5657b0a2ecd45b95609b3e848cf1755da332a0da46e2b2b1cb371a7"
-"checksum lazycell 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ddba4c30a78328befecec92fc94970e53b3ae385827d28620f0f5bb2493081e0"
-"checksum libc 0.2.43 (registry+https://github.com/rust-lang/crates.io-index)" = "76e3a3ef172f1a0b9a9ff0dd1491ae5e6c948b94479a3021819ba7d860c8645d"
-"checksum lock_api 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)" = "775751a3e69bde4df9b38dd00a1b5d6ac13791e4223d4a0506577f0dd27cfb7a"
-"checksum log 0.4.5 (registry+https://github.com/rust-lang/crates.io-index)" = "d4fcce5fa49cc693c312001daf1d13411c4a5283796bac1084299ea3e567113f"
-"checksum memoffset 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "0f9dc261e2b62d7a622bf416ea3c5245cdd5d9a7fcc428c0d06804dfce1775b3"
-"checksum mio 0.6.16 (registry+https://github.com/rust-lang/crates.io-index)" = "71646331f2619b1026cc302f87a2b8b648d5c6dd6937846a16cc8ce0f347f432"
+"checksum lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646"
+"checksum libc 0.2.68 (registry+https://github.com/rust-lang/crates.io-index)" = "dea0c0405123bba743ee3f91f49b1c7cfb684eef0da0a50110f758ccf24cdff0"
+"checksum lock_api 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "79b2de95ecb4691949fea4716ca53cdbcfccb2c612e19644a8bad05edcf9f47b"
+"checksum log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)" = "14b6052be84e6b71ab17edffc2eeabf5c2c3ae1fdb464aae35ac50c67a44e1f7"
+"checksum maybe-uninit 2.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "60302e4db3a61da70c0cb7991976248362f30319e88850c487b9b95bbf059e00"
+"checksum memoffset 0.5.4 (registry+https://github.com/rust-lang/crates.io-index)" = "b4fc2c02a7e374099d4ee95a193111f72d2110197fe200272371758f6c3643d8"
+"checksum mio 0.6.21 (registry+https://github.com/rust-lang/crates.io-index)" = "302dec22bcf6bae6dfb69c647187f4b4d0fb6f535521f7bc022430ce8e12008f"
"checksum mio-named-pipes 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)" = "f5e374eff525ce1c5b7687c4cef63943e7686524a387933ad27ca7ec43779cb3"
"checksum mio-uds 0.6.7 (registry+https://github.com/rust-lang/crates.io-index)" = "966257a94e196b11bb43aca423754d87429960a768de9414f3691d6957abf125"
"checksum miow 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "8c1f2f3b1cf331de6896aabf6e9d55dca90356cc9960cca7eaaf408a355ae919"
"checksum miow 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "396aa0f2003d7df8395cb93e09871561ccc3e785f0acb369170e8cc74ddf9226"
"checksum net2 0.2.33 (registry+https://github.com/rust-lang/crates.io-index)" = "42550d9fb7b6684a6d404d9fa7250c2eb2646df731d1c06afc06dcee9e1bcf88"
-"checksum nodrop 0.1.12 (registry+https://github.com/rust-lang/crates.io-index)" = "9a2228dca57108069a5262f2ed8bd2e82496d2e074a06d1ccc7ce1687b6ae0a2"
-"checksum num_cpus 1.8.0 (registry+https://github.com/rust-lang/crates.io-index)" = "c51a3322e4bca9d212ad9a158a02abc6934d005490c054a2778df73a70aa0a30"
-"checksum owning_ref 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "cdf84f41639e037b484f93433aa3897863b561ed65c6e59c7073d7c561710f37"
-"checksum parking_lot 0.6.4 (registry+https://github.com/rust-lang/crates.io-index)" = "f0802bff09003b291ba756dc7e79313e51cc31667e94afbe847def490424cde5"
-"checksum parking_lot_core 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "ad7f7e6ebdc79edff6fdcb87a55b620174f7a989e3eb31b65231f4af57f00b8c"
-"checksum rand 0.5.5 (registry+https://github.com/rust-lang/crates.io-index)" = "e464cd887e869cddcae8792a4ee31d23c7edd516700695608f5b98c67ee0131c"
-"checksum rand_core 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "edecf0f94da5551fc9b492093e30b041a891657db7940ee221f9d2f66e82eef2"
-"checksum redox_syscall 0.1.40 (registry+https://github.com/rust-lang/crates.io-index)" = "c214e91d3ecf43e9a4e41e578973adeb14b474f2bee858742d127af75a0112b1"
+"checksum num_cpus 1.12.0 (registry+https://github.com/rust-lang/crates.io-index)" = "46203554f085ff89c235cd12f7075f3233af9b11ed7c9e16dfe2560d03313ce6"
+"checksum parking_lot 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)" = "f842b1982eb6c2fe34036a4fbfb06dd185a3f5c8edfaacdf7d1ea10b07de6252"
+"checksum parking_lot_core 0.6.2 (registry+https://github.com/rust-lang/crates.io-index)" = "b876b1b9e7ac6e1a74a6da34d25c42e17e8862aa409cbbbdcfc8d86c6f3bc62b"
+"checksum redox_syscall 0.1.56 (registry+https://github.com/rust-lang/crates.io-index)" = "2439c63f3f6139d1b57529d16bc3b8bb855230c8efcc5d3a896c8bea7c3b1e84"
"checksum rustc_version 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)" = "138e3e0acb6c9fb258b19b67cb8abd63c00679d2851805ea151465464fe9030a"
-"checksum scopeguard 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "94258f53601af11e6a49f722422f6e3425c52b06245a5cf9bc09908b174f5e27"
+"checksum scopeguard 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd"
"checksum semver 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)" = "1d7eb9ef2c18661902cc47e535f9bc51b78acd254da71d375c2f6720d9a40403"
"checksum semver-parser 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "388a1df253eca08550bef6c72392cfe7c30914bf41df5269b68cbd6ff8f570a3"
-"checksum slab 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)" = "5f9776d6b986f77b35c6cf846c11ad986ff128fe0b2b63a3628e3755e8d3102d"
-"checksum smallvec 0.6.5 (registry+https://github.com/rust-lang/crates.io-index)" = "153ffa32fd170e9944f7e0838edf824a754ec4c1fc64746fcc9fe1f8fa602e5d"
-"checksum socket2 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)" = "c4d11a52082057d87cb5caa31ad812f4504b97ab44732cd8359df2e9ff9f48e7"
-"checksum stable_deref_trait 1.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "dba1a27d3efae4351c8051072d619e3ade2820635c3958d826bfea39d59b54c8"
-"checksum tokio 0.1.11 (registry+https://github.com/rust-lang/crates.io-index)" = "6e93c78d23cc61aa245a8acd2c4a79c4d7fa7fb5c3ca90d5737029f043a84895"
-"checksum tokio-codec 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "5c501eceaf96f0e1793cf26beb63da3d11c738c4a943fdf3746d81d64684c39f"
-"checksum tokio-current-thread 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "f90fcd90952f0a496d438a976afba8e5c205fb12123f813d8ab3aa1c8436638c"
-"checksum tokio-executor 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)" = "c117b6cf86bb730aab4834f10df96e4dd586eff2c3c27d3781348da49e255bde"
-"checksum tokio-fs 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "b5cbe4ca6e71cb0b62a66e4e6f53a8c06a6eefe46cc5f665ad6f274c9906f135"
+"checksum signal-hook-registry 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "94f478ede9f64724c5d173d7bb56099ec3e2d9fc2774aac65d34b8b890405f41"
+"checksum slab 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)" = "c111b5bd5695e56cffe5129854aa230b39c93a305372fdbb2668ca2394eea9f8"
+"checksum smallvec 0.6.13 (registry+https://github.com/rust-lang/crates.io-index)" = "f7b0758c52e15a8b5e3691eae6cc559f08eee9406e548a4477ba4e67770a82b6"
+"checksum socket2 0.3.11 (registry+https://github.com/rust-lang/crates.io-index)" = "e8b74de517221a2cb01a53349cf54182acdc31a074727d3079068448c0676d85"
+"checksum tokio 0.1.22 (registry+https://github.com/rust-lang/crates.io-index)" = "5a09c0b5bb588872ab2f09afa13ee6e9dac11e10a0ec9e8e3ba39a5a5d530af6"
+"checksum tokio-codec 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "25b2998660ba0e70d18684de5d06b70b70a3a747469af9dea7618cc59e75976b"
+"checksum tokio-current-thread 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)" = "b1de0e32a83f131e002238d7ccde18211c0a5397f60cbfffcb112868c2e0e20e"
+"checksum tokio-executor 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)" = "fb2d1b8f4548dbf5e1f7818512e9c406860678f29c300cdf0ebac72d1a3a1671"
+"checksum tokio-fs 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)" = "297a1206e0ca6302a0eed35b700d292b275256f596e2f3fea7729d5e629b6ff4"
"checksum tokio-hglib 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "8a138c3cb866c8a95ceddae44634bb159eefeebcdba45aec2158f8ad6c201e6d"
-"checksum tokio-io 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)" = "8b8a85fffbec3c5ab1ab62324570230dcd37ee5996a7859da5caf7b9d45e3e8c"
-"checksum tokio-process 0.2.2 (git+https://github.com/alexcrichton/tokio-process)" = "<none>"
-"checksum tokio-process 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "0832648d1ff7ca42c06ca45dc76797b92c56500de828e33c77276fa1449947b6"
-"checksum tokio-reactor 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)" = "4b26fd37f1125738b2170c80b551f69ff6fecb277e6e5ca885e53eec2b005018"
-"checksum tokio-signal 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)" = "b6893092932264944edee8486d54b578c7098bea794aedaf9bd7947b49e6b7bf"
-"checksum tokio-tcp 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "7ad235e9dadd126b2d47f6736f65aa1fdcd6420e66ca63f44177bc78df89f912"
-"checksum tokio-threadpool 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)" = "bbd8a8b911301c60cbfaa2a6588fb210e5c1038375b8bdecc47aa09a94c3c05f"
-"checksum tokio-timer 0.2.7 (registry+https://github.com/rust-lang/crates.io-index)" = "3a52f00c97fedb6d535d27f65cccb7181c8dd4c6edc3eda9ea93f6d45d05168e"
-"checksum tokio-udp 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "da941144b816d0dcda4db3a1ba87596e4df5e860a72b70783fe435891f80601c"
-"checksum tokio-uds 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "22e3aa6d1fcc19e635418dc0a30ab5bd65d347973d6f43f1a37bf8d9d1335fc9"
-"checksum unreachable 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "382810877fe448991dfc7f0dd6e3ae5d58088fd0ea5e35189655f84e6814fa56"
-"checksum version_check 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)" = "914b1a6776c4c929a602fafd8bc742e06365d4bcbe48c30f9cca5824f70dc9dd"
-"checksum void 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)" = "6a02e4885ed3bc0f2de90ea6dd45ebcbb66dacffe03547fadbb0eeae2770887d"
+"checksum tokio-io 0.1.13 (registry+https://github.com/rust-lang/crates.io-index)" = "57fc868aae093479e3131e3d165c93b1c7474109d13c90ec0dda2a1bbfff0674"
+"checksum tokio-process 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)" = "382d90f43fa31caebe5d3bc6cfd854963394fff3b8cb59d5146607aaae7e7e43"
+"checksum tokio-reactor 0.1.12 (registry+https://github.com/rust-lang/crates.io-index)" = "09bc590ec4ba8ba87652da2068d150dcada2cfa2e07faae270a5e0409aa51351"
+"checksum tokio-signal 0.2.9 (registry+https://github.com/rust-lang/crates.io-index)" = "d0c34c6e548f101053321cba3da7cbb87a610b85555884c41b07da2eb91aff12"
+"checksum tokio-sync 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)" = "edfe50152bc8164fcc456dab7891fa9bf8beaf01c5ee7e1dd43a397c3cf87dee"
+"checksum tokio-tcp 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)" = "98df18ed66e3b72e742f185882a9e201892407957e45fbff8da17ae7a7c51f72"
+"checksum tokio-threadpool 0.1.18 (registry+https://github.com/rust-lang/crates.io-index)" = "df720b6581784c118f0eb4310796b12b1d242a7eb95f716a8367855325c25f89"
+"checksum tokio-timer 0.2.13 (registry+https://github.com/rust-lang/crates.io-index)" = "93044f2d313c95ff1cb7809ce9a7a05735b012288a888b62d4434fd58c94f296"
+"checksum tokio-udp 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)" = "e2a0b10e610b39c38b031a2fcab08e4b82f16ece36504988dcbd81dbba650d82"
+"checksum tokio-uds 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)" = "5076db410d6fdc6523df7595447629099a1fdc47b3d9f896220780fa48faf798"
"checksum winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)" = "167dc9d6949a9b857f3451275e911c3f44255842c1f7a76f33c55103a909087a"
-"checksum winapi 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)" = "92c1eb33641e276cfa214a0522acad57be5c56b10cb348b3c5117db75f3ac4b0"
+"checksum winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)" = "8093091eeb260906a183e6ae1abdba2ef5ef2257a21801128899c3fc699229c6"
"checksum winapi-build 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "2d315eee3b34aca4797b2da6b13ed88266e6d612562a0c46390af8299fc699bc"
"checksum winapi-i686-pc-windows-gnu 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6"
"checksum winapi-x86_64-pc-windows-gnu 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f"
--- a/rust/chg/Cargo.toml Mon Apr 13 16:30:13 2020 +0300
+++ b/rust/chg/Cargo.toml Thu Apr 16 22:51:09 2020 +0530
@@ -4,6 +4,7 @@
authors = ["Yuya Nishihara <yuya@tcha.org>"]
description = "Client for Mercurial command server with cHg extension"
license = "GPL-2.0+"
+edition = "2018"
[dependencies]
bytes = "0.4"
@@ -12,8 +13,7 @@
log = { version = "0.4", features = ["std"] }
tokio = "0.1"
tokio-hglib = "0.2"
-# TODO: "^0.2.3" once released. we need AsRawFd support.
-tokio-process = { git = "https://github.com/alexcrichton/tokio-process" }
+tokio-process = "0.2.3"
tokio-timer = "0.2"
[build-dependencies]
--- a/rust/chg/build.rs Mon Apr 13 16:30:13 2020 +0300
+++ b/rust/chg/build.rs Thu Apr 16 22:51:09 2020 +0530
@@ -1,5 +1,3 @@
-extern crate cc;
-
fn main() {
cc::Build::new()
.warnings(true)
--- a/rust/chg/src/attachio.rs Mon Apr 13 16:30:13 2020 +0300
+++ b/rust/chg/src/attachio.rs Thu Apr 16 22:51:09 2020 +0530
@@ -5,15 +5,15 @@
//! Functions to send client-side fds over the command server channel.
-use futures::{Async, Future, Poll};
+use futures::{try_ready, Async, Future, Poll};
use std::io;
use std::os::unix::io::AsRawFd;
use tokio_hglib::codec::ChannelMessage;
use tokio_hglib::protocol::MessageLoop;
use tokio_hglib::{Client, Connection};
-use super::message;
-use super::procutil;
+use crate::message;
+use crate::procutil;
/// Future to send client-side fds over the command server channel.
///
--- a/rust/chg/src/clientext.rs Mon Apr 13 16:30:13 2020 +0300
+++ b/rust/chg/src/clientext.rs Thu Apr 16 22:51:09 2020 +0530
@@ -5,17 +5,20 @@
//! cHg extensions to command server client.
+use bytes::{BufMut, Bytes, BytesMut};
use std::ffi::OsStr;
+use std::io;
+use std::mem;
use std::os::unix::ffi::OsStrExt;
use std::os::unix::io::AsRawFd;
use std::path::Path;
-use tokio_hglib::protocol::OneShotRequest;
+use tokio_hglib::protocol::{OneShotQuery, OneShotRequest};
use tokio_hglib::{Client, Connection};
-use super::attachio::AttachIo;
-use super::message;
-use super::runcommand::ChgRunCommand;
-use super::uihandler::SystemHandler;
+use crate::attachio::AttachIo;
+use crate::message::{self, Instruction};
+use crate::runcommand::ChgRunCommand;
+use crate::uihandler::SystemHandler;
pub trait ChgClientExt<C>
where
@@ -29,16 +32,41 @@
E: AsRawFd;
/// Changes the working directory of the server.
- fn set_current_dir<P>(self, dir: P) -> OneShotRequest<C>
- where
- P: AsRef<Path>;
+ fn set_current_dir(self, dir: impl AsRef<Path>) -> OneShotRequest<C>;
+
+ /// Updates the environment variables of the server.
+ fn set_env_vars_os(
+ self,
+ vars: impl IntoIterator<Item = (impl AsRef<OsStr>, impl AsRef<OsStr>)>,
+ ) -> OneShotRequest<C>;
+
+ /// Changes the process title of the server.
+ fn set_process_name(self, name: impl AsRef<OsStr>) -> OneShotRequest<C>;
+
+ /// Changes the umask of the server process.
+ fn set_umask(self, mask: u32) -> OneShotRequest<C>;
/// Runs the specified Mercurial command with cHg extension.
- fn run_command_chg<I, P, H>(self, handler: H, args: I) -> ChgRunCommand<C, H>
+ fn run_command_chg<H>(
+ self,
+ handler: H,
+ args: impl IntoIterator<Item = impl AsRef<OsStr>>,
+ ) -> ChgRunCommand<C, H>
where
- I: IntoIterator<Item = P>,
- P: AsRef<OsStr>,
H: SystemHandler;
+
+ /// Validates if the server can run Mercurial commands with the expected
+ /// configuration.
+ ///
+ /// The `args` should contain early command arguments such as `--config`
+ /// and `-R`.
+ ///
+ /// Client-side environment must be sent prior to this request, by
+ /// `set_current_dir()` and `set_env_vars_os()`.
+ fn validate(
+ self,
+ args: impl IntoIterator<Item = impl AsRef<OsStr>>,
+ ) -> OneShotQuery<C, fn(Bytes) -> io::Result<Vec<Instruction>>>;
}
impl<C> ChgClientExt<C> for Client<C>
@@ -54,19 +82,47 @@
AttachIo::with_client(self, stdin, stdout, Some(stderr))
}
- fn set_current_dir<P>(self, dir: P) -> OneShotRequest<C>
- where
- P: AsRef<Path>,
- {
+ fn set_current_dir(self, dir: impl AsRef<Path>) -> OneShotRequest<C> {
OneShotRequest::start_with_args(self, b"chdir", dir.as_ref().as_os_str().as_bytes())
}
- fn run_command_chg<I, P, H>(self, handler: H, args: I) -> ChgRunCommand<C, H>
+ fn set_env_vars_os(
+ self,
+ vars: impl IntoIterator<Item = (impl AsRef<OsStr>, impl AsRef<OsStr>)>,
+ ) -> OneShotRequest<C> {
+ OneShotRequest::start_with_args(self, b"setenv", message::pack_env_vars_os(vars))
+ }
+
+ fn set_process_name(self, name: impl AsRef<OsStr>) -> OneShotRequest<C> {
+ OneShotRequest::start_with_args(self, b"setprocname", name.as_ref().as_bytes())
+ }
+
+ fn set_umask(self, mask: u32) -> OneShotRequest<C> {
+ let mut args = BytesMut::with_capacity(mem::size_of_val(&mask));
+ args.put_u32_be(mask);
+ OneShotRequest::start_with_args(self, b"setumask2", args)
+ }
+
+ fn run_command_chg<H>(
+ self,
+ handler: H,
+ args: impl IntoIterator<Item = impl AsRef<OsStr>>,
+ ) -> ChgRunCommand<C, H>
where
- I: IntoIterator<Item = P>,
- P: AsRef<OsStr>,
H: SystemHandler,
{
ChgRunCommand::with_client(self, handler, message::pack_args_os(args))
}
+
+ fn validate(
+ self,
+ args: impl IntoIterator<Item = impl AsRef<OsStr>>,
+ ) -> OneShotQuery<C, fn(Bytes) -> io::Result<Vec<Instruction>>> {
+ OneShotQuery::start_with_args(
+ self,
+ b"validate",
+ message::pack_args_os(args),
+ message::parse_instructions,
+ )
+ }
}
--- a/rust/chg/src/lib.rs Mon Apr 13 16:30:13 2020 +0300
+++ b/rust/chg/src/lib.rs Thu Apr 16 22:51:09 2020 +0530
@@ -3,14 +3,6 @@
// This software may be used and distributed according to the terms of the
// GNU General Public License version 2 or any later version.
-extern crate bytes;
-#[macro_use]
-extern crate futures;
-extern crate libc;
-extern crate tokio;
-extern crate tokio_hglib;
-extern crate tokio_process;
-
mod attachio;
mod clientext;
pub mod locator;
--- a/rust/chg/src/locator.rs Mon Apr 13 16:30:13 2020 +0300
+++ b/rust/chg/src/locator.rs Thu Apr 16 22:51:09 2020 +0530
@@ -5,6 +5,8 @@
//! Utility for locating command-server process.
+use futures::future::{self, Either, Loop};
+use log::debug;
use std::env;
use std::ffi::{OsStr, OsString};
use std::fs::{self, DirBuilder};
@@ -12,19 +14,36 @@
use std::os::unix::ffi::{OsStrExt, OsStringExt};
use std::os::unix::fs::{DirBuilderExt, MetadataExt};
use std::path::{Path, PathBuf};
-use std::process;
+use std::process::{self, Command};
use std::time::Duration;
+use tokio::prelude::*;
+use tokio_hglib::UnixClient;
+use tokio_process::{Child, CommandExt};
+use tokio_timer;
-use super::procutil;
+use crate::clientext::ChgClientExt;
+use crate::message::{Instruction, ServerSpec};
+use crate::procutil;
+
+const REQUIRED_SERVER_CAPABILITIES: &[&str] = &[
+ "attachio",
+ "chdir",
+ "runcommand",
+ "setenv",
+ "setumask2",
+ "validate",
+];
/// Helper to connect to and spawn a server process.
#[derive(Clone, Debug)]
pub struct Locator {
hg_command: OsString,
+ hg_early_args: Vec<OsString>,
current_dir: PathBuf,
env_vars: Vec<(OsString, OsString)>,
process_id: u32,
base_sock_path: PathBuf,
+ redirect_sock_path: Option<PathBuf>,
timeout: Duration,
}
@@ -36,10 +55,12 @@
pub fn prepare_from_env() -> io::Result<Locator> {
Ok(Locator {
hg_command: default_hg_command(),
+ hg_early_args: Vec::new(),
current_dir: env::current_dir()?,
env_vars: env::vars_os().collect(),
process_id: process::id(),
base_sock_path: prepare_server_socket_path()?,
+ redirect_sock_path: None,
timeout: default_timeout(),
})
}
@@ -47,18 +68,244 @@
/// Temporary socket path for this client process.
fn temp_sock_path(&self) -> PathBuf {
let src = self.base_sock_path.as_os_str().as_bytes();
- let mut buf = Vec::with_capacity(src.len() + 6);
+ let mut buf = Vec::with_capacity(src.len() + 6); // "{src}.{pid}".len()
buf.extend_from_slice(src);
buf.extend_from_slice(format!(".{}", self.process_id).as_bytes());
OsString::from_vec(buf).into()
}
+
+ /// Specifies the arguments to be passed to the server at start.
+ pub fn set_early_args(&mut self, args: impl IntoIterator<Item = impl AsRef<OsStr>>) {
+ self.hg_early_args = args.into_iter().map(|a| a.as_ref().to_owned()).collect();
+ }
+
+ /// Connects to the server.
+ ///
+ /// The server process will be spawned if not running.
+ pub fn connect(self) -> impl Future<Item = (Self, UnixClient), Error = io::Error> {
+ future::loop_fn((self, 0), |(loc, cnt)| {
+ if cnt < 10 {
+ let fut = loc
+ .try_connect()
+ .and_then(|(loc, client)| {
+ client
+ .validate(&loc.hg_early_args)
+ .map(|(client, instructions)| (loc, client, instructions))
+ })
+ .and_then(move |(loc, client, instructions)| {
+ loc.run_instructions(client, instructions, cnt)
+ });
+ Either::A(fut)
+ } else {
+ let msg = format!(
+ concat!(
+ "too many redirections.\n",
+ "Please make sure {:?} is not a wrapper which ",
+ "changes sensitive environment variables ",
+ "before executing hg. If you have to use a ",
+ "wrapper, wrap chg instead of hg.",
+ ),
+ loc.hg_command
+ );
+ Either::B(future::err(io::Error::new(io::ErrorKind::Other, msg)))
+ }
+ })
+ }
+
+ /// Runs instructions received from the server.
+ fn run_instructions(
+ mut self,
+ client: UnixClient,
+ instructions: Vec<Instruction>,
+ cnt: usize,
+ ) -> io::Result<Loop<(Self, UnixClient), (Self, usize)>> {
+ let mut reconnect = false;
+ for inst in instructions {
+ debug!("instruction: {:?}", inst);
+ match inst {
+ Instruction::Exit(_) => {
+ // Just returns the current connection to run the
+ // unparsable command and report the error
+ return Ok(Loop::Break((self, client)));
+ }
+ Instruction::Reconnect => {
+ reconnect = true;
+ }
+ Instruction::Redirect(path) => {
+ if path.parent() != self.base_sock_path.parent() {
+ let msg = format!(
+ "insecure redirect instruction from server: {}",
+ path.display()
+ );
+ return Err(io::Error::new(io::ErrorKind::InvalidData, msg));
+ }
+ self.redirect_sock_path = Some(path);
+ reconnect = true;
+ }
+ Instruction::Unlink(path) => {
+ if path.parent() != self.base_sock_path.parent() {
+ let msg = format!(
+ "insecure unlink instruction from server: {}",
+ path.display()
+ );
+ return Err(io::Error::new(io::ErrorKind::InvalidData, msg));
+ }
+ fs::remove_file(path).unwrap_or(()); // may race
+ }
+ }
+ }
+
+ if reconnect {
+ Ok(Loop::Continue((self, cnt + 1)))
+ } else {
+ Ok(Loop::Break((self, client)))
+ }
+ }
+
+ /// Tries to connect to the existing server, or spawns new if not running.
+ fn try_connect(self) -> impl Future<Item = (Self, UnixClient), Error = io::Error> {
+ let sock_path = self
+ .redirect_sock_path
+ .as_ref()
+ .unwrap_or(&self.base_sock_path)
+ .clone();
+ debug!("try connect to {}", sock_path.display());
+ UnixClient::connect(sock_path)
+ .then(|res| {
+ match res {
+ Ok(client) => Either::A(future::ok((self, client))),
+ Err(_) => {
+ // Prevent us from being re-connected to the outdated
+ // master server: We were told by the server to redirect
+ // to redirect_sock_path, which didn't work. We do not
+ // want to connect to the same master server again
+ // because it would probably tell us the same thing.
+ if self.redirect_sock_path.is_some() {
+ fs::remove_file(&self.base_sock_path).unwrap_or(());
+ // may race
+ }
+ Either::B(self.spawn_connect())
+ }
+ }
+ })
+ .and_then(|(loc, client)| {
+ check_server_capabilities(client.server_spec())?;
+ Ok((loc, client))
+ })
+ .and_then(|(loc, client)| {
+ // It's purely optional, and the server might not support this command.
+ if client.server_spec().capabilities.contains("setprocname") {
+ let fut = client
+ .set_process_name(format!("chg[worker/{}]", loc.process_id))
+ .map(|client| (loc, client));
+ Either::A(fut)
+ } else {
+ Either::B(future::ok((loc, client)))
+ }
+ })
+ .and_then(|(loc, client)| {
+ client
+ .set_current_dir(&loc.current_dir)
+ .map(|client| (loc, client))
+ })
+ .and_then(|(loc, client)| {
+ client
+ .set_env_vars_os(loc.env_vars.iter().cloned())
+ .map(|client| (loc, client))
+ })
+ }
+
+ /// Spawns new server process and connects to it.
+ ///
+ /// The server will be spawned at the current working directory, then
+ /// chdir to "/", so that the server will load configs from the target
+ /// repository.
+ fn spawn_connect(self) -> impl Future<Item = (Self, UnixClient), Error = io::Error> {
+ let sock_path = self.temp_sock_path();
+ debug!("start cmdserver at {}", sock_path.display());
+ Command::new(&self.hg_command)
+ .arg("serve")
+ .arg("--cmdserver")
+ .arg("chgunix")
+ .arg("--address")
+ .arg(&sock_path)
+ .arg("--daemon-postexec")
+ .arg("chdir:/")
+ .args(&self.hg_early_args)
+ .current_dir(&self.current_dir)
+ .env_clear()
+ .envs(self.env_vars.iter().cloned())
+ .env("CHGINTERNALMARK", "")
+ .spawn_async()
+ .into_future()
+ .and_then(|server| self.connect_spawned(server, sock_path))
+ .and_then(|(loc, client, sock_path)| {
+ debug!(
+ "rename {} to {}",
+ sock_path.display(),
+ loc.base_sock_path.display()
+ );
+ fs::rename(&sock_path, &loc.base_sock_path)?;
+ Ok((loc, client))
+ })
+ }
+
+ /// Tries to connect to the just spawned server repeatedly until timeout
+ /// exceeded.
+ fn connect_spawned(
+ self,
+ server: Child,
+ sock_path: PathBuf,
+ ) -> impl Future<Item = (Self, UnixClient, PathBuf), Error = io::Error> {
+ debug!("try connect to {} repeatedly", sock_path.display());
+ let connect = future::loop_fn(sock_path, |sock_path| {
+ UnixClient::connect(sock_path.clone()).then(|res| {
+ match res {
+ Ok(client) => Either::A(future::ok(Loop::Break((client, sock_path)))),
+ Err(_) => {
+ // try again with slight delay
+ let fut = tokio_timer::sleep(Duration::from_millis(10))
+ .map(|()| Loop::Continue(sock_path))
+ .map_err(|err| io::Error::new(io::ErrorKind::Other, err));
+ Either::B(fut)
+ }
+ }
+ })
+ });
+
+ // waits for either connection established or server failed to start
+ connect
+ .select2(server)
+ .map_err(|res| res.split().0)
+ .timeout(self.timeout)
+ .map_err(|err| {
+ err.into_inner().unwrap_or_else(|| {
+ io::Error::new(
+ io::ErrorKind::TimedOut,
+ "timed out while connecting to server",
+ )
+ })
+ })
+ .and_then(|res| {
+ match res {
+ Either::A(((client, sock_path), server)) => {
+ server.forget(); // continue to run in background
+ Ok((self, client, sock_path))
+ }
+ Either::B((st, _)) => Err(io::Error::new(
+ io::ErrorKind::Other,
+ format!("server exited too early: {}", st),
+ )),
+ }
+ })
+ }
}
/// Determines the server socket to connect to.
///
/// If no `$CHGSOCKNAME` is specified, the socket directory will be created
/// as necessary.
-pub fn prepare_server_socket_path() -> io::Result<PathBuf> {
+fn prepare_server_socket_path() -> io::Result<PathBuf> {
if let Some(s) = env::var_os("CHGSOCKNAME") {
Ok(PathBuf::from(s))
} else {
@@ -107,10 +354,7 @@
/// Creates a directory which the other users cannot access to.
///
/// If the directory already exists, tests its permission.
-fn create_secure_dir<P>(path: P) -> io::Result<()>
-where
- P: AsRef<Path>,
-{
+fn create_secure_dir(path: impl AsRef<Path>) -> io::Result<()> {
DirBuilder::new()
.mode(0o700)
.create(path.as_ref())
@@ -134,3 +378,113 @@
Err(io::Error::new(io::ErrorKind::Other, "insecure directory"))
}
}
+
+fn check_server_capabilities(spec: &ServerSpec) -> io::Result<()> {
+ let unsupported: Vec<_> = REQUIRED_SERVER_CAPABILITIES
+ .iter()
+ .cloned()
+ .filter(|&s| !spec.capabilities.contains(s))
+ .collect();
+ if unsupported.is_empty() {
+ Ok(())
+ } else {
+ let msg = format!(
+ "insufficient server capabilities: {}",
+ unsupported.join(", ")
+ );
+ Err(io::Error::new(io::ErrorKind::Other, msg))
+ }
+}
+
+/// Collects arguments which need to be passed to the server at start.
+pub fn collect_early_args(args: impl IntoIterator<Item = impl AsRef<OsStr>>) -> Vec<OsString> {
+ let mut args_iter = args.into_iter();
+ let mut early_args = Vec::new();
+ while let Some(arg) = args_iter.next() {
+ let argb = arg.as_ref().as_bytes();
+ if argb == b"--" {
+ break;
+ } else if argb.starts_with(b"--") {
+ let mut split = argb[2..].splitn(2, |&c| c == b'=');
+ match split.next().unwrap() {
+ b"traceback" => {
+ if split.next().is_none() {
+ early_args.push(arg.as_ref().to_owned());
+ }
+ }
+ b"config" | b"cwd" | b"repo" | b"repository" => {
+ if split.next().is_some() {
+ // --<flag>=<val>
+ early_args.push(arg.as_ref().to_owned());
+ } else {
+ // --<flag> <val>
+ args_iter.next().map(|val| {
+ early_args.push(arg.as_ref().to_owned());
+ early_args.push(val.as_ref().to_owned());
+ });
+ }
+ }
+ _ => {}
+ }
+ } else if argb.starts_with(b"-R") {
+ if argb.len() > 2 {
+ // -R<val>
+ early_args.push(arg.as_ref().to_owned());
+ } else {
+ // -R <val>
+ args_iter.next().map(|val| {
+ early_args.push(arg.as_ref().to_owned());
+ early_args.push(val.as_ref().to_owned());
+ });
+ }
+ }
+ }
+
+ early_args
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+
+ #[test]
+ fn collect_early_args_some() {
+ assert!(collect_early_args(&[] as &[&OsStr]).is_empty());
+ assert!(collect_early_args(&["log"]).is_empty());
+ assert_eq!(
+ collect_early_args(&["log", "-Ra", "foo"]),
+ os_string_vec_from(&[b"-Ra"])
+ );
+ assert_eq!(
+ collect_early_args(&["log", "-R", "repo", "", "--traceback", "a"]),
+ os_string_vec_from(&[b"-R", b"repo", b"--traceback"])
+ );
+ assert_eq!(
+ collect_early_args(&["log", "--config", "diff.git=1", "-q"]),
+ os_string_vec_from(&[b"--config", b"diff.git=1"])
+ );
+ assert_eq!(
+ collect_early_args(&["--cwd=..", "--repository", "r", "log"]),
+ os_string_vec_from(&[b"--cwd=..", b"--repository", b"r"])
+ );
+ assert_eq!(
+ collect_early_args(&["log", "--repo=r", "--repos", "a"]),
+ os_string_vec_from(&[b"--repo=r"])
+ );
+ }
+
+ #[test]
+ fn collect_early_args_orphaned() {
+ assert!(collect_early_args(&["log", "-R"]).is_empty());
+ assert!(collect_early_args(&["log", "--config"]).is_empty());
+ }
+
+ #[test]
+ fn collect_early_args_unwanted_value() {
+ assert!(collect_early_args(&["log", "--traceback="]).is_empty());
+ }
+
+ fn os_string_vec_from(v: &[&[u8]]) -> Vec<OsString> {
+ v.iter().map(|s| OsStr::from_bytes(s).to_owned()).collect()
+ }
+}
--- a/rust/chg/src/main.rs Mon Apr 13 16:30:13 2020 +0300
+++ b/rust/chg/src/main.rs Thu Apr 16 22:51:09 2020 +0530
@@ -3,13 +3,7 @@
// This software may be used and distributed according to the terms of the
// GNU General Public License version 2 or any later version.
-extern crate chg;
-extern crate futures;
-extern crate log;
-extern crate tokio;
-extern crate tokio_hglib;
-
-use chg::locator;
+use chg::locator::{self, Locator};
use chg::procutil;
use chg::{ChgClientExt, ChgUiHandler};
use futures::sync::oneshot;
@@ -18,7 +12,6 @@
use std::process;
use std::time::Instant;
use tokio::prelude::*;
-use tokio_hglib::UnixClient;
struct DebugLogger {
start: Instant,
@@ -64,21 +57,25 @@
log::set_max_level(log::LevelFilter::Debug);
}
- let code = run().unwrap_or_else(|err| {
+ // TODO: add loop detection by $CHGINTERNALMARK
+
+ let umask = unsafe { procutil::get_umask() }; // not thread safe
+ let code = run(umask).unwrap_or_else(|err| {
writeln!(io::stderr(), "chg: abort: {}", err).unwrap_or(());
255
});
process::exit(code);
}
-fn run() -> io::Result<i32> {
- let current_dir = env::current_dir()?;
- let sock_path = locator::prepare_server_socket_path()?;
+fn run(umask: u32) -> io::Result<i32> {
+ let mut loc = Locator::prepare_from_env()?;
+ loc.set_early_args(locator::collect_early_args(env::args_os().skip(1)));
let handler = ChgUiHandler::new();
let (result_tx, result_rx) = oneshot::channel();
- let fut = UnixClient::connect(sock_path)
- .and_then(|client| client.set_current_dir(current_dir))
- .and_then(|client| client.attach_io(io::stdin(), io::stdout(), io::stderr()))
+ let fut = loc
+ .connect()
+ .and_then(|(_, client)| client.attach_io(io::stdin(), io::stdout(), io::stderr()))
+ .and_then(move |client| client.set_umask(umask))
.and_then(|client| {
let pid = client.server_spec().process_id.unwrap();
let pgid = client.server_spec().process_group_id;
--- a/rust/chg/src/message.rs Mon Apr 13 16:30:13 2020 +0300
+++ b/rust/chg/src/message.rs Thu Apr 16 22:51:09 2020 +0530
@@ -5,11 +5,12 @@
//! Utility for parsing and building command-server messages.
-use bytes::Bytes;
+use bytes::{BufMut, Bytes, BytesMut};
use std::error;
use std::ffi::{OsStr, OsString};
use std::io;
use std::os::unix::ffi::OsStrExt;
+use std::path::PathBuf;
pub use tokio_hglib::message::*; // re-exports
@@ -67,17 +68,84 @@
}
}
-fn decode_latin1<S>(s: S) -> String
-where
- S: AsRef<[u8]>,
-{
+/// Client-side instruction requested by the server.
+#[derive(Clone, Debug, Eq, PartialEq)]
+pub enum Instruction {
+ Exit(i32),
+ Reconnect,
+ Redirect(PathBuf),
+ Unlink(PathBuf),
+}
+
+/// Parses validation result into instructions.
+pub fn parse_instructions(data: Bytes) -> io::Result<Vec<Instruction>> {
+ let mut instructions = Vec::new();
+ for l in data.split(|&c| c == b'\0') {
+ if l.is_empty() {
+ continue;
+ }
+ let mut s = l.splitn(2, |&c| c == b' ');
+ let inst = match (s.next().unwrap(), s.next()) {
+ (b"exit", Some(arg)) => decode_latin1(arg)
+ .parse()
+ .map(Instruction::Exit)
+ .map_err(|_| new_parse_error(format!("invalid exit code: {:?}", arg)))?,
+ (b"reconnect", None) => Instruction::Reconnect,
+ (b"redirect", Some(arg)) => {
+ Instruction::Redirect(OsStr::from_bytes(arg).to_owned().into())
+ }
+ (b"unlink", Some(arg)) => Instruction::Unlink(OsStr::from_bytes(arg).to_owned().into()),
+ _ => {
+ return Err(new_parse_error(format!("unknown command: {:?}", l)));
+ }
+ };
+ instructions.push(inst);
+ }
+ Ok(instructions)
+}
+
+// allocate large buffer as environment variables can be quite long
+const INITIAL_PACKED_ENV_VARS_CAPACITY: usize = 4096;
+
+/// Packs environment variables of platform encoding into bytes.
+///
+/// # Panics
+///
+/// Panics if key or value contains `\0` character, or key contains '='
+/// character.
+pub fn pack_env_vars_os(
+ vars: impl IntoIterator<Item = (impl AsRef<OsStr>, impl AsRef<OsStr>)>,
+) -> Bytes {
+ let mut vars_iter = vars.into_iter();
+ if let Some((k, v)) = vars_iter.next() {
+ let mut dst = BytesMut::with_capacity(INITIAL_PACKED_ENV_VARS_CAPACITY);
+ pack_env_into(&mut dst, k.as_ref(), v.as_ref());
+ for (k, v) in vars_iter {
+ dst.reserve(1);
+ dst.put_u8(b'\0');
+ pack_env_into(&mut dst, k.as_ref(), v.as_ref());
+ }
+ dst.freeze()
+ } else {
+ Bytes::new()
+ }
+}
+
+fn pack_env_into(dst: &mut BytesMut, k: &OsStr, v: &OsStr) {
+ assert!(!k.as_bytes().contains(&0), "key shouldn't contain NUL");
+ assert!(!k.as_bytes().contains(&b'='), "key shouldn't contain '='");
+ assert!(!v.as_bytes().contains(&0), "value shouldn't contain NUL");
+ dst.reserve(k.as_bytes().len() + 1 + v.as_bytes().len());
+ dst.put_slice(k.as_bytes());
+ dst.put_u8(b'=');
+ dst.put_slice(v.as_bytes());
+}
+
+fn decode_latin1(s: impl AsRef<[u8]>) -> String {
s.as_ref().iter().map(|&c| c as char).collect()
}
-fn new_parse_error<E>(error: E) -> io::Error
-where
- E: Into<Box<error::Error + Send + Sync>>,
-{
+fn new_parse_error(error: impl Into<Box<dyn error::Error + Send + Sync>>) -> io::Error {
io::Error::new(io::ErrorKind::InvalidData, error)
}
@@ -85,6 +153,7 @@
mod tests {
use super::*;
use std::os::unix::ffi::OsStringExt;
+ use std::panic;
#[test]
fn parse_command_spec_good() {
@@ -127,7 +196,114 @@
assert!(parse_command_spec(Bytes::from_static(b"paper\0less")).is_err());
}
+ #[test]
+ fn parse_instructions_good() {
+ let src = [
+ b"exit 123".as_ref(),
+ b"reconnect".as_ref(),
+ b"redirect /whatever".as_ref(),
+ b"unlink /someother".as_ref(),
+ ]
+ .join(&0);
+ let insts = vec![
+ Instruction::Exit(123),
+ Instruction::Reconnect,
+ Instruction::Redirect(path_buf_from(b"/whatever")),
+ Instruction::Unlink(path_buf_from(b"/someother")),
+ ];
+ assert_eq!(parse_instructions(Bytes::from(src)).unwrap(), insts);
+ }
+
+ #[test]
+ fn parse_instructions_empty() {
+ assert_eq!(parse_instructions(Bytes::new()).unwrap(), vec![]);
+ assert_eq!(
+ parse_instructions(Bytes::from_static(b"\0")).unwrap(),
+ vec![]
+ );
+ }
+
+ #[test]
+ fn parse_instructions_malformed_exit_code() {
+ assert!(parse_instructions(Bytes::from_static(b"exit foo")).is_err());
+ }
+
+ #[test]
+ fn parse_instructions_missing_argument() {
+ assert!(parse_instructions(Bytes::from_static(b"exit")).is_err());
+ assert!(parse_instructions(Bytes::from_static(b"redirect")).is_err());
+ assert!(parse_instructions(Bytes::from_static(b"unlink")).is_err());
+ }
+
+ #[test]
+ fn parse_instructions_unknown_command() {
+ assert!(parse_instructions(Bytes::from_static(b"quit 0")).is_err());
+ }
+
+ #[test]
+ fn pack_env_vars_os_good() {
+ assert_eq!(
+ pack_env_vars_os(vec![] as Vec<(OsString, OsString)>),
+ Bytes::new()
+ );
+ assert_eq!(
+ pack_env_vars_os(vec![os_string_pair_from(b"FOO", b"bar")]),
+ Bytes::from_static(b"FOO=bar")
+ );
+ assert_eq!(
+ pack_env_vars_os(vec![
+ os_string_pair_from(b"FOO", b""),
+ os_string_pair_from(b"BAR", b"baz")
+ ]),
+ Bytes::from_static(b"FOO=\0BAR=baz")
+ );
+ }
+
+ #[test]
+ fn pack_env_vars_os_large_key() {
+ let mut buf = vec![b'A'; INITIAL_PACKED_ENV_VARS_CAPACITY];
+ let envs = vec![os_string_pair_from(&buf, b"")];
+ buf.push(b'=');
+ assert_eq!(pack_env_vars_os(envs), Bytes::from(buf));
+ }
+
+ #[test]
+ fn pack_env_vars_os_large_value() {
+ let mut buf = vec![b'A', b'='];
+ buf.resize(INITIAL_PACKED_ENV_VARS_CAPACITY + 1, b'a');
+ let envs = vec![os_string_pair_from(&buf[..1], &buf[2..])];
+ assert_eq!(pack_env_vars_os(envs), Bytes::from(buf));
+ }
+
+ #[test]
+ fn pack_env_vars_os_nul_eq() {
+ assert!(panic::catch_unwind(|| {
+ pack_env_vars_os(vec![os_string_pair_from(b"\0", b"")])
+ })
+ .is_err());
+ assert!(panic::catch_unwind(|| {
+ pack_env_vars_os(vec![os_string_pair_from(b"FOO", b"\0bar")])
+ })
+ .is_err());
+ assert!(panic::catch_unwind(|| {
+ pack_env_vars_os(vec![os_string_pair_from(b"FO=", b"bar")])
+ })
+ .is_err());
+ assert_eq!(
+ pack_env_vars_os(vec![os_string_pair_from(b"FOO", b"=ba")]),
+ Bytes::from_static(b"FOO==ba")
+ );
+ }
+
fn os_string_from(s: &[u8]) -> OsString {
OsString::from_vec(s.to_vec())
}
+
+ fn os_string_pair_from(k: &[u8], v: &[u8]) -> (OsString, OsString) {
+ (os_string_from(k), os_string_from(v))
+ }
+
+ fn path_buf_from(s: &[u8]) -> PathBuf {
+ os_string_from(s).into()
+ }
}
--- a/rust/chg/src/procutil.rs Mon Apr 13 16:30:13 2020 +0300
+++ b/rust/chg/src/procutil.rs Thu Apr 16 22:51:09 2020 +0530
@@ -25,6 +25,19 @@
unsafe { libc::geteuid() }
}
+/// Returns the umask of the current process.
+///
+/// # Safety
+///
+/// This is unsafe because the umask value is temporarily changed, and
+/// the change can be observed from other threads. Don't call this in
+/// a multi-threaded context.
+pub unsafe fn get_umask() -> u32 {
+ let mask = libc::umask(0);
+ libc::umask(mask);
+ mask
+}
+
/// Changes the given fd to blocking mode.
pub fn set_blocking_fd(fd: RawFd) -> io::Result<()> {
let flags = unsafe { libc::fcntl(fd, libc::F_GETFL) };
--- a/rust/chg/src/runcommand.rs Mon Apr 13 16:30:13 2020 +0300
+++ b/rust/chg/src/runcommand.rs Thu Apr 16 22:51:09 2020 +0530
@@ -15,9 +15,9 @@
use tokio_hglib::protocol::MessageLoop;
use tokio_hglib::{Client, Connection};
-use super::attachio::AttachIo;
-use super::message::{self, CommandType};
-use super::uihandler::SystemHandler;
+use crate::attachio::AttachIo;
+use crate::message::{self, CommandType};
+use crate::uihandler::SystemHandler;
enum AsyncS<R, S> {
Ready(R),
@@ -37,7 +37,7 @@
Finished,
}
-type CommandPoll<C, H> = io::Result<(AsyncS<(Client<C>, H, i32), CommandState<C, H>>)>;
+type CommandPoll<C, H> = io::Result<AsyncS<(Client<C>, H, i32), CommandState<C, H>>>;
/// Future resolves to `(exit_code, client)`.
#[must_use = "futures do nothing unless polled"]
@@ -140,30 +140,31 @@
C: Connection,
H: SystemHandler,
{
- match msg {
- ChannelMessage::Data(b'r', data) => {
- let code = message::parse_result_code(data)?;
- Ok(AsyncS::Ready((client, handler, code)))
- }
- ChannelMessage::Data(..) => {
- // just ignores data sent to optional channel
- let msg_loop = MessageLoop::resume(client);
- Ok(AsyncS::PollAgain(CommandState::Running(msg_loop, handler)))
- }
- ChannelMessage::InputRequest(..) | ChannelMessage::LineRequest(..) => Err(io::Error::new(
- io::ErrorKind::InvalidData,
- "unsupported request",
- )),
- ChannelMessage::SystemRequest(data) => {
- let (cmd_type, cmd_spec) = message::parse_command_spec(data)?;
- match cmd_type {
- CommandType::Pager => {
- let fut = handler.spawn_pager(cmd_spec).into_future();
- Ok(AsyncS::PollAgain(CommandState::SpawningPager(client, fut)))
- }
- CommandType::System => {
- let fut = handler.run_system(cmd_spec).into_future();
- Ok(AsyncS::PollAgain(CommandState::WaitingSystem(client, fut)))
+ {
+ match msg {
+ ChannelMessage::Data(b'r', data) => {
+ let code = message::parse_result_code(data)?;
+ Ok(AsyncS::Ready((client, handler, code)))
+ }
+ ChannelMessage::Data(..) => {
+ // just ignores data sent to optional channel
+ let msg_loop = MessageLoop::resume(client);
+ Ok(AsyncS::PollAgain(CommandState::Running(msg_loop, handler)))
+ }
+ ChannelMessage::InputRequest(..) | ChannelMessage::LineRequest(..) => Err(
+ io::Error::new(io::ErrorKind::InvalidData, "unsupported request"),
+ ),
+ ChannelMessage::SystemRequest(data) => {
+ let (cmd_type, cmd_spec) = message::parse_command_spec(data)?;
+ match cmd_type {
+ CommandType::Pager => {
+ let fut = handler.spawn_pager(cmd_spec).into_future();
+ Ok(AsyncS::PollAgain(CommandState::SpawningPager(client, fut)))
+ }
+ CommandType::System => {
+ let fut = handler.run_system(cmd_spec).into_future();
+ Ok(AsyncS::PollAgain(CommandState::WaitingSystem(client, fut)))
+ }
}
}
}
--- a/rust/chg/src/uihandler.rs Mon Apr 13 16:30:13 2020 +0300
+++ b/rust/chg/src/uihandler.rs Thu Apr 16 22:51:09 2020 +0530
@@ -12,8 +12,8 @@
use tokio;
use tokio_process::{ChildStdin, CommandExt};
-use super::message::CommandSpec;
-use super::procutil;
+use crate::message::CommandSpec;
+use crate::procutil;
/// Callback to process shell command requests received from server.
pub trait SystemHandler: Sized {
--- a/rust/hg-core/Cargo.toml Mon Apr 13 16:30:13 2020 +0300
+++ b/rust/hg-core/Cargo.toml Thu Apr 16 22:51:09 2020 +0530
@@ -4,16 +4,37 @@
authors = ["Georges Racinet <gracinet@anybox.fr>"]
description = "Mercurial pure Rust core library, with no assumption on Python bindings (FFI)"
edition = "2018"
+build = "build.rs"
[lib]
name = "hg"
[dependencies]
-byteorder = "1.3.1"
-lazy_static = "1.3.0"
-memchr = "2.2.0"
-rand = "0.6.5"
-rand_pcg = "0.1.1"
-rayon = "1.2.0"
-regex = "1.1.0"
+byteorder = "1.3.4"
+hex = "0.4.2"
+lazy_static = "1.4.0"
+libc = { version = "0.2.66", optional = true }
+memchr = "2.3.3"
+rand = "0.7.3"
+rand_pcg = "0.2.1"
+rand_distr = "0.2.2"
+rayon = "1.3.0"
+regex = "1.3.6"
twox-hash = "1.5.0"
+same-file = "1.0.6"
+crossbeam = "0.7.3"
+micro-timer = "0.2.1"
+log = "0.4.8"
+
+[dev-dependencies]
+clap = "*"
+memmap = "0.7.0"
+pretty_assertions = "0.6.1"
+tempfile = "3.1.0"
+
+[build-dependencies]
+cc = { version = "1.0.48", optional = true }
+
+[features]
+default = []
+with-re2 = ["cc", "libc"]
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/rust/hg-core/build.rs Thu Apr 16 22:51:09 2020 +0530
@@ -0,0 +1,61 @@
+// build.rs
+//
+// Copyright 2020 Raphaël Gomès <rgomes@octobus.net>
+//
+// This software may be used and distributed according to the terms of the
+// GNU General Public License version 2 or any later version.
+
+#[cfg(feature = "with-re2")]
+use cc;
+
+/// Uses either the system Re2 install as a dynamic library or the provided
+/// build as a static library.
+#[cfg(feature = "with-re2")]
+fn compile_re2() {
+ use cc;
+ use std::path::Path;
+ use std::process::exit;
+
+ let msg = r"HG_RE2_PATH must be one of `system|<path to build source clone of Re2>`";
+ let re2 = match std::env::var_os("HG_RE2_PATH") {
+ None => {
+ eprintln!("{}", msg);
+ exit(1)
+ }
+ Some(v) => {
+ if v == "system" {
+ None
+ } else {
+ Some(v)
+ }
+ }
+ };
+
+ let mut options = cc::Build::new();
+ options
+ .cpp(true)
+ .flag("-std=c++11")
+ .file("src/re2/rust_re2.cpp");
+
+ if let Some(ref source) = re2 {
+ options.include(Path::new(source));
+ };
+
+ options.compile("librustre.a");
+
+ if let Some(ref source) = &re2 {
+ // Link the local source statically
+ println!(
+ "cargo:rustc-link-search=native={}",
+ Path::new(source).join(Path::new("obj")).display()
+ );
+ println!("cargo:rustc-link-lib=static=re2");
+ } else {
+ println!("cargo:rustc-link-lib=re2");
+ }
+}
+
+fn main() {
+ #[cfg(feature = "with-re2")]
+ compile_re2();
+}
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/rust/hg-core/examples/nodemap/index.rs Thu Apr 16 22:51:09 2020 +0530
@@ -0,0 +1,95 @@
+// Copyright 2019-2020 Georges Racinet <georges.racinet@octobus.net>
+//
+// This software may be used and distributed according to the terms of the
+// GNU General Public License version 2 or any later version.
+
+//! Minimal `RevlogIndex`, readable from standard Mercurial file format
+use hg::*;
+use memmap::*;
+use std::fs::File;
+use std::ops::Deref;
+use std::path::Path;
+use std::slice;
+
+pub struct Index {
+ data: Box<dyn Deref<Target = [IndexEntry]> + Send>,
+}
+
+/// A fixed-size index entry. All numbers are big endian.
+#[repr(C)]
+pub struct IndexEntry {
+ not_used_yet: [u8; 24],
+ p1: Revision,
+ p2: Revision,
+ node: Node,
+ unused_node: [u8; 12],
+}
+
+pub const INDEX_ENTRY_SIZE: usize = 64;
+
+impl IndexEntry {
+    fn parents(&self) -> [Revision; 2] {
+        [Revision::from_be(self.p1), Revision::from_be(self.p2)] // fix: p2 was mistakenly read from the p1 field
+    }
+}
+
+impl RevlogIndex for Index {
+ fn len(&self) -> usize {
+ self.data.len()
+ }
+
+ fn node(&self, rev: Revision) -> Option<&Node> {
+ if rev == NULL_REVISION {
+ return None;
+ }
+ let i = rev as usize;
+ if i >= self.len() {
+ None
+ } else {
+ Some(&self.data[i].node)
+ }
+ }
+}
+
+impl Graph for &Index {
+ fn parents(&self, rev: Revision) -> Result<[Revision; 2], GraphError> {
+ let [p1, p2] = (*self).data[rev as usize].parents();
+ let len = (*self).len();
+ if p1 < NULL_REVISION
+ || p2 < NULL_REVISION
+ || p1 as usize >= len
+ || p2 as usize >= len
+ {
+ return Err(GraphError::ParentOutOfRange(rev));
+ }
+ Ok([p1, p2])
+ }
+}
+
+struct IndexMmap(Mmap);
+
+impl Deref for IndexMmap {
+ type Target = [IndexEntry];
+
+ fn deref(&self) -> &[IndexEntry] {
+ let ptr = self.0.as_ptr() as *const IndexEntry;
+ // Any misaligned data will be ignored.
+ debug_assert_eq!(
+ self.0.len() % std::mem::align_of::<IndexEntry>(),
+ 0,
+ "Misaligned data in mmap"
+ );
+ unsafe { slice::from_raw_parts(ptr, self.0.len() / INDEX_ENTRY_SIZE) }
+ }
+}
+
+impl Index {
+ pub fn load_mmap(path: impl AsRef<Path>) -> Self {
+ let file = File::open(path).unwrap();
+ let msg = "Index file is missing, or missing permission";
+ let mmap = unsafe { MmapOptions::new().map(&file) }.expect(msg);
+ Self {
+ data: Box::new(IndexMmap(mmap)),
+ }
+ }
+}
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/rust/hg-core/examples/nodemap/main.rs Thu Apr 16 22:51:09 2020 +0530
@@ -0,0 +1,146 @@
+// Copyright 2019-2020 Georges Racinet <georges.racinet@octobus.net>
+//
+// This software may be used and distributed according to the terms of the
+// GNU General Public License version 2 or any later version.
+
+use clap::*;
+use hg::revlog::node::*;
+use hg::revlog::nodemap::*;
+use hg::revlog::*;
+use memmap::MmapOptions;
+use rand::Rng;
+use std::fs::File;
+use std::io;
+use std::io::Write;
+use std::path::{Path, PathBuf};
+use std::str::FromStr;
+use std::time::Instant;
+
+mod index;
+use index::Index;
+
+fn mmap_index(repo_path: &Path) -> Index {
+ let mut path = PathBuf::from(repo_path);
+ path.extend([".hg", "store", "00changelog.i"].iter());
+ Index::load_mmap(path)
+}
+
+fn mmap_nodemap(path: &Path) -> NodeTree {
+ let file = File::open(path).unwrap();
+ let mmap = unsafe { MmapOptions::new().map(&file).unwrap() };
+ let len = mmap.len();
+ NodeTree::load_bytes(Box::new(mmap), len)
+}
+
+/// Scan the whole index and create the corresponding nodemap file at `path`
+fn create(index: &Index, path: &Path) -> io::Result<()> {
+    let mut file = File::create(path)?;
+    let start = Instant::now();
+    let mut nm = NodeTree::default();
+    for rev in 0..index.len() {
+        let rev = rev as Revision;
+        nm.insert(index, index.node(rev).unwrap(), rev).unwrap();
+    }
+    eprintln!("Nodemap constructed in RAM in {:?}", start.elapsed());
+    file.write_all(&nm.into_readonly_and_added_bytes().1)?; // write_all: plain write may be partial and truncate the file
+    eprintln!("Nodemap written to disk");
+    Ok(())
+}
+
+fn query(index: &Index, nm: &NodeTree, prefix: &str) {
+ let start = Instant::now();
+ let res = nm.find_hex(index, prefix);
+ println!("Result found in {:?}: {:?}", start.elapsed(), res);
+}
+
+fn bench(index: &Index, nm: &NodeTree, queries: usize) {
+ let len = index.len() as u32;
+ let mut rng = rand::thread_rng();
+ let nodes: Vec<Node> = (0..queries)
+ .map(|_| {
+ index
+ .node((rng.gen::<u32>() % len) as Revision)
+ .unwrap()
+ .clone()
+ })
+ .collect();
+ if queries < 10 {
+ let nodes_hex: Vec<String> =
+ nodes.iter().map(|n| n.encode_hex()).collect();
+ println!("Nodes: {:?}", nodes_hex);
+ }
+ let mut last: Option<Revision> = None;
+ let start = Instant::now();
+ for node in nodes.iter() {
+ last = nm.find_bin(index, node.into()).unwrap();
+ }
+ let elapsed = start.elapsed();
+ println!(
+ "Did {} queries in {:?} (mean {:?}), last was {:?} with result {:?}",
+ queries,
+ elapsed,
+ elapsed / (queries as u32),
+ nodes.last().unwrap().encode_hex(),
+ last
+ );
+}
+
+fn main() {
+ let matches = App::new("Nodemap pure Rust example")
+ .arg(
+ Arg::with_name("REPOSITORY")
+ .help("Path to the repository, always necessary for its index")
+ .required(true),
+ )
+ .arg(
+ Arg::with_name("NODEMAP_FILE")
+ .help("Path to the nodemap file, independent of REPOSITORY")
+ .required(true),
+ )
+ .subcommand(
+ SubCommand::with_name("create")
+ .about("Create NODEMAP_FILE by scanning repository index"),
+ )
+ .subcommand(
+ SubCommand::with_name("query")
+ .about("Query NODEMAP_FILE for PREFIX")
+ .arg(Arg::with_name("PREFIX").required(true)),
+ )
+ .subcommand(
+ SubCommand::with_name("bench")
+ .about(
+ "Perform #QUERIES random successful queries on NODEMAP_FILE")
+ .arg(Arg::with_name("QUERIES").required(true)),
+ )
+ .get_matches();
+
+ let repo = matches.value_of("REPOSITORY").unwrap();
+ let nm_path = matches.value_of("NODEMAP_FILE").unwrap();
+
+ let index = mmap_index(&Path::new(repo));
+
+ if let Some(_) = matches.subcommand_matches("create") {
+ println!("Creating nodemap file {} for repository {}", nm_path, repo);
+ create(&index, &Path::new(nm_path)).unwrap();
+ return;
+ }
+
+ let nm = mmap_nodemap(&Path::new(nm_path));
+ if let Some(matches) = matches.subcommand_matches("query") {
+ let prefix = matches.value_of("PREFIX").unwrap();
+ println!(
+ "Querying {} in nodemap file {} of repository {}",
+ prefix, nm_path, repo
+ );
+ query(&index, &nm, prefix);
+ }
+ if let Some(matches) = matches.subcommand_matches("bench") {
+ let queries =
+ usize::from_str(matches.value_of("QUERIES").unwrap()).unwrap();
+ println!(
+ "Doing {} random queries in nodemap file {} of repository {}",
+ queries, nm_path, repo
+ );
+ bench(&index, &nm, queries);
+ }
+}
--- a/rust/hg-core/src/dirstate/dirs_multiset.rs Mon Apr 13 16:30:13 2020 +0300
+++ b/rust/hg-core/src/dirstate/dirs_multiset.rs Thu Apr 16 22:51:09 2020 +0530
@@ -8,12 +8,15 @@
//! A multiset of directory names.
//!
//! Used to counts the references to directories in a manifest or dirstate.
-use crate::utils::hg_path::{HgPath, HgPathBuf};
use crate::{
- dirstate::EntryState, utils::files, DirstateEntry, DirstateMapError,
- FastHashMap,
+ dirstate::EntryState,
+ utils::{
+ files,
+ hg_path::{HgPath, HgPathBuf, HgPathError},
+ },
+ DirstateEntry, DirstateMapError, FastHashMap,
};
-use std::collections::hash_map::{self, Entry};
+use std::collections::{hash_map, hash_map::Entry, HashMap, HashSet};
// could be encapsulated if we care API stability more seriously
pub type DirsMultisetIter<'a> = hash_map::Keys<'a, HgPathBuf, u32>;
@@ -75,7 +78,14 @@
if subpath.as_bytes().last() == Some(&b'/') {
// TODO Remove this once PathAuditor is certified
// as the only entrypoint for path data
- return Err(DirstateMapError::ConsecutiveSlashes);
+ let second_slash_index = subpath.len() - 1;
+
+ return Err(DirstateMapError::InvalidPath(
+ HgPathError::ConsecutiveSlashes {
+ bytes: path.as_ref().as_bytes().to_owned(),
+ second_slash_index,
+ },
+ ));
}
if let Some(val) = self.inner.get_mut(subpath) {
*val += 1;
@@ -129,6 +139,68 @@
}
}
+/// This is basically a reimplementation of `DirsMultiset` that stores the
+/// children instead of just a count of them, plus a small optional
+/// optimization to avoid some directories we don't need.
+#[derive(PartialEq, Debug)]
+pub struct DirsChildrenMultiset<'a> {
+ inner: FastHashMap<&'a HgPath, HashSet<&'a HgPath>>,
+ only_include: Option<HashSet<&'a HgPath>>,
+}
+
+impl<'a> DirsChildrenMultiset<'a> {
+ pub fn new(
+ paths: impl Iterator<Item = &'a HgPathBuf>,
+ only_include: Option<&'a HashSet<impl AsRef<HgPath> + 'a>>,
+ ) -> Self {
+ let mut new = Self {
+ inner: HashMap::default(),
+ only_include: only_include
+ .map(|s| s.iter().map(|p| p.as_ref()).collect()),
+ };
+
+ for path in paths {
+ new.add_path(path)
+ }
+
+ new
+ }
+ fn add_path(&mut self, path: &'a (impl AsRef<HgPath> + 'a)) {
+ if path.as_ref().is_empty() {
+ return;
+ }
+ for (directory, basename) in files::find_dirs_with_base(path.as_ref())
+ {
+ if !self.is_dir_included(directory) {
+ continue;
+ }
+ self.inner
+ .entry(directory)
+ .and_modify(|e| {
+ e.insert(basename);
+ })
+ .or_insert_with(|| {
+ let mut set = HashSet::new();
+ set.insert(basename);
+ set
+ });
+ }
+ }
+    fn is_dir_included(&self, dir: impl AsRef<HgPath>) -> bool {
+        match &self.only_include {
+            None => true, // no filter given: every directory is included
+            Some(i) => i.contains(dir.as_ref()),
+        }
+    }
+
+ pub fn get(
+ &self,
+ path: impl AsRef<HgPath>,
+ ) -> Option<&HashSet<&'a HgPath>> {
+ self.inner.get(path.as_ref())
+ }
+}
+
#[cfg(test)]
mod tests {
use super::*;
--- a/rust/hg-core/src/dirstate/dirstate_map.rs Mon Apr 13 16:30:13 2020 +0300
+++ b/rust/hg-core/src/dirstate/dirstate_map.rs Thu Apr 16 22:51:09 2020 +0530
@@ -100,16 +100,12 @@
if entry.state != EntryState::Normal || entry.mtime == MTIME_UNSET {
self.get_non_normal_other_parent_entries()
.0
- .as_mut()
- .unwrap()
.insert(filename.to_owned());
}
if entry.size == SIZE_FROM_OTHER_PARENT {
self.get_non_normal_other_parent_entries()
.1
- .as_mut()
- .unwrap()
.insert(filename.to_owned());
}
Ok(())
@@ -152,8 +148,6 @@
);
self.get_non_normal_other_parent_entries()
.0
- .as_mut()
- .unwrap()
.insert(filename.to_owned());
Ok(())
}
@@ -182,8 +176,6 @@
}
self.get_non_normal_other_parent_entries()
.0
- .as_mut()
- .unwrap()
.remove(filename);
Ok(exists)
@@ -211,8 +203,6 @@
if changed {
self.get_non_normal_other_parent_entries()
.0
- .as_mut()
- .unwrap()
.insert(filename.to_owned());
}
}
@@ -224,8 +214,6 @@
) -> bool {
self.get_non_normal_other_parent_entries()
.0
- .as_mut()
- .unwrap()
.remove(key.as_ref())
}
pub fn non_normal_entries_union(
@@ -234,8 +222,6 @@
) -> Vec<HgPathBuf> {
self.get_non_normal_other_parent_entries()
.0
- .as_mut()
- .unwrap()
.union(&other)
.map(|e| e.to_owned())
.collect()
@@ -243,12 +229,31 @@
pub fn get_non_normal_other_parent_entries(
&mut self,
- ) -> (
- &mut Option<HashSet<HgPathBuf>>,
- &mut Option<HashSet<HgPathBuf>>,
- ) {
+ ) -> (&mut HashSet<HgPathBuf>, &mut HashSet<HgPathBuf>) {
self.set_non_normal_other_parent_entries(false);
- (&mut self.non_normal_set, &mut self.other_parent_set)
+ (
+ self.non_normal_set.as_mut().unwrap(),
+ self.other_parent_set.as_mut().unwrap(),
+ )
+ }
+
+ /// Useful to get immutable references to those sets in contexts where
+ /// you only have an immutable reference to the `DirstateMap`, like when
+ /// sharing references with Python.
+ ///
+ /// TODO, get rid of this along with the other "setter/getter" stuff when
+ /// a nice typestate plan is defined.
+ ///
+ /// # Panics
+ ///
+ /// Will panic if either set is `None`.
+ pub fn get_non_normal_other_parent_entries_panic(
+ &self,
+ ) -> (&HashSet<HgPathBuf>, &HashSet<HgPathBuf>) {
+ (
+ self.non_normal_set.as_ref().unwrap(),
+ self.other_parent_set.as_ref().unwrap(),
+ )
}
pub fn set_non_normal_other_parent_entries(&mut self, force: bool) {
@@ -440,22 +445,8 @@
.unwrap();
assert_eq!(1, map.len());
- assert_eq!(
- 0,
- map.get_non_normal_other_parent_entries()
- .0
- .as_ref()
- .unwrap()
- .len()
- );
- assert_eq!(
- 0,
- map.get_non_normal_other_parent_entries()
- .1
- .as_ref()
- .unwrap()
- .len()
- );
+ assert_eq!(0, map.get_non_normal_other_parent_entries().0.len());
+ assert_eq!(0, map.get_non_normal_other_parent_entries().1.len());
}
#[test]
@@ -487,7 +478,7 @@
})
.collect();
- let non_normal = [
+ let mut non_normal = [
b"f1", b"f2", b"f5", b"f6", b"f7", b"f8", b"f9", b"fa", b"fb",
]
.iter()
@@ -499,8 +490,8 @@
let entries = map.get_non_normal_other_parent_entries();
assert_eq!(
- (Some(non_normal), Some(other_parent)),
- (entries.0.to_owned(), entries.1.to_owned())
+ (&mut non_normal, &mut other_parent),
+ (entries.0, entries.1)
);
}
}
--- a/rust/hg-core/src/dirstate/parsers.rs Mon Apr 13 16:30:13 2020 +0300
+++ b/rust/hg-core/src/dirstate/parsers.rs Thu Apr 16 22:51:09 2020 +0530
@@ -9,6 +9,7 @@
DirstateEntry, DirstatePackError, DirstateParents, DirstateParseError,
};
use byteorder::{BigEndian, ReadBytesExt, WriteBytesExt};
+use micro_timer::timed;
use std::convert::{TryFrom, TryInto};
use std::io::Cursor;
use std::time::Duration;
@@ -20,6 +21,7 @@
// TODO parse/pack: is mutate-on-loop better for performance?
+#[timed]
pub fn parse_dirstate(
state_map: &mut StateMap,
copy_map: &mut CopyMap,
--- a/rust/hg-core/src/dirstate/status.rs Mon Apr 13 16:30:13 2020 +0300
+++ b/rust/hg-core/src/dirstate/status.rs Thu Apr 16 22:51:09 2020 +0530
@@ -11,20 +11,68 @@
use crate::{
dirstate::SIZE_FROM_OTHER_PARENT,
- matchers::Matcher,
+ filepatterns::PatternFileWarning,
+ matchers::{get_ignore_function, Matcher, VisitChildrenSet},
utils::{
- files::HgMetadata,
- hg_path::{hg_path_to_path_buf, HgPath},
+ files::{find_dirs, HgMetadata},
+ hg_path::{
+ hg_path_to_path_buf, os_string_to_hg_path_buf, HgPath, HgPathBuf,
+ HgPathError,
+ },
+ path_auditor::PathAuditor,
},
- CopyMap, DirstateEntry, DirstateMap, EntryState,
+ CopyMap, DirstateEntry, DirstateMap, EntryState, FastHashMap,
+ PatternError,
};
+use lazy_static::lazy_static;
+use micro_timer::timed;
use rayon::prelude::*;
-use std::collections::HashSet;
-use std::path::Path;
+use std::{
+ borrow::Cow,
+ collections::HashSet,
+ fs::{read_dir, DirEntry},
+ io::ErrorKind,
+ ops::Deref,
+ path::{Path, PathBuf},
+};
+
+/// Wrong type of file from a `BadMatch`
+/// Note: a lot of those don't exist on all platforms.
+#[derive(Debug, Copy, Clone)]
+pub enum BadType {
+ CharacterDevice,
+ BlockDevice,
+ FIFO,
+ Socket,
+ Directory,
+ Unknown,
+}
+
+impl ToString for BadType {
+ fn to_string(&self) -> String {
+ match self {
+ BadType::CharacterDevice => "character device",
+ BadType::BlockDevice => "block device",
+ BadType::FIFO => "fifo",
+ BadType::Socket => "socket",
+ BadType::Directory => "directory",
+ BadType::Unknown => "unknown",
+ }
+ .to_string()
+ }
+}
+
+/// Was explicitly matched but cannot be found/accessed
+#[derive(Debug, Copy, Clone)]
+pub enum BadMatch {
+ OsError(i32),
+ BadType(BadType),
+}
/// Marker enum used to dispatch new status entries into the right collections.
/// Is similar to `crate::EntryState`, but represents the transient state of
/// entries during the lifetime of a command.
+#[derive(Debug, Copy, Clone)]
enum Dispatch {
Unsure,
Modified,
@@ -33,9 +81,22 @@
Deleted,
Clean,
Unknown,
+ Ignored,
+ /// Empty dispatch, the file is not worth listing
+ None,
+ /// Was explicitly matched but cannot be found/accessed
+ Bad(BadMatch),
+ Directory {
+ /// True if the directory used to be a file in the dmap so we can say
+ /// that it's been removed.
+ was_file: bool,
+ },
}
type IoResult<T> = std::io::Result<T>;
+/// `Box<dyn Trait>` is syntactic sugar for `Box<dyn Trait, 'static>`, so add
+/// an explicit lifetime here to not fight `'static` bounds "out of nowhere".
+type IgnoreFnType<'a> = Box<dyn for<'r> Fn(&'r HgPath) -> bool + Sync + 'a>;
/// Dates and times that are outside the 31-bit signed range are compared
/// modulo 2^31. This should prevent hg from behaving badly with very large
@@ -48,15 +109,39 @@
a & i32::max_value() != b & i32::max_value()
}
+/// Return a sorted list containing information about the entries
+/// in the directory.
+///
+/// * `skip_dot_hg` - Return an empty vec if `path` contains a `.hg` directory
+fn list_directory(
+ path: impl AsRef<Path>,
+ skip_dot_hg: bool,
+) -> std::io::Result<Vec<(HgPathBuf, DirEntry)>> {
+ let mut results = vec![];
+ let entries = read_dir(path.as_ref())?;
+
+ for entry in entries {
+ let entry = entry?;
+ let filename = os_string_to_hg_path_buf(entry.file_name())?;
+ let file_type = entry.file_type()?;
+ if skip_dot_hg && filename.as_bytes() == b".hg" && file_type.is_dir() {
+ return Ok(vec![]);
+ } else {
+ results.push((HgPathBuf::from(filename), entry))
+ }
+ }
+
+ results.sort_unstable_by_key(|e| e.0.clone());
+ Ok(results)
+}
+
/// The file corresponding to the dirstate entry was found on the filesystem.
fn dispatch_found(
filename: impl AsRef<HgPath>,
entry: DirstateEntry,
metadata: HgMetadata,
copy_map: &CopyMap,
- check_exec: bool,
- list_clean: bool,
- last_normal_time: i64,
+ options: StatusOptions,
) -> Dispatch {
let DirstateEntry {
state,
@@ -76,7 +161,7 @@
EntryState::Normal => {
let size_changed = mod_compare(size, st_size as i32);
let mode_changed =
- (mode ^ st_mode as i32) & 0o100 != 0o000 && check_exec;
+ (mode ^ st_mode as i32) & 0o100 != 0o000 && options.check_exec;
let metadata_changed = size >= 0 && (size_changed || mode_changed);
let other_parent = size == SIZE_FROM_OTHER_PARENT;
if metadata_changed
@@ -86,17 +171,17 @@
Dispatch::Modified
} else if mod_compare(mtime, st_mtime as i32) {
Dispatch::Unsure
- } else if st_mtime == last_normal_time {
+ } else if st_mtime == options.last_normal_time {
// the file may have just been marked as normal and
// it may have changed in the same second without
// changing its size. This can happen if we quickly
// do multiple commits. Force lookup, so we don't
// miss such a racy file change.
Dispatch::Unsure
- } else if list_clean {
+ } else if options.list_clean {
Dispatch::Clean
} else {
- Dispatch::Unknown
+ Dispatch::None
}
}
EntryState::Merged => Dispatch::Modified,
@@ -120,71 +205,373 @@
}
}
+lazy_static! {
+ static ref DEFAULT_WORK: HashSet<&'static HgPath> = {
+ let mut h = HashSet::new();
+ h.insert(HgPath::new(b""));
+ h
+ };
+}
+
/// Get stat data about the files explicitly specified by match.
/// TODO subrepos
+#[timed]
fn walk_explicit<'a>(
- files: &'a HashSet<&HgPath>,
+ files: Option<&'a HashSet<&HgPath>>,
dmap: &'a DirstateMap,
- root_dir: impl AsRef<Path> + Sync + Send,
- check_exec: bool,
- list_clean: bool,
- last_normal_time: i64,
+ root_dir: impl AsRef<Path> + Sync + Send + 'a,
+ options: StatusOptions,
) -> impl ParallelIterator<Item = IoResult<(&'a HgPath, Dispatch)>> {
- files.par_iter().filter_map(move |filename| {
- // TODO normalization
- let normalized = filename.as_ref();
+ files
+ .unwrap_or(&DEFAULT_WORK)
+ .par_iter()
+ .map(move |filename| {
+ // TODO normalization
+ let normalized = filename.as_ref();
- let buf = match hg_path_to_path_buf(normalized) {
- Ok(x) => x,
- Err(e) => return Some(Err(e.into())),
- };
- let target = root_dir.as_ref().join(buf);
- let st = target.symlink_metadata();
- match st {
- Ok(meta) => {
- let file_type = meta.file_type();
- if file_type.is_file() || file_type.is_symlink() {
- if let Some(entry) = dmap.get(normalized) {
+ let buf = match hg_path_to_path_buf(normalized) {
+ Ok(x) => x,
+ Err(e) => return Some(Err(e.into())),
+ };
+ let target = root_dir.as_ref().join(buf);
+ let st = target.symlink_metadata();
+ let in_dmap = dmap.get(normalized);
+ match st {
+ Ok(meta) => {
+ let file_type = meta.file_type();
+ return if file_type.is_file() || file_type.is_symlink() {
+ if let Some(entry) = in_dmap {
+ return Some(Ok((
+ normalized,
+ dispatch_found(
+ &normalized,
+ *entry,
+ HgMetadata::from_metadata(meta),
+ &dmap.copy_map,
+ options,
+ ),
+ )));
+ }
+ Some(Ok((normalized, Dispatch::Unknown)))
+ } else {
+ if file_type.is_dir() {
+ Some(Ok((
+ normalized,
+ Dispatch::Directory {
+ was_file: in_dmap.is_some(),
+ },
+ )))
+ } else {
+ Some(Ok((
+ normalized,
+ Dispatch::Bad(BadMatch::BadType(
+ // TODO do more than unknown
+ // Support for all `BadType` variants
+ // varies greatly between platforms.
+ // So far, no tests check the type and
+ // this should be good enough for most
+ // users.
+ BadType::Unknown,
+ )),
+ )))
+ }
+ };
+ }
+ Err(_) => {
+ if let Some(entry) = in_dmap {
return Some(Ok((
normalized,
- dispatch_found(
- &normalized,
- *entry,
- HgMetadata::from_metadata(meta),
- &dmap.copy_map,
- check_exec,
- list_clean,
- last_normal_time,
- ),
+ dispatch_missing(entry.state),
)));
}
- } else {
- if dmap.contains_key(normalized) {
- return Some(Ok((normalized, Dispatch::Removed)));
- }
+ }
+ };
+ None
+ })
+ .flatten()
+}
+
+#[derive(Debug, Copy, Clone)]
+pub struct StatusOptions {
+ /// Remember the most recent modification timeslot for status, to make
+ /// sure we won't miss future size-preserving file content modifications
+ /// that happen within the same timeslot.
+ pub last_normal_time: i64,
+ /// Whether we are on a filesystem with UNIX-like exec flags
+ pub check_exec: bool,
+ pub list_clean: bool,
+ pub list_unknown: bool,
+ pub list_ignored: bool,
+}
+
+/// Dispatch a single entry (file, folder, symlink...) found during `traverse`.
+/// If the entry is a folder that needs to be traversed, it will be handled
+/// in a separate thread.
+
+fn handle_traversed_entry<'a>(
+ scope: &rayon::Scope<'a>,
+ files_sender: &'a crossbeam::Sender<IoResult<(HgPathBuf, Dispatch)>>,
+ matcher: &'a (impl Matcher + Sync),
+ root_dir: impl AsRef<Path> + Sync + Send + Copy + 'a,
+ dmap: &'a DirstateMap,
+ old_results: &'a FastHashMap<Cow<HgPath>, Dispatch>,
+ ignore_fn: &'a IgnoreFnType,
+ dir_ignore_fn: &'a IgnoreFnType,
+ options: StatusOptions,
+ filename: HgPathBuf,
+ dir_entry: DirEntry,
+) -> IoResult<()> {
+ let file_type = dir_entry.file_type()?;
+ let entry_option = dmap.get(&filename);
+
+ if file_type.is_dir() {
+ handle_traversed_dir(
+ scope,
+ files_sender,
+ matcher,
+ root_dir,
+ dmap,
+ old_results,
+ ignore_fn,
+ dir_ignore_fn,
+ options,
+ entry_option,
+ filename,
+ );
+ } else if file_type.is_file() || file_type.is_symlink() {
+ if let Some(entry) = entry_option {
+ if matcher.matches_everything() || matcher.matches(&filename) {
+ let metadata = dir_entry.metadata()?;
+ files_sender
+ .send(Ok((
+ filename.to_owned(),
+ dispatch_found(
+ &filename,
+ *entry,
+ HgMetadata::from_metadata(metadata),
+ &dmap.copy_map,
+ options,
+ ),
+ )))
+ .unwrap();
+ }
+ } else if (matcher.matches_everything() || matcher.matches(&filename))
+ && !ignore_fn(&filename)
+ {
+ if (options.list_ignored || matcher.exact_match(&filename))
+ && dir_ignore_fn(&filename)
+ {
+ if options.list_ignored {
+ files_sender
+ .send(Ok((filename.to_owned(), Dispatch::Ignored)))
+ .unwrap();
+ }
+ } else {
+ files_sender
+ .send(Ok((filename.to_owned(), Dispatch::Unknown)))
+ .unwrap();
+ }
+ } else if ignore_fn(&filename) && options.list_ignored {
+ files_sender
+ .send(Ok((filename.to_owned(), Dispatch::Ignored)))
+ .unwrap();
+ }
+ } else if let Some(entry) = entry_option {
+ // Used to be a file or a folder, now something else.
+ if matcher.matches_everything() || matcher.matches(&filename) {
+ files_sender
+ .send(Ok((filename.to_owned(), dispatch_missing(entry.state))))
+ .unwrap();
+ }
+ }
+
+ Ok(())
+}
+
+/// A directory was found in the filesystem and needs to be traversed
+fn handle_traversed_dir<'a>(
+ scope: &rayon::Scope<'a>,
+ files_sender: &'a crossbeam::Sender<IoResult<(HgPathBuf, Dispatch)>>,
+ matcher: &'a (impl Matcher + Sync),
+ root_dir: impl AsRef<Path> + Sync + Send + Copy + 'a,
+ dmap: &'a DirstateMap,
+ old_results: &'a FastHashMap<Cow<HgPath>, Dispatch>,
+ ignore_fn: &'a IgnoreFnType,
+ dir_ignore_fn: &'a IgnoreFnType,
+ options: StatusOptions,
+ entry_option: Option<&'a DirstateEntry>,
+ directory: HgPathBuf,
+) {
+ scope.spawn(move |_| {
+ // Nested `if` until `rust-lang/rust#53668` is stable
+ if let Some(entry) = entry_option {
+ // Used to be a file, is now a folder
+ if matcher.matches_everything() || matcher.matches(&directory) {
+ files_sender
+ .send(Ok((
+ directory.to_owned(),
+ dispatch_missing(entry.state),
+ )))
+ .unwrap();
+ }
+ }
+ // Do we need to traverse it?
+ if !ignore_fn(&directory) || options.list_ignored {
+ traverse_dir(
+ files_sender,
+ matcher,
+ root_dir,
+ dmap,
+ directory,
+ &old_results,
+ ignore_fn,
+ dir_ignore_fn,
+ options,
+ )
+ .unwrap_or_else(|e| files_sender.send(Err(e)).unwrap())
+ }
+ });
+}
+
+/// Decides whether the directory needs to be listed, and if so handles the
+/// entries in a separate thread.
+fn traverse_dir<'a>(
+ files_sender: &crossbeam::Sender<IoResult<(HgPathBuf, Dispatch)>>,
+ matcher: &'a (impl Matcher + Sync),
+ root_dir: impl AsRef<Path> + Sync + Send + Copy,
+ dmap: &'a DirstateMap,
+ directory: impl AsRef<HgPath>,
+ old_results: &FastHashMap<Cow<'a, HgPath>, Dispatch>,
+ ignore_fn: &IgnoreFnType,
+ dir_ignore_fn: &IgnoreFnType,
+ options: StatusOptions,
+) -> IoResult<()> {
+ let directory = directory.as_ref();
+ if directory.as_bytes() == b".hg" {
+ return Ok(());
+ }
+ let visit_entries = match matcher.visit_children_set(directory) {
+ VisitChildrenSet::Empty => return Ok(()),
+ VisitChildrenSet::This | VisitChildrenSet::Recursive => None,
+ VisitChildrenSet::Set(set) => Some(set),
+ };
+ let buf = hg_path_to_path_buf(directory)?;
+ let dir_path = root_dir.as_ref().join(buf);
+
+ let skip_dot_hg = !directory.as_bytes().is_empty();
+ let entries = match list_directory(dir_path, skip_dot_hg) {
+ Err(e) => match e.kind() {
+ ErrorKind::NotFound | ErrorKind::PermissionDenied => {
+ files_sender
+ .send(Ok((
+ directory.to_owned(),
+ Dispatch::Bad(BadMatch::OsError(
+ // Unwrapping here is OK because the error always
+ // is a real os error
+ e.raw_os_error().unwrap(),
+ )),
+ )))
+ .unwrap();
+ return Ok(());
+ }
+ _ => return Err(e),
+ },
+ Ok(entries) => entries,
+ };
+
+ rayon::scope(|scope| -> IoResult<()> {
+ for (filename, dir_entry) in entries {
+ if let Some(ref set) = visit_entries {
+ if !set.contains(filename.deref()) {
+ continue;
}
}
- Err(_) => {
- if let Some(entry) = dmap.get(normalized) {
- return Some(Ok((
- normalized,
- dispatch_missing(entry.state),
- )));
- }
+ // TODO normalize
+ let filename = if directory.is_empty() {
+ filename.to_owned()
+ } else {
+ directory.join(&filename)
+ };
+
+ if !old_results.contains_key(filename.deref()) {
+ handle_traversed_entry(
+ scope,
+ files_sender,
+ matcher,
+ root_dir,
+ dmap,
+ old_results,
+ ignore_fn,
+ dir_ignore_fn,
+ options,
+ filename,
+ dir_entry,
+ )?;
}
- };
- None
+ }
+ Ok(())
})
}
-/// Stat all entries in the `DirstateMap` and mark them for dispatch into
-/// the relevant collections.
+/// Walk the working directory recursively to look for changes compared to the
+/// current `DirstateMap`.
+///
+/// This takes a mutable reference to the results to account for the `extend`
+/// in timings
+#[timed]
+fn traverse<'a>(
+ matcher: &'a (impl Matcher + Sync),
+ root_dir: impl AsRef<Path> + Sync + Send + Copy,
+ dmap: &'a DirstateMap,
+ path: impl AsRef<HgPath>,
+ old_results: &FastHashMap<Cow<'a, HgPath>, Dispatch>,
+ ignore_fn: &IgnoreFnType,
+ dir_ignore_fn: &IgnoreFnType,
+ options: StatusOptions,
+ results: &mut Vec<(Cow<'a, HgPath>, Dispatch)>,
+) -> IoResult<()> {
+ let root_dir = root_dir.as_ref();
+
+ // The traversal is done in parallel, so use a channel to gather entries.
+ // `crossbeam::Sender` is `Send`, while `mpsc::Sender` is not.
+ let (files_transmitter, files_receiver) = crossbeam::channel::unbounded();
+
+ traverse_dir(
+ &files_transmitter,
+ matcher,
+ root_dir,
+ &dmap,
+ path,
+ &old_results,
+ &ignore_fn,
+ &dir_ignore_fn,
+ options,
+ )?;
+
+ // Disconnect the channel so the receiver stops waiting
+ drop(files_transmitter);
+
+ // TODO don't collect. Find a way of replicating the behavior of
+ // `itertools::process_results`, but for `rayon::ParallelIterator`
+ let new_results: IoResult<Vec<(Cow<'a, HgPath>, Dispatch)>> =
+ files_receiver
+ .into_iter()
+ .map(|item| {
+ let (f, d) = item?;
+ Ok((Cow::Owned(f), d))
+ })
+ .collect();
+
+ results.par_extend(new_results?);
+
+ Ok(())
+}
+
+/// Stat all entries in the `DirstateMap` and mark them for dispatch.
fn stat_dmap_entries(
dmap: &DirstateMap,
root_dir: impl AsRef<Path> + Sync + Send,
- check_exec: bool,
- list_clean: bool,
- last_normal_time: i64,
+ options: StatusOptions,
) -> impl ParallelIterator<Item = IoResult<(&HgPath, Dispatch)>> {
dmap.par_iter().map(move |(filename, entry)| {
let filename: &HgPath = filename;
@@ -205,13 +592,11 @@
*entry,
HgMetadata::from_metadata(m),
&dmap.copy_map,
- check_exec,
- list_clean,
- last_normal_time,
+ options,
),
)),
Err(ref e)
- if e.kind() == std::io::ErrorKind::NotFound
+ if e.kind() == ErrorKind::NotFound
|| e.raw_os_error() == Some(20) =>
{
// Rust does not yet have an `ErrorKind` for
@@ -225,82 +610,302 @@
})
}
-pub struct StatusResult<'a> {
- pub modified: Vec<&'a HgPath>,
- pub added: Vec<&'a HgPath>,
- pub removed: Vec<&'a HgPath>,
- pub deleted: Vec<&'a HgPath>,
- pub clean: Vec<&'a HgPath>,
- /* TODO ignored
- * TODO unknown */
+/// This takes a mutable reference to the results to account for the `extend`
+/// in timings
+#[timed]
+fn extend_from_dmap<'a>(
+ dmap: &'a DirstateMap,
+ root_dir: impl AsRef<Path> + Sync + Send,
+ options: StatusOptions,
+ results: &mut Vec<(Cow<'a, HgPath>, Dispatch)>,
+) {
+ results.par_extend(
+ stat_dmap_entries(dmap, root_dir, options)
+ .flatten()
+ .map(|(filename, dispatch)| (Cow::Borrowed(filename), dispatch)),
+ );
}
+#[derive(Debug)]
+pub struct DirstateStatus<'a> {
+ pub modified: Vec<Cow<'a, HgPath>>,
+ pub added: Vec<Cow<'a, HgPath>>,
+ pub removed: Vec<Cow<'a, HgPath>>,
+ pub deleted: Vec<Cow<'a, HgPath>>,
+ pub clean: Vec<Cow<'a, HgPath>>,
+ pub ignored: Vec<Cow<'a, HgPath>>,
+ pub unknown: Vec<Cow<'a, HgPath>>,
+ pub bad: Vec<(Cow<'a, HgPath>, BadMatch)>,
+}
+
+#[timed]
fn build_response<'a>(
- results: impl IntoIterator<Item = IoResult<(&'a HgPath, Dispatch)>>,
-) -> IoResult<(Vec<&'a HgPath>, StatusResult<'a>)> {
+ results: impl IntoIterator<Item = (Cow<'a, HgPath>, Dispatch)>,
+) -> (Vec<Cow<'a, HgPath>>, DirstateStatus<'a>) {
let mut lookup = vec![];
let mut modified = vec![];
let mut added = vec![];
let mut removed = vec![];
let mut deleted = vec![];
let mut clean = vec![];
+ let mut ignored = vec![];
+ let mut unknown = vec![];
+ let mut bad = vec![];
- for res in results.into_iter() {
- let (filename, dispatch) = res?;
+ for (filename, dispatch) in results.into_iter() {
match dispatch {
- Dispatch::Unknown => {}
+ Dispatch::Unknown => unknown.push(filename),
Dispatch::Unsure => lookup.push(filename),
Dispatch::Modified => modified.push(filename),
Dispatch::Added => added.push(filename),
Dispatch::Removed => removed.push(filename),
Dispatch::Deleted => deleted.push(filename),
Dispatch::Clean => clean.push(filename),
+ Dispatch::Ignored => ignored.push(filename),
+ Dispatch::None => {}
+ Dispatch::Bad(reason) => bad.push((filename, reason)),
+ Dispatch::Directory { .. } => {}
}
}
- Ok((
+ (
lookup,
- StatusResult {
+ DirstateStatus {
modified,
added,
removed,
deleted,
clean,
+ ignored,
+ unknown,
+ bad,
},
- ))
+ )
+}
+
+#[derive(Debug)]
+pub enum StatusError {
+ IO(std::io::Error),
+ Path(HgPathError),
+ Pattern(PatternError),
+}
+
+pub type StatusResult<T> = Result<T, StatusError>;
+
+impl From<PatternError> for StatusError {
+ fn from(e: PatternError) -> Self {
+ StatusError::Pattern(e)
+ }
+}
+impl From<HgPathError> for StatusError {
+ fn from(e: HgPathError) -> Self {
+ StatusError::Path(e)
+ }
+}
+impl From<std::io::Error> for StatusError {
+ fn from(e: std::io::Error) -> Self {
+ StatusError::IO(e)
+ }
+}
+
+impl ToString for StatusError {
+ fn to_string(&self) -> String {
+ match self {
+ StatusError::IO(e) => e.to_string(),
+ StatusError::Path(e) => e.to_string(),
+ StatusError::Pattern(e) => e.to_string(),
+ }
+ }
}
+/// This takes a mutable reference to the results to account for the `extend`
+/// in timings
+#[timed]
+fn handle_unknowns<'a>(
+ dmap: &'a DirstateMap,
+ matcher: &(impl Matcher + Sync),
+ root_dir: impl AsRef<Path> + Sync + Send + Copy,
+ options: StatusOptions,
+ results: &mut Vec<(Cow<'a, HgPath>, Dispatch)>,
+) -> IoResult<()> {
+ let to_visit: Vec<(&HgPath, &DirstateEntry)> = if results.is_empty()
+ && matcher.matches_everything()
+ {
+ dmap.iter().map(|(f, e)| (f.deref(), e)).collect()
+ } else {
+ // Only convert to a hashmap if needed.
+ let old_results: FastHashMap<_, _> = results.iter().cloned().collect();
+ dmap.iter()
+ .filter_map(move |(f, e)| {
+ if !old_results.contains_key(f.deref()) && matcher.matches(f) {
+ Some((f.deref(), e))
+ } else {
+ None
+ }
+ })
+ .collect()
+ };
+
+ // We walked all dirs under the roots that weren't ignored, and
+ // everything that matched was stat'ed and is already in results.
+ // The rest must thus be ignored or under a symlink.
+ let path_auditor = PathAuditor::new(root_dir);
+
+ // TODO don't collect. Find a way of replicating the behavior of
+ // `itertools::process_results`, but for `rayon::ParallelIterator`
+ let new_results: IoResult<Vec<_>> = to_visit
+ .into_par_iter()
+ .filter_map(|(filename, entry)| -> Option<IoResult<_>> {
+ // Report ignored items in the dmap as long as they are not
+ // under a symlink directory.
+ if path_auditor.check(filename) {
+ // TODO normalize for case-insensitive filesystems
+ let buf = match hg_path_to_path_buf(filename) {
+ Ok(x) => x,
+ Err(e) => return Some(Err(e.into())),
+ };
+ Some(Ok((
+ Cow::Borrowed(filename),
+ match root_dir.as_ref().join(&buf).symlink_metadata() {
+ // File was just ignored, no links, and exists
+ Ok(meta) => {
+ let metadata = HgMetadata::from_metadata(meta);
+ dispatch_found(
+ filename,
+ *entry,
+ metadata,
+ &dmap.copy_map,
+ options,
+ )
+ }
+ // File doesn't exist
+ Err(_) => dispatch_missing(entry.state),
+ },
+ )))
+ } else {
+ // It's either missing or under a symlink directory which
+ // we, in this case, report as missing.
+ Some(Ok((
+ Cow::Borrowed(filename),
+ dispatch_missing(entry.state),
+ )))
+ }
+ })
+ .collect();
+
+ results.par_extend(new_results?);
+
+ Ok(())
+}
+
+/// Get the status of files in the working directory.
+///
+/// This is the current entry-point for `hg-core` and is realistically unusable
+/// outside of a Python context because its arguments need to provide a lot of
+/// information that will not be necessary in the future.
+#[timed]
pub fn status<'a: 'c, 'b: 'c, 'c>(
dmap: &'a DirstateMap,
- matcher: &'b (impl Matcher),
- root_dir: impl AsRef<Path> + Sync + Send + Copy,
- list_clean: bool,
- last_normal_time: i64,
- check_exec: bool,
-) -> IoResult<(Vec<&'c HgPath>, StatusResult<'c>)> {
+ matcher: &'b (impl Matcher + Sync),
+ root_dir: impl AsRef<Path> + Sync + Send + Copy + 'c,
+ ignore_files: Vec<PathBuf>,
+ options: StatusOptions,
+) -> StatusResult<(
+ (Vec<Cow<'c, HgPath>>, DirstateStatus<'c>),
+ Vec<PatternFileWarning>,
+)> {
+ // Needs to outlive `dir_ignore_fn` since it's captured.
+ let mut ignore_fn: IgnoreFnType;
+
+ // Only involve real ignore mechanism if we're listing unknowns or ignored.
+ let (dir_ignore_fn, warnings): (IgnoreFnType, _) = if options.list_ignored
+ || options.list_unknown
+ {
+ let (ignore, warnings) = get_ignore_function(ignore_files, root_dir)?;
+
+ ignore_fn = ignore;
+ let dir_ignore_fn = Box::new(|dir: &_| {
+ // Is the path or one of its ancestors ignored?
+ if ignore_fn(dir) {
+ true
+ } else {
+ for p in find_dirs(dir) {
+ if ignore_fn(p) {
+ return true;
+ }
+ }
+ false
+ }
+ });
+ (dir_ignore_fn, warnings)
+ } else {
+ ignore_fn = Box::new(|&_| true);
+ (Box::new(|&_| true), vec![])
+ };
+
let files = matcher.file_set();
- let mut results = vec![];
- if let Some(files) = files {
- results.par_extend(walk_explicit(
- &files,
- &dmap,
- root_dir,
- check_exec,
- list_clean,
- last_normal_time,
- ));
+
+ // Step 1: check the files explicitly mentioned by the user
+ let explicit = walk_explicit(files, &dmap, root_dir, options);
+
+ // Collect results into a `Vec` because we do very few lookups in most
+ // cases.
+ let (work, mut results): (Vec<_>, Vec<_>) = explicit
+ .filter_map(Result::ok)
+ .map(|(filename, dispatch)| (Cow::Borrowed(filename), dispatch))
+ .partition(|(_, dispatch)| match dispatch {
+ Dispatch::Directory { .. } => true,
+ _ => false,
+ });
+
+ if !work.is_empty() {
+ // Hashmaps are quite a bit slower to build than vecs, so only build it
+ // if needed.
+ let old_results = results.iter().cloned().collect();
+
+ // Step 2: recursively check the working directory for changes if
+ // needed
+ for (dir, dispatch) in work {
+ match dispatch {
+ Dispatch::Directory { was_file } => {
+ if was_file {
+ results.push((dir.to_owned(), Dispatch::Removed));
+ }
+ if options.list_ignored
+ || options.list_unknown && !dir_ignore_fn(&dir)
+ {
+ traverse(
+ matcher,
+ root_dir,
+ &dmap,
+ &dir,
+ &old_results,
+ &ignore_fn,
+ &dir_ignore_fn,
+ options,
+ &mut results,
+ )?;
+ }
+ }
+ _ => unreachable!("There can only be directories in `work`"),
+ }
+ }
}
if !matcher.is_exact() {
- let stat_results = stat_dmap_entries(
- &dmap,
- root_dir,
- check_exec,
- list_clean,
- last_normal_time,
- );
- results.par_extend(stat_results);
+ // Step 3: Check the remaining files from the dmap.
+ // If a dmap file is not in results yet, it was either
+ // a) not matched b) ignored, c) missing, or d) under a
+ // symlink directory.
+
+ if options.list_unknown {
+ handle_unknowns(dmap, matcher, root_dir, options, &mut results)?;
+ } else {
+ // We may not have walked the full directory tree above, so stat
+ // and check everything we missed.
+ extend_from_dmap(&dmap, root_dir, options, &mut results);
+ }
}
- build_response(results)
+ Ok((build_response(results), warnings))
}
--- a/rust/hg-core/src/discovery.rs Mon Apr 13 16:30:13 2020 +0300
+++ b/rust/hg-core/src/discovery.rs Thu Apr 16 22:51:09 2020 +0530
@@ -597,12 +597,12 @@
#[test]
fn test_limit_sample_less_than_half() {
- assert_eq!(full_disco().limit_sample((1..6).collect(), 2), vec![4, 2]);
+ assert_eq!(full_disco().limit_sample((1..6).collect(), 2), vec![2, 5]);
}
#[test]
fn test_limit_sample_more_than_half() {
- assert_eq!(full_disco().limit_sample((1..4).collect(), 2), vec![3, 2]);
+ assert_eq!(full_disco().limit_sample((1..4).collect(), 2), vec![1, 2]);
}
#[test]
--- a/rust/hg-core/src/filepatterns.rs Mon Apr 13 16:30:13 2020 +0300
+++ b/rust/hg-core/src/filepatterns.rs Thu Apr 16 22:51:09 2020 +0530
@@ -8,12 +8,18 @@
//! Handling of Mercurial-specific patterns.
use crate::{
- utils::SliceExt, FastHashMap, LineNumber, PatternError, PatternFileError,
+ utils::{
+ files::{canonical_path, get_bytes_from_path, get_path_from_bytes},
+ hg_path::{path_to_hg_path_buf, HgPathBuf, HgPathError},
+ SliceExt,
+ },
+ FastHashMap, PatternError,
};
use lazy_static::lazy_static;
use regex::bytes::{NoExpand, Regex};
use std::fs::File;
use std::io::Read;
+use std::ops::Deref;
use std::path::{Path, PathBuf};
use std::vec::Vec;
@@ -32,19 +38,33 @@
const GLOB_REPLACEMENTS: &[(&[u8], &[u8])] =
&[(b"*/", b"(?:.*/)?"), (b"*", b".*"), (b"", b"[^/]*")];
+/// Appended to the regexp of globs
+const GLOB_SUFFIX: &[u8; 7] = b"(?:/|$)";
+
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
pub enum PatternSyntax {
+ /// A regular expression
Regexp,
/// Glob that matches at the front of the path
RootGlob,
/// Glob that matches at any suffix of the path (still anchored at
/// slashes)
Glob,
+ /// A path relative to repository root, which is matched recursively
Path,
+ /// A path relative to cwd
RelPath,
+ /// An unrooted glob (*.rs matches Rust files in all dirs)
RelGlob,
+ /// A regexp that needn't match the start of a name
RelRegexp,
+ /// A path relative to repository root, which is matched non-recursively
+ /// (will not match subdirectories)
RootFiles,
+ /// A file of patterns to read and include
+ Include,
+ /// A file of patterns to match against files under the same directory
+ SubInclude,
}
/// Transforms a glob pattern into a regex
@@ -125,16 +145,20 @@
.collect()
}
-fn parse_pattern_syntax(kind: &[u8]) -> Result<PatternSyntax, PatternError> {
+pub fn parse_pattern_syntax(
+ kind: &[u8],
+) -> Result<PatternSyntax, PatternError> {
match kind {
- b"re" => Ok(PatternSyntax::Regexp),
- b"path" => Ok(PatternSyntax::Path),
- b"relpath" => Ok(PatternSyntax::RelPath),
- b"rootfilesin" => Ok(PatternSyntax::RootFiles),
- b"relglob" => Ok(PatternSyntax::RelGlob),
- b"relre" => Ok(PatternSyntax::RelRegexp),
- b"glob" => Ok(PatternSyntax::Glob),
- b"rootglob" => Ok(PatternSyntax::RootGlob),
+ b"re:" => Ok(PatternSyntax::Regexp),
+ b"path:" => Ok(PatternSyntax::Path),
+ b"relpath:" => Ok(PatternSyntax::RelPath),
+ b"rootfilesin:" => Ok(PatternSyntax::RootFiles),
+ b"relglob:" => Ok(PatternSyntax::RelGlob),
+ b"relre:" => Ok(PatternSyntax::RelRegexp),
+ b"glob:" => Ok(PatternSyntax::Glob),
+ b"rootglob:" => Ok(PatternSyntax::RootGlob),
+ b"include:" => Ok(PatternSyntax::Include),
+ b"subinclude:" => Ok(PatternSyntax::SubInclude),
_ => Err(PatternError::UnsupportedSyntax(
String::from_utf8_lossy(kind).to_string(),
)),
@@ -144,21 +168,25 @@
/// Builds the regex that corresponds to the given pattern.
/// If within a `syntax: regexp` context, returns the pattern,
/// otherwise, returns the corresponding regex.
-fn _build_single_regex(
- syntax: PatternSyntax,
- pattern: &[u8],
- globsuffix: &[u8],
-) -> Vec<u8> {
+fn _build_single_regex(entry: &IgnorePattern) -> Vec<u8> {
+ let IgnorePattern {
+ syntax, pattern, ..
+ } = entry;
if pattern.is_empty() {
return vec![];
}
match syntax {
- PatternSyntax::Regexp => pattern.to_owned(),
+ // The `regex` crate adds `.*` to the start and end of expressions
+ // if there are no anchors, so add them.
+ PatternSyntax::Regexp => [b"^", &pattern[..], b"$"].concat(),
PatternSyntax::RelRegexp => {
- if pattern[0] == b'^' {
+ // The `regex` crate accepts `**` while `re2` and Python's `re`
+ // do not. Checking for `*` correctly triggers the same error in
+ // all engines.
+ if pattern[0] == b'^' || pattern[0] == b'*' {
return pattern.to_owned();
}
- [b".*", pattern].concat()
+ [&b".*"[..], pattern].concat()
}
PatternSyntax::Path | PatternSyntax::RelPath => {
if pattern == b"." {
@@ -168,49 +196,108 @@
}
PatternSyntax::RootFiles => {
let mut res = if pattern == b"." {
- vec![]
+ vec![b'^']
} else {
// Pattern is a directory name.
- [escape_pattern(pattern).as_slice(), b"/"].concat()
+ [b"^", escape_pattern(pattern).as_slice(), b"/"].concat()
};
// Anything after the pattern must be a non-directory.
res.extend(b"[^/]+$");
+ res.push(b'$');
res
}
PatternSyntax::RelGlob => {
let glob_re = glob_to_re(pattern);
if let Some(rest) = glob_re.drop_prefix(b"[^/]*") {
- [b".*", rest, globsuffix].concat()
+ [b".*", rest, GLOB_SUFFIX].concat()
} else {
- [b"(?:|.*/)", glob_re.as_slice(), globsuffix].concat()
+ [b"(?:.*/)?", glob_re.as_slice(), GLOB_SUFFIX].concat()
}
}
PatternSyntax::Glob | PatternSyntax::RootGlob => {
- [glob_to_re(pattern).as_slice(), globsuffix].concat()
+ [b"^", glob_to_re(pattern).as_slice(), GLOB_SUFFIX].concat()
}
+ PatternSyntax::Include | PatternSyntax::SubInclude => unreachable!(),
}
}
const GLOB_SPECIAL_CHARACTERS: [u8; 7] =
[b'*', b'?', b'[', b']', b'{', b'}', b'\\'];
+/// TODO support other platforms
+#[cfg(unix)]
+pub fn normalize_path_bytes(bytes: &[u8]) -> Vec<u8> {
+ if bytes.is_empty() {
+ return b".".to_vec();
+ }
+ let sep = b'/';
+
+ let mut initial_slashes = bytes.iter().take_while(|b| **b == sep).count();
+ if initial_slashes > 2 {
+ // POSIX allows one or two initial slashes, but treats three or more
+ // as a single slash.
+ initial_slashes = 1;
+ }
+ let components = bytes
+ .split(|b| *b == sep)
+ .filter(|c| !(c.is_empty() || c == b"."))
+ .fold(vec![], |mut acc, component| {
+ if component != b".."
+ || (initial_slashes == 0 && acc.is_empty())
+ || (!acc.is_empty() && acc[acc.len() - 1] == b"..")
+ {
+ acc.push(component)
+ } else if !acc.is_empty() {
+ acc.pop();
+ }
+ acc
+ });
+ let mut new_bytes = components.join(&sep);
+
+ if initial_slashes > 0 {
+ let mut buf: Vec<_> = (0..initial_slashes).map(|_| sep).collect();
+ buf.extend(new_bytes);
+ new_bytes = buf;
+ }
+ if new_bytes.is_empty() {
+ b".".to_vec()
+ } else {
+ new_bytes
+ }
+}
+
/// Wrapper function to `_build_single_regex` that short-circuits 'exact' globs
/// that don't need to be transformed into a regex.
pub fn build_single_regex(
- kind: &[u8],
- pat: &[u8],
- globsuffix: &[u8],
+ entry: &IgnorePattern,
) -> Result<Vec<u8>, PatternError> {
- let enum_kind = parse_pattern_syntax(kind)?;
- if enum_kind == PatternSyntax::RootGlob
- && !pat.iter().any(|b| GLOB_SPECIAL_CHARACTERS.contains(b))
+ let IgnorePattern {
+ pattern, syntax, ..
+ } = entry;
+ let pattern = match syntax {
+ PatternSyntax::RootGlob
+ | PatternSyntax::Path
+ | PatternSyntax::RelGlob
+ | PatternSyntax::RootFiles => normalize_path_bytes(&pattern),
+ PatternSyntax::Include | PatternSyntax::SubInclude => {
+ return Err(PatternError::NonRegexPattern(entry.clone()))
+ }
+ _ => pattern.to_owned(),
+ };
+ if *syntax == PatternSyntax::RootGlob
+ && !pattern.iter().any(|b| GLOB_SPECIAL_CHARACTERS.contains(b))
{
- let mut escaped = escape_pattern(pat);
- escaped.extend(b"(?:/|$)");
+ // The `regex` crate adds `.*` to the start and end of expressions
+ // if there are no anchors, so add the start anchor.
+ let mut escaped = vec![b'^'];
+ escaped.extend(escape_pattern(&pattern));
+ escaped.extend(GLOB_SUFFIX);
Ok(escaped)
} else {
- Ok(_build_single_regex(enum_kind, pat, globsuffix))
+ let mut entry = entry.clone();
+ entry.pattern = pattern;
+ Ok(_build_single_regex(&entry))
}
}
@@ -222,24 +309,29 @@
m.insert(b"regexp".as_ref(), b"relre:".as_ref());
m.insert(b"glob".as_ref(), b"relglob:".as_ref());
m.insert(b"rootglob".as_ref(), b"rootglob:".as_ref());
- m.insert(b"include".as_ref(), b"include".as_ref());
- m.insert(b"subinclude".as_ref(), b"subinclude".as_ref());
+ m.insert(b"include".as_ref(), b"include:".as_ref());
+ m.insert(b"subinclude".as_ref(), b"subinclude:".as_ref());
m
};
}
-pub type PatternTuple = (Vec<u8>, LineNumber, Vec<u8>);
-type WarningTuple = (PathBuf, Vec<u8>);
+#[derive(Debug)]
+pub enum PatternFileWarning {
+ /// (file path, syntax bytes)
+ InvalidSyntax(PathBuf, Vec<u8>),
+ /// File path
+ NoSuchFile(PathBuf),
+}
pub fn parse_pattern_file_contents<P: AsRef<Path>>(
lines: &[u8],
file_path: P,
warn: bool,
-) -> (Vec<PatternTuple>, Vec<WarningTuple>) {
+) -> Result<(Vec<IgnorePattern>, Vec<PatternFileWarning>), PatternError> {
let comment_regex = Regex::new(r"((?:^|[^\\])(?:\\\\)*)#.*").unwrap();
let comment_escape_regex = Regex::new(r"\\#").unwrap();
- let mut inputs: Vec<PatternTuple> = vec![];
- let mut warnings: Vec<WarningTuple> = vec![];
+ let mut inputs: Vec<IgnorePattern> = vec![];
+ let mut warnings: Vec<PatternFileWarning> = vec![];
let mut current_syntax = b"relre:".as_ref();
@@ -267,8 +359,10 @@
if let Some(rel_syntax) = SYNTAXES.get(syntax) {
current_syntax = rel_syntax;
} else if warn {
- warnings
- .push((file_path.as_ref().to_owned(), syntax.to_owned()));
+ warnings.push(PatternFileWarning::InvalidSyntax(
+ file_path.as_ref().to_owned(),
+ syntax.to_owned(),
+ ));
}
continue;
}
@@ -288,34 +382,186 @@
}
}
- inputs.push((
- [line_syntax, line].concat(),
- line_number,
- line.to_owned(),
+ inputs.push(IgnorePattern::new(
+ parse_pattern_syntax(&line_syntax).map_err(|e| match e {
+ PatternError::UnsupportedSyntax(syntax) => {
+ PatternError::UnsupportedSyntaxInFile(
+ syntax,
+ file_path.as_ref().to_string_lossy().into(),
+ line_number,
+ )
+ }
+ _ => e,
+ })?,
+ &line,
+ &file_path,
));
}
- (inputs, warnings)
+ Ok((inputs, warnings))
}
pub fn read_pattern_file<P: AsRef<Path>>(
file_path: P,
warn: bool,
-) -> Result<(Vec<PatternTuple>, Vec<WarningTuple>), PatternFileError> {
- let mut f = File::open(file_path.as_ref())?;
+) -> Result<(Vec<IgnorePattern>, Vec<PatternFileWarning>), PatternError> {
+ let mut f = match File::open(file_path.as_ref()) {
+ Ok(f) => Ok(f),
+ Err(e) => match e.kind() {
+ std::io::ErrorKind::NotFound => {
+ return Ok((
+ vec![],
+ vec![PatternFileWarning::NoSuchFile(
+ file_path.as_ref().to_owned(),
+ )],
+ ))
+ }
+ _ => Err(e),
+ },
+ }?;
let mut contents = Vec::new();
f.read_to_end(&mut contents)?;
- Ok(parse_pattern_file_contents(&contents, file_path, warn))
+ Ok(parse_pattern_file_contents(&contents, file_path, warn)?)
+}
+
+/// Represents an entry in an "ignore" file.
+#[derive(Debug, Eq, PartialEq, Clone)]
+pub struct IgnorePattern {
+ pub syntax: PatternSyntax,
+ pub pattern: Vec<u8>,
+ pub source: PathBuf,
+}
+
+impl IgnorePattern {
+ pub fn new(
+ syntax: PatternSyntax,
+ pattern: &[u8],
+ source: impl AsRef<Path>,
+ ) -> Self {
+ Self {
+ syntax,
+ pattern: pattern.to_owned(),
+ source: source.as_ref().to_owned(),
+ }
+ }
+}
+
+pub type PatternResult<T> = Result<T, PatternError>;
+
+/// Wrapper for `read_pattern_file` that also recursively expands `include:`
+/// patterns.
+///
+/// `subinclude:` is not treated as a special pattern here: unraveling them
+/// needs to occur in the "ignore" phase.
+pub fn get_patterns_from_file(
+ pattern_file: impl AsRef<Path>,
+ root_dir: impl AsRef<Path>,
+) -> PatternResult<(Vec<IgnorePattern>, Vec<PatternFileWarning>)> {
+ let (patterns, mut warnings) = read_pattern_file(&pattern_file, true)?;
+ let patterns = patterns
+ .into_iter()
+ .flat_map(|entry| -> PatternResult<_> {
+ let IgnorePattern {
+ syntax,
+ pattern,
+ source: _,
+ } = &entry;
+ Ok(match syntax {
+ PatternSyntax::Include => {
+ let inner_include =
+ root_dir.as_ref().join(get_path_from_bytes(&pattern));
+ let (inner_pats, inner_warnings) = get_patterns_from_file(
+ &inner_include,
+ root_dir.as_ref(),
+ )?;
+ warnings.extend(inner_warnings);
+ inner_pats
+ }
+ _ => vec![entry],
+ })
+ })
+ .flatten()
+ .collect();
+
+ Ok((patterns, warnings))
+}
+
+/// Holds all the information needed to handle a `subinclude:` pattern.
+pub struct SubInclude {
+ /// Will be used for repository (hg) paths that start with this prefix.
+ /// It is relative to the current working directory, so comparing against
+ /// repository paths is painless.
+ pub prefix: HgPathBuf,
+ /// The file itself, containing the patterns
+ pub path: PathBuf,
+ /// Folder in the filesystem where it applies
+ pub root: PathBuf,
+}
+
+impl SubInclude {
+ pub fn new(
+ root_dir: impl AsRef<Path>,
+ pattern: &[u8],
+ source: impl AsRef<Path>,
+ ) -> Result<SubInclude, HgPathError> {
+ let normalized_source =
+ normalize_path_bytes(&get_bytes_from_path(source));
+
+ let source_root = get_path_from_bytes(&normalized_source);
+ let source_root = source_root.parent().unwrap_or(source_root.deref());
+
+ let path = source_root.join(get_path_from_bytes(pattern));
+ let new_root = path.parent().unwrap_or(path.deref());
+
+ let prefix = canonical_path(&root_dir, &root_dir, new_root)?;
+
+ Ok(Self {
+ prefix: path_to_hg_path_buf(prefix).and_then(|mut p| {
+ if !p.is_empty() {
+ p.push(b'/');
+ }
+ Ok(p)
+ })?,
+ path: path.to_owned(),
+ root: new_root.to_owned(),
+ })
+ }
+}
+
+/// Separate and pre-process subincludes from other patterns for the "ignore"
+/// phase.
+pub fn filter_subincludes(
+ ignore_patterns: &[IgnorePattern],
+ root_dir: impl AsRef<Path>,
+) -> Result<(Vec<SubInclude>, Vec<&IgnorePattern>), HgPathError> {
+ let mut subincludes = vec![];
+ let mut others = vec![];
+
+ for ignore_pattern in ignore_patterns.iter() {
+ let IgnorePattern {
+ syntax,
+ pattern,
+ source,
+ } = ignore_pattern;
+ if *syntax == PatternSyntax::SubInclude {
+ subincludes.push(SubInclude::new(&root_dir, pattern, &source)?);
+ } else {
+ others.push(ignore_pattern)
+ }
+ }
+ Ok((subincludes, others))
}
#[cfg(test)]
mod tests {
use super::*;
+ use pretty_assertions::assert_eq;
#[test]
fn escape_pattern_test() {
- let untouched = br#"!"%',/0123456789:;<=>@ABCDEFGHIJKLMNOPQRSTUVWXYZ_`abcdefghijklmnopqrstuvwxyz"#;
+ let untouched =
+ br#"!"%',/0123456789:;<=>@ABCDEFGHIJKLMNOPQRSTUVWXYZ_`abcdefghijklmnopqrstuvwxyz"#;
assert_eq!(escape_pattern(untouched), untouched.to_vec());
// All escape codes
assert_eq!(
@@ -342,39 +588,78 @@
let lines = b"syntax: glob\n*.elc";
assert_eq!(
- vec![(b"relglob:*.elc".to_vec(), 2, b"*.elc".to_vec())],
parse_pattern_file_contents(lines, Path::new("file_path"), false)
+ .unwrap()
.0,
+ vec![IgnorePattern::new(
+ PatternSyntax::RelGlob,
+ b"*.elc",
+ Path::new("file_path")
+ )],
);
let lines = b"syntax: include\nsyntax: glob";
assert_eq!(
parse_pattern_file_contents(lines, Path::new("file_path"), false)
+ .unwrap()
.0,
vec![]
);
let lines = b"glob:**.o";
assert_eq!(
parse_pattern_file_contents(lines, Path::new("file_path"), false)
+ .unwrap()
.0,
- vec![(b"relglob:**.o".to_vec(), 1, b"**.o".to_vec())]
+ vec![IgnorePattern::new(
+ PatternSyntax::RelGlob,
+ b"**.o",
+ Path::new("file_path")
+ )]
+ );
+ }
+
+ #[test]
+ fn test_build_single_regex() {
+ assert_eq!(
+ build_single_regex(&IgnorePattern::new(
+ PatternSyntax::RelGlob,
+ b"rust/target/",
+ Path::new("")
+ ))
+ .unwrap(),
+ br"(?:.*/)?rust/target(?:/|$)".to_vec(),
);
}
#[test]
fn test_build_single_regex_shortcut() {
assert_eq!(
- br"(?:/|$)".to_vec(),
- build_single_regex(b"rootglob", b"", b"").unwrap()
+ build_single_regex(&IgnorePattern::new(
+ PatternSyntax::RootGlob,
+ b"",
+ Path::new("")
+ ))
+ .unwrap(),
+ br"^\.(?:/|$)".to_vec(),
);
assert_eq!(
- br"whatever(?:/|$)".to_vec(),
- build_single_regex(b"rootglob", b"whatever", b"").unwrap()
+ build_single_regex(&IgnorePattern::new(
+ PatternSyntax::RootGlob,
+ b"whatever",
+ Path::new("")
+ ))
+ .unwrap(),
+ br"^whatever(?:/|$)".to_vec(),
);
assert_eq!(
- br"[^/]*\.o".to_vec(),
- build_single_regex(b"rootglob", b"*.o", b"").unwrap()
+ build_single_regex(&IgnorePattern::new(
+ PatternSyntax::RootGlob,
+ b"*.o",
+ Path::new("")
+ ))
+ .unwrap(),
+ br"^[^/]*\.o(?:/|$)".to_vec(),
);
}
}
--- a/rust/hg-core/src/lib.rs Mon Apr 13 16:30:13 2020 +0300
+++ b/rust/hg-core/src/lib.rs Thu Apr 16 22:51:09 2020 +0530
@@ -13,7 +13,9 @@
dirs_multiset::{DirsMultiset, DirsMultisetIter},
dirstate_map::DirstateMap,
parsers::{pack_dirstate, parse_dirstate, PARENT_SIZE},
- status::{status, StatusResult},
+ status::{
+ status, BadMatch, BadType, DirstateStatus, StatusError, StatusOptions,
+ },
CopyMap, CopyMapIter, DirstateEntry, DirstateParents, EntryState,
StateMap, StateMapIter,
};
@@ -21,6 +23,8 @@
pub mod matchers;
pub mod revlog;
pub use revlog::*;
+#[cfg(feature = "with-re2")]
+pub mod re2;
pub mod utils;
// Remove this to see (potential) non-artificial compile failures. MacOS
@@ -31,13 +35,18 @@
likely not behave correctly on other platforms."
);
-use crate::utils::hg_path::HgPathBuf;
+use crate::utils::hg_path::{HgPathBuf, HgPathError};
pub use filepatterns::{
- build_single_regex, read_pattern_file, PatternSyntax, PatternTuple,
+ parse_pattern_syntax, read_pattern_file, IgnorePattern,
+ PatternFileWarning, PatternSyntax,
};
use std::collections::HashMap;
use twox_hash::RandomXxHashBuilder64;
+/// This is a contract between the `micro-timer` crate and us, to expose
+/// the `log` crate as `crate::log`.
+use log;
+
pub type LineNumber = usize;
/// Rust's default hasher is too slow because it tries to prevent collision
@@ -87,22 +96,22 @@
pub enum DirstateMapError {
PathNotFound(HgPathBuf),
EmptyPath,
- ConsecutiveSlashes,
+ InvalidPath(HgPathError),
}
impl ToString for DirstateMapError {
fn to_string(&self) -> String {
- use crate::DirstateMapError::*;
match self {
- PathNotFound(_) => "expected a value, found none".to_string(),
- EmptyPath => "Overflow in dirstate.".to_string(),
- ConsecutiveSlashes => {
- "found invalid consecutive slashes in path".to_string()
+ DirstateMapError::PathNotFound(_) => {
+ "expected a value, found none".to_string()
}
+ DirstateMapError::EmptyPath => "Overflow in dirstate.".to_string(),
+ DirstateMapError::InvalidPath(e) => e.to_string(),
}
}
}
+#[derive(Debug)]
pub enum DirstateError {
Parse(DirstateParseError),
Pack(DirstatePackError),
@@ -124,18 +133,44 @@
#[derive(Debug)]
pub enum PatternError {
+ Path(HgPathError),
UnsupportedSyntax(String),
+ UnsupportedSyntaxInFile(String, String, usize),
+ TooLong(usize),
+ IO(std::io::Error),
+ /// Needed a pattern that can be turned into a regex but got one that
+ /// can't. This should only happen through programmer error.
+ NonRegexPattern(IgnorePattern),
+ /// This is temporary, see `re2/mod.rs`.
+ /// This will cause a fallback to Python.
+ Re2NotInstalled,
}
-#[derive(Debug)]
-pub enum PatternFileError {
- IO(std::io::Error),
- Pattern(PatternError, LineNumber),
-}
-
-impl From<std::io::Error> for PatternFileError {
- fn from(e: std::io::Error) -> Self {
- PatternFileError::IO(e)
+impl ToString for PatternError {
+ fn to_string(&self) -> String {
+ match self {
+ PatternError::UnsupportedSyntax(syntax) => {
+ format!("Unsupported syntax {}", syntax)
+ }
+ PatternError::UnsupportedSyntaxInFile(syntax, file_path, line) => {
+ format!(
+ "{}:{}: unsupported syntax {}",
+ file_path, line, syntax
+ )
+ }
+ PatternError::TooLong(size) => {
+ format!("matcher pattern is too long ({} bytes)", size)
+ }
+ PatternError::IO(e) => e.to_string(),
+ PatternError::Path(e) => e.to_string(),
+ PatternError::NonRegexPattern(pattern) => {
+ format!("'{:?}' cannot be turned into a regex", pattern)
+ }
+ PatternError::Re2NotInstalled => {
+ "Re2 is not installed, cannot use regex functionality."
+ .to_string()
+ }
+ }
}
}
@@ -150,3 +185,15 @@
DirstateError::IO(e)
}
}
+
+impl From<std::io::Error> for PatternError {
+ fn from(e: std::io::Error) -> Self {
+ PatternError::IO(e)
+ }
+}
+
+impl From<HgPathError> for PatternError {
+ fn from(e: HgPathError) -> Self {
+ PatternError::Path(e)
+ }
+}
--- a/rust/hg-core/src/matchers.rs Mon Apr 13 16:30:13 2020 +0300
+++ b/rust/hg-core/src/matchers.rs Thu Apr 16 22:51:09 2020 +0530
@@ -7,10 +7,31 @@
//! Structs and types for matching files and directories.
-use crate::{utils::hg_path::HgPath, DirsMultiset, DirstateMapError};
+#[cfg(feature = "with-re2")]
+use crate::re2::Re2;
+use crate::{
+ dirstate::dirs_multiset::DirsChildrenMultiset,
+ filepatterns::{
+ build_single_regex, filter_subincludes, get_patterns_from_file,
+ PatternFileWarning, PatternResult, SubInclude,
+ },
+ utils::{
+ files::find_dirs,
+ hg_path::{HgPath, HgPathBuf},
+ Escaped,
+ },
+ DirsMultiset, DirstateMapError, FastHashMap, IgnorePattern, PatternError,
+ PatternSyntax,
+};
+
+use std::borrow::ToOwned;
use std::collections::HashSet;
+use std::fmt::{Display, Error, Formatter};
use std::iter::FromIterator;
+use std::ops::Deref;
+use std::path::{Path, PathBuf};
+#[derive(Debug, PartialEq)]
pub enum VisitChildrenSet<'a> {
/// Don't visit anything
Empty,
@@ -163,12 +184,48 @@
}
fn visit_children_set(
&self,
- _directory: impl AsRef<HgPath>,
+ directory: impl AsRef<HgPath>,
) -> VisitChildrenSet {
- // TODO implement once we have `status.traverse`
- // This is useless until unknown files are taken into account
- // Which will not need to happen before the `IncludeMatcher`.
- unimplemented!()
+ if self.files.is_empty() || !self.dirs.contains(&directory) {
+ return VisitChildrenSet::Empty;
+ }
+ let dirs_as_set = self.dirs.iter().map(|k| k.deref()).collect();
+
+ let mut candidates: HashSet<&HgPath> =
+ self.files.union(&dirs_as_set).map(|k| *k).collect();
+ candidates.remove(HgPath::new(b""));
+
+ if !directory.as_ref().is_empty() {
+ let directory = [directory.as_ref().as_bytes(), b"/"].concat();
+ candidates = candidates
+ .iter()
+ .filter_map(|c| {
+ if c.as_bytes().starts_with(&directory) {
+ Some(HgPath::new(&c.as_bytes()[directory.len()..]))
+ } else {
+ None
+ }
+ })
+ .collect();
+ }
+
+ // `self.dirs` includes all of the directories, recursively, so if
+ // we're attempting to match 'foo/bar/baz.txt', it'll have '', 'foo',
+ // 'foo/bar' in it. Thus we can safely ignore a candidate that has a
+ // '/' in it, indicating it's for a subdir-of-a-subdir; the immediate
+ // subdir will be in there without a slash.
+ VisitChildrenSet::Set(
+ candidates
+ .iter()
+ .filter_map(|c| {
+ if c.bytes().all(|b| *b != b'/') {
+ Some(*c)
+ } else {
+ None
+ }
+ })
+ .collect(),
+ )
}
fn matches_everything(&self) -> bool {
false
@@ -177,3 +234,693 @@
true
}
}
+
+/// Matches files that are included in the ignore rules.
+#[cfg_attr(
+ feature = "with-re2",
+ doc = r##"
+```
+use hg::{
+ matchers::{IncludeMatcher, Matcher},
+ IgnorePattern,
+ PatternSyntax,
+ utils::hg_path::HgPath
+};
+use std::path::Path;
+///
+let ignore_patterns =
+vec![IgnorePattern::new(PatternSyntax::RootGlob, b"this*", Path::new(""))];
+let (matcher, _) = IncludeMatcher::new(ignore_patterns, "").unwrap();
+///
+assert_eq!(matcher.matches(HgPath::new(b"testing")), false);
+assert_eq!(matcher.matches(HgPath::new(b"this should work")), true);
+assert_eq!(matcher.matches(HgPath::new(b"this also")), true);
+assert_eq!(matcher.matches(HgPath::new(b"but not this")), false);
+```
+"##
+)]
+pub struct IncludeMatcher<'a> {
+ patterns: Vec<u8>,
+ match_fn: Box<dyn for<'r> Fn(&'r HgPath) -> bool + 'a + Sync>,
+ /// Whether all the patterns match a prefix (i.e. recursively)
+ prefix: bool,
+ roots: HashSet<HgPathBuf>,
+ dirs: HashSet<HgPathBuf>,
+ parents: HashSet<HgPathBuf>,
+}
+
+impl<'a> Matcher for IncludeMatcher<'a> {
+ fn file_set(&self) -> Option<&HashSet<&HgPath>> {
+ None
+ }
+
+ fn exact_match(&self, _filename: impl AsRef<HgPath>) -> bool {
+ false
+ }
+
+ fn matches(&self, filename: impl AsRef<HgPath>) -> bool {
+ (self.match_fn)(filename.as_ref())
+ }
+
+ fn visit_children_set(
+ &self,
+ directory: impl AsRef<HgPath>,
+ ) -> VisitChildrenSet {
+ let dir = directory.as_ref();
+ if self.prefix && self.roots.contains(dir) {
+ return VisitChildrenSet::Recursive;
+ }
+ if self.roots.contains(HgPath::new(b""))
+ || self.roots.contains(dir)
+ || self.dirs.contains(dir)
+ || find_dirs(dir).any(|parent_dir| self.roots.contains(parent_dir))
+ {
+ return VisitChildrenSet::This;
+ }
+
+ if self.parents.contains(directory.as_ref()) {
+ let multiset = self.get_all_parents_children();
+ if let Some(children) = multiset.get(dir) {
+ return VisitChildrenSet::Set(children.to_owned());
+ }
+ }
+ VisitChildrenSet::Empty
+ }
+
+ fn matches_everything(&self) -> bool {
+ false
+ }
+
+ fn is_exact(&self) -> bool {
+ false
+ }
+}
+
+#[cfg(feature = "with-re2")]
+/// Returns a function that matches an `HgPath` against the given regex
+/// pattern.
+///
+/// This can fail when the pattern is invalid or not supported by the
+/// underlying engine `Re2`, for instance anything with back-references.
+fn re_matcher(
+ pattern: &[u8],
+) -> PatternResult<impl Fn(&HgPath) -> bool + Sync> {
+ let regex = Re2::new(pattern);
+ let regex = regex.map_err(|e| PatternError::UnsupportedSyntax(e))?;
+ Ok(move |path: &HgPath| regex.is_match(path.as_bytes()))
+}
+
+#[cfg(not(feature = "with-re2"))]
+/// Returns a function that matches an `HgPath` against the given regex
+/// pattern.
+///
+/// This can fail when the pattern is invalid or not supported by the
+/// underlying engine (the `regex` crate), for instance anything with
+/// back-references.
+fn re_matcher(
+ pattern: &[u8],
+) -> PatternResult<impl Fn(&HgPath) -> bool + Sync> {
+ use std::io::Write;
+
+ let mut escaped_bytes = vec![];
+ for byte in pattern {
+ if *byte > 127 {
+ write!(escaped_bytes, "\\x{:x}", *byte).unwrap();
+ } else {
+ escaped_bytes.push(*byte);
+ }
+ }
+
+ // Avoid the cost of UTF8 checking
+ //
+ // # Safety
+ // This is safe because we escaped all non-ASCII bytes.
+ let pattern_string = unsafe { String::from_utf8_unchecked(escaped_bytes) };
+ let re = regex::bytes::RegexBuilder::new(&pattern_string)
+ .unicode(false)
+ .build()
+ .map_err(|e| PatternError::UnsupportedSyntax(e.to_string()))?;
+
+ Ok(move |path: &HgPath| re.is_match(path.as_bytes()))
+}
+
+/// Returns the regex pattern and a function that matches an `HgPath` against
+/// said regex formed by the given ignore patterns.
+fn build_regex_match<'a>(
+ ignore_patterns: &'a [&'a IgnorePattern],
+) -> PatternResult<(Vec<u8>, Box<dyn Fn(&HgPath) -> bool + Sync>)> {
+ let regexps: Result<Vec<_>, PatternError> = ignore_patterns
+ .into_iter()
+ .map(|k| build_single_regex(*k))
+ .collect();
+ let regexps = regexps?;
+ let full_regex = regexps.join(&b'|');
+
+ let matcher = re_matcher(&full_regex)?;
+ let func = Box::new(move |filename: &HgPath| matcher(filename));
+
+ Ok((full_regex, func))
+}
+
+/// Returns roots and directories corresponding to each pattern.
+///
+/// This calculates the roots and directories exactly matching the patterns and
+/// returns a tuple of (roots, dirs). It does not return other directories
+/// which may also need to be considered, like the parent directories.
+fn roots_and_dirs(
+ ignore_patterns: &[IgnorePattern],
+) -> (Vec<HgPathBuf>, Vec<HgPathBuf>) {
+ let mut roots = Vec::new();
+ let mut dirs = Vec::new();
+
+ for ignore_pattern in ignore_patterns {
+ let IgnorePattern {
+ syntax, pattern, ..
+ } = ignore_pattern;
+ match syntax {
+ PatternSyntax::RootGlob | PatternSyntax::Glob => {
+ let mut root = vec![];
+
+ for p in pattern.split(|c| *c == b'/') {
+ if p.iter().any(|c| match *c {
+ b'[' | b'{' | b'*' | b'?' => true,
+ _ => false,
+ }) {
+ break;
+ }
+ root.push(HgPathBuf::from_bytes(p));
+ }
+ let buf =
+ root.iter().fold(HgPathBuf::new(), |acc, r| acc.join(r));
+ roots.push(buf);
+ }
+ PatternSyntax::Path | PatternSyntax::RelPath => {
+ let pat = HgPath::new(if pattern == b"." {
+ &[] as &[u8]
+ } else {
+ pattern
+ });
+ roots.push(pat.to_owned());
+ }
+ PatternSyntax::RootFiles => {
+ let pat = if pattern == b"." {
+ &[] as &[u8]
+ } else {
+ pattern
+ };
+ dirs.push(HgPathBuf::from_bytes(pat));
+ }
+ _ => {
+ roots.push(HgPathBuf::new());
+ }
+ }
+ }
+ (roots, dirs)
+}
+
+/// Paths extracted from patterns
+#[derive(Debug, PartialEq)]
+struct RootsDirsAndParents {
+ /// Directories to match recursively
+ pub roots: HashSet<HgPathBuf>,
+ /// Directories to match non-recursively
+ pub dirs: HashSet<HgPathBuf>,
+ /// Implicitly required directories to go to items in either roots or dirs
+ pub parents: HashSet<HgPathBuf>,
+}
+
+/// Extract roots, dirs and parents from patterns.
+fn roots_dirs_and_parents(
+ ignore_patterns: &[IgnorePattern],
+) -> PatternResult<RootsDirsAndParents> {
+ let (roots, dirs) = roots_and_dirs(ignore_patterns);
+
+ let mut parents = HashSet::new();
+
+ parents.extend(
+ DirsMultiset::from_manifest(&dirs)
+ .map_err(|e| match e {
+ DirstateMapError::InvalidPath(e) => e,
+ _ => unreachable!(),
+ })?
+ .iter()
+ .map(|k| k.to_owned()),
+ );
+ parents.extend(
+ DirsMultiset::from_manifest(&roots)
+ .map_err(|e| match e {
+ DirstateMapError::InvalidPath(e) => e,
+ _ => unreachable!(),
+ })?
+ .iter()
+ .map(|k| k.to_owned()),
+ );
+
+ Ok(RootsDirsAndParents {
+ roots: HashSet::from_iter(roots),
+ dirs: HashSet::from_iter(dirs),
+ parents,
+ })
+}
+
+/// Returns a function that checks whether a given file (in the general sense)
+/// should be matched.
+fn build_match<'a, 'b>(
+ ignore_patterns: &'a [IgnorePattern],
+ root_dir: impl AsRef<Path>,
+) -> PatternResult<(
+ Vec<u8>,
+ Box<dyn Fn(&HgPath) -> bool + 'b + Sync>,
+ Vec<PatternFileWarning>,
+)> {
+ let mut match_funcs: Vec<Box<dyn Fn(&HgPath) -> bool + Sync>> = vec![];
+ // For debugging and printing
+ let mut patterns = vec![];
+ let mut all_warnings = vec![];
+
+ let (subincludes, ignore_patterns) =
+ filter_subincludes(ignore_patterns, root_dir)?;
+
+ if !subincludes.is_empty() {
+ // Build prefix-based matcher functions for subincludes
+ let mut submatchers = FastHashMap::default();
+ let mut prefixes = vec![];
+
+ for SubInclude { prefix, root, path } in subincludes.into_iter() {
+ let (match_fn, warnings) =
+ get_ignore_function(vec![path.to_path_buf()], root)?;
+ all_warnings.extend(warnings);
+ prefixes.push(prefix.to_owned());
+ submatchers.insert(prefix.to_owned(), match_fn);
+ }
+
+ let match_subinclude = move |filename: &HgPath| {
+ for prefix in prefixes.iter() {
+ if let Some(rel) = filename.relative_to(prefix) {
+ if (submatchers.get(prefix).unwrap())(rel) {
+ return true;
+ }
+ }
+ }
+ false
+ };
+
+ match_funcs.push(Box::new(match_subinclude));
+ }
+
+ if !ignore_patterns.is_empty() {
+ // Either do dumb matching if all patterns are rootfiles, or match
+ // with a regex.
+ if ignore_patterns
+ .iter()
+ .all(|k| k.syntax == PatternSyntax::RootFiles)
+ {
+ let dirs: HashSet<_> = ignore_patterns
+ .iter()
+ .map(|k| k.pattern.to_owned())
+ .collect();
+ let mut dirs_vec: Vec<_> = dirs.iter().cloned().collect();
+
+ let match_func = move |path: &HgPath| -> bool {
+ let path = path.as_bytes();
+ let i = path.iter().rfind(|a| **a == b'/');
+ let dir = if let Some(i) = i {
+ &path[..*i as usize]
+ } else {
+ b"."
+ };
+ dirs.contains(dir.deref())
+ };
+ match_funcs.push(Box::new(match_func));
+
+ patterns.extend(b"rootfilesin: ");
+ dirs_vec.sort();
+ patterns.extend(dirs_vec.escaped_bytes());
+ } else {
+ let (new_re, match_func) = build_regex_match(&ignore_patterns)?;
+ patterns = new_re;
+ match_funcs.push(match_func)
+ }
+ }
+
+ Ok(if match_funcs.len() == 1 {
+ (patterns, match_funcs.remove(0), all_warnings)
+ } else {
+ (
+ patterns,
+ Box::new(move |f: &HgPath| -> bool {
+ match_funcs.iter().any(|match_func| match_func(f))
+ }),
+ all_warnings,
+ )
+ })
+}
+
+/// Parses all "ignore" files with their recursive includes and returns a
+/// function that checks whether a given file (in the general sense) should be
+/// ignored.
+pub fn get_ignore_function<'a>(
+ all_pattern_files: Vec<PathBuf>,
+ root_dir: impl AsRef<Path>,
+) -> PatternResult<(
+ Box<dyn for<'r> Fn(&'r HgPath) -> bool + Sync + 'a>,
+ Vec<PatternFileWarning>,
+)> {
+ let mut all_patterns = vec![];
+ let mut all_warnings = vec![];
+
+ for pattern_file in all_pattern_files.into_iter() {
+ let (patterns, warnings) =
+ get_patterns_from_file(pattern_file, &root_dir)?;
+
+ all_patterns.extend(patterns.to_owned());
+ all_warnings.extend(warnings);
+ }
+ let (matcher, warnings) = IncludeMatcher::new(all_patterns, root_dir)?;
+ all_warnings.extend(warnings);
+ Ok((
+ Box::new(move |path: &HgPath| matcher.matches(path)),
+ all_warnings,
+ ))
+}
+
+impl<'a> IncludeMatcher<'a> {
+ pub fn new(
+ ignore_patterns: Vec<IgnorePattern>,
+ root_dir: impl AsRef<Path>,
+ ) -> PatternResult<(Self, Vec<PatternFileWarning>)> {
+ let (patterns, match_fn, warnings) =
+ build_match(&ignore_patterns, root_dir)?;
+ let RootsDirsAndParents {
+ roots,
+ dirs,
+ parents,
+ } = roots_dirs_and_parents(&ignore_patterns)?;
+
+ let prefix = ignore_patterns.iter().any(|k| match k.syntax {
+ PatternSyntax::Path | PatternSyntax::RelPath => true,
+ _ => false,
+ });
+
+ Ok((
+ Self {
+ patterns,
+ match_fn,
+ prefix,
+ roots,
+ dirs,
+ parents,
+ },
+ warnings,
+ ))
+ }
+
+ fn get_all_parents_children(&self) -> DirsChildrenMultiset {
+ // TODO cache
+ let thing = self
+ .dirs
+ .iter()
+ .chain(self.roots.iter())
+ .chain(self.parents.iter());
+ DirsChildrenMultiset::new(thing, Some(&self.parents))
+ }
+}
+
+impl<'a> Display for IncludeMatcher<'a> {
+ fn fmt(&self, f: &mut Formatter<'_>) -> Result<(), Error> {
+ write!(
+ f,
+ "IncludeMatcher(includes='{}')",
+ String::from_utf8_lossy(&self.patterns.escaped_bytes())
+ )
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+ use pretty_assertions::assert_eq;
+ use std::path::Path;
+
+ #[test]
+ fn test_roots_and_dirs() {
+ let pats = vec![
+ IgnorePattern::new(PatternSyntax::Glob, b"g/h/*", Path::new("")),
+ IgnorePattern::new(PatternSyntax::Glob, b"g/h", Path::new("")),
+ IgnorePattern::new(PatternSyntax::Glob, b"g*", Path::new("")),
+ ];
+ let (roots, dirs) = roots_and_dirs(&pats);
+
+ assert_eq!(
+ roots,
+ vec!(
+ HgPathBuf::from_bytes(b"g/h"),
+ HgPathBuf::from_bytes(b"g/h"),
+ HgPathBuf::new()
+ ),
+ );
+ assert_eq!(dirs, vec!());
+ }
+
+ #[test]
+ fn test_roots_dirs_and_parents() {
+ let pats = vec![
+ IgnorePattern::new(PatternSyntax::Glob, b"g/h/*", Path::new("")),
+ IgnorePattern::new(PatternSyntax::Glob, b"g/h", Path::new("")),
+ IgnorePattern::new(PatternSyntax::Glob, b"g*", Path::new("")),
+ ];
+
+ let mut roots = HashSet::new();
+ roots.insert(HgPathBuf::from_bytes(b"g/h"));
+ roots.insert(HgPathBuf::new());
+
+ let dirs = HashSet::new();
+
+ let mut parents = HashSet::new();
+ parents.insert(HgPathBuf::new());
+ parents.insert(HgPathBuf::from_bytes(b"g"));
+
+ assert_eq!(
+ roots_dirs_and_parents(&pats).unwrap(),
+ RootsDirsAndParents {
+ roots,
+ dirs,
+ parents
+ }
+ );
+ }
+
+ #[test]
+ fn test_filematcher_visit_children_set() {
+ // Visitchildrenset
+ let files = vec![HgPath::new(b"dir/subdir/foo.txt")];
+ let matcher = FileMatcher::new(&files).unwrap();
+
+ let mut set = HashSet::new();
+ set.insert(HgPath::new(b"dir"));
+ assert_eq!(
+ matcher.visit_children_set(HgPath::new(b"")),
+ VisitChildrenSet::Set(set)
+ );
+
+ let mut set = HashSet::new();
+ set.insert(HgPath::new(b"subdir"));
+ assert_eq!(
+ matcher.visit_children_set(HgPath::new(b"dir")),
+ VisitChildrenSet::Set(set)
+ );
+
+ let mut set = HashSet::new();
+ set.insert(HgPath::new(b"foo.txt"));
+ assert_eq!(
+ matcher.visit_children_set(HgPath::new(b"dir/subdir")),
+ VisitChildrenSet::Set(set)
+ );
+
+ assert_eq!(
+ matcher.visit_children_set(HgPath::new(b"dir/subdir/x")),
+ VisitChildrenSet::Empty
+ );
+ assert_eq!(
+ matcher.visit_children_set(HgPath::new(b"dir/subdir/foo.txt")),
+ VisitChildrenSet::Empty
+ );
+ assert_eq!(
+ matcher.visit_children_set(HgPath::new(b"folder")),
+ VisitChildrenSet::Empty
+ );
+ }
+
+ #[test]
+ fn test_filematcher_visit_children_set_files_and_dirs() {
+ let files = vec![
+ HgPath::new(b"rootfile.txt"),
+ HgPath::new(b"a/file1.txt"),
+ HgPath::new(b"a/b/file2.txt"),
+ // No file in a/b/c
+ HgPath::new(b"a/b/c/d/file4.txt"),
+ ];
+ let matcher = FileMatcher::new(&files).unwrap();
+
+ let mut set = HashSet::new();
+ set.insert(HgPath::new(b"a"));
+ set.insert(HgPath::new(b"rootfile.txt"));
+ assert_eq!(
+ matcher.visit_children_set(HgPath::new(b"")),
+ VisitChildrenSet::Set(set)
+ );
+
+ let mut set = HashSet::new();
+ set.insert(HgPath::new(b"b"));
+ set.insert(HgPath::new(b"file1.txt"));
+ assert_eq!(
+ matcher.visit_children_set(HgPath::new(b"a")),
+ VisitChildrenSet::Set(set)
+ );
+
+ let mut set = HashSet::new();
+ set.insert(HgPath::new(b"c"));
+ set.insert(HgPath::new(b"file2.txt"));
+ assert_eq!(
+ matcher.visit_children_set(HgPath::new(b"a/b")),
+ VisitChildrenSet::Set(set)
+ );
+
+ let mut set = HashSet::new();
+ set.insert(HgPath::new(b"d"));
+ assert_eq!(
+ matcher.visit_children_set(HgPath::new(b"a/b/c")),
+ VisitChildrenSet::Set(set)
+ );
+ let mut set = HashSet::new();
+ set.insert(HgPath::new(b"file4.txt"));
+ assert_eq!(
+ matcher.visit_children_set(HgPath::new(b"a/b/c/d")),
+ VisitChildrenSet::Set(set)
+ );
+
+ assert_eq!(
+ matcher.visit_children_set(HgPath::new(b"a/b/c/d/e")),
+ VisitChildrenSet::Empty
+ );
+ assert_eq!(
+ matcher.visit_children_set(HgPath::new(b"folder")),
+ VisitChildrenSet::Empty
+ );
+ }
+
+ #[cfg(feature = "with-re2")]
+ #[test]
+ fn test_includematcher() {
+ // VisitchildrensetPrefix
+ let (matcher, _) = IncludeMatcher::new(
+ vec![IgnorePattern::new(
+ PatternSyntax::RelPath,
+ b"dir/subdir",
+ Path::new(""),
+ )],
+ "",
+ )
+ .unwrap();
+
+ let mut set = HashSet::new();
+ set.insert(HgPath::new(b"dir"));
+ assert_eq!(
+ matcher.visit_children_set(HgPath::new(b"")),
+ VisitChildrenSet::Set(set)
+ );
+
+ let mut set = HashSet::new();
+ set.insert(HgPath::new(b"subdir"));
+ assert_eq!(
+ matcher.visit_children_set(HgPath::new(b"dir")),
+ VisitChildrenSet::Set(set)
+ );
+ assert_eq!(
+ matcher.visit_children_set(HgPath::new(b"dir/subdir")),
+ VisitChildrenSet::Recursive
+ );
+ // OPT: This should probably be 'all' if its parent is?
+ assert_eq!(
+ matcher.visit_children_set(HgPath::new(b"dir/subdir/x")),
+ VisitChildrenSet::This
+ );
+ assert_eq!(
+ matcher.visit_children_set(HgPath::new(b"folder")),
+ VisitChildrenSet::Empty
+ );
+
+ // VisitchildrensetRootfilesin
+ let (matcher, _) = IncludeMatcher::new(
+ vec![IgnorePattern::new(
+ PatternSyntax::RootFiles,
+ b"dir/subdir",
+ Path::new(""),
+ )],
+ "",
+ )
+ .unwrap();
+
+ let mut set = HashSet::new();
+ set.insert(HgPath::new(b"dir"));
+ assert_eq!(
+ matcher.visit_children_set(HgPath::new(b"")),
+ VisitChildrenSet::Set(set)
+ );
+
+ let mut set = HashSet::new();
+ set.insert(HgPath::new(b"subdir"));
+ assert_eq!(
+ matcher.visit_children_set(HgPath::new(b"dir")),
+ VisitChildrenSet::Set(set)
+ );
+
+ assert_eq!(
+ matcher.visit_children_set(HgPath::new(b"dir/subdir")),
+ VisitChildrenSet::This
+ );
+ assert_eq!(
+ matcher.visit_children_set(HgPath::new(b"dir/subdir/x")),
+ VisitChildrenSet::Empty
+ );
+ assert_eq!(
+ matcher.visit_children_set(HgPath::new(b"folder")),
+ VisitChildrenSet::Empty
+ );
+
+ // VisitchildrensetGlob
+ let (matcher, _) = IncludeMatcher::new(
+ vec![IgnorePattern::new(
+ PatternSyntax::Glob,
+ b"dir/z*",
+ Path::new(""),
+ )],
+ "",
+ )
+ .unwrap();
+
+ let mut set = HashSet::new();
+ set.insert(HgPath::new(b"dir"));
+ assert_eq!(
+ matcher.visit_children_set(HgPath::new(b"")),
+ VisitChildrenSet::Set(set)
+ );
+ assert_eq!(
+ matcher.visit_children_set(HgPath::new(b"folder")),
+ VisitChildrenSet::Empty
+ );
+ assert_eq!(
+ matcher.visit_children_set(HgPath::new(b"dir")),
+ VisitChildrenSet::This
+ );
+ // OPT: these should probably be set().
+ assert_eq!(
+ matcher.visit_children_set(HgPath::new(b"dir/subdir")),
+ VisitChildrenSet::This
+ );
+ assert_eq!(
+ matcher.visit_children_set(HgPath::new(b"dir/subdir/x")),
+ VisitChildrenSet::This
+ );
+ }
+}
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/rust/hg-core/src/re2/mod.rs Thu Apr 16 22:51:09 2020 +0530
@@ -0,0 +1,21 @@
+/// re2 module
+///
+/// The Python implementation of Mercurial uses the Re2 regex engine when
+/// possible and if the bindings are installed, falling back to Python's `re`
+/// in case of unsupported syntax (Re2 is a non-backtracking engine).
+///
+/// Using it from Rust is not ideal. We need C++ bindings, a C++ compiler,
+/// Re2 needs to be installed... why not just use the `regex` crate?
+///
+/// Using Re2 from the Rust implementation guarantees backwards compatibility.
+/// We know it will work out of the box without needing to figure out the
+/// subtle differences in syntax. For example, `regex` currently does not
+/// support empty alternations (regex like `a||b`) which happens more often
+/// than we might think. Old benchmarks also showed worse performance from
+/// regex than with Re2, but the methodology and results were lost, so take
+/// this with a grain of salt.
+///
+/// The idea is to use Re2 for now as a temporary phase and then investigate
+/// how much work would be needed to use `regex`.
+mod re2;
+pub use re2::Re2;
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/rust/hg-core/src/re2/re2.rs Thu Apr 16 22:51:09 2020 +0530
@@ -0,0 +1,66 @@
+/*
+re2.rs
+
+Rust FFI bindings to Re2.
+
+Copyright 2020 Valentin Gatien-Baron
+
+This software may be used and distributed according to the terms of the
+GNU General Public License version 2 or any later version.
+*/
+use libc::{c_int, c_void};
+
+type Re2Ptr = *const c_void;
+
+/// Owning newtype wrapper around an opaque pointer to a C++ `RE2` object.
+///
+/// The pointer is obtained from `rust_re2_create` and released in `Drop`.
+pub struct Re2(Re2Ptr);
+
+/// `re2.h` says:
+/// "An "RE2" object is safe for concurrent use by multiple threads."
+///
+/// NOTE(review): only `Sync` is implemented here; the raw pointer member
+/// leaves `Re2` `!Send` by default — confirm that is intended.
+unsafe impl Sync for Re2 {}
+
+/// These bind to the C ABI in `rust_re2.cpp`.
+extern "C" {
+    /// Compile `data[..len]` into a new heap-allocated `RE2` object.
+    /// The result must eventually be released with `rust_re2_destroy`.
+    fn rust_re2_create(data: *const u8, len: usize) -> Re2Ptr;
+    /// Free an object obtained from `rust_re2_create`.
+    fn rust_re2_destroy(re2: Re2Ptr);
+    /// Whether the pattern compiled successfully (`RE2::ok()`).
+    fn rust_re2_ok(re2: Re2Ptr) -> bool;
+    /// Retrieve the compilation error message.
+    ///
+    /// `outdata`/`outlen` point into a string owned by the `RE2` object:
+    /// the bytes must be copied before `rust_re2_destroy` is called.
+    fn rust_re2_error(
+        re2: Re2Ptr,
+        outdata: *mut *const u8,
+        outlen: *mut usize,
+    ) -> bool;
+    /// Run a match; `anchor` maps to `RE2::Anchor` in `rust_re2.cpp`:
+    /// 0 = UNANCHORED, 1 = ANCHOR_START, anything else = ANCHOR_BOTH.
+    fn rust_re2_match(
+        re2: Re2Ptr,
+        data: *const u8,
+        len: usize,
+        anchor: c_int,
+    ) -> bool;
+}
+
+impl Re2 {
+    /// Compile `pattern` (byte/Latin-1 semantics, see `rust_re2.cpp`).
+    ///
+    /// On failure, returns Re2's error message as a `String`.
+    pub fn new(pattern: &[u8]) -> Result<Re2, String> {
+        unsafe {
+            let re2 = rust_re2_create(pattern.as_ptr(), pattern.len());
+            if rust_re2_ok(re2) {
+                Ok(Re2(re2))
+            } else {
+                let mut data: *const u8 = std::ptr::null();
+                let mut len: usize = 0;
+                rust_re2_error(re2, &mut data, &mut len);
+                // The error buffer is owned by the C++ object, so copy
+                // it into an owned `String` first...
+                let msg = String::from_utf8_lossy(
+                    std::slice::from_raw_parts(data, len),
+                )
+                .to_string();
+                // ...then destroy the failed object: it is never wrapped
+                // in `Re2`, so `Drop` would not run and it would leak.
+                rust_re2_destroy(re2);
+                Err(msg)
+            }
+        }
+    }
+
+    /// Whether `data` matches the pattern, anchored at the start
+    /// (anchor value 1, i.e. `RE2::ANCHOR_START` per `rust_re2.cpp`).
+    pub fn is_match(&self, data: &[u8]) -> bool {
+        unsafe { rust_re2_match(self.0, data.as_ptr(), data.len(), 1) }
+    }
+}
+
+impl Drop for Re2 {
+    /// Release the underlying C++ `RE2` object.
+    fn drop(&mut self) {
+        unsafe { rust_re2_destroy(self.0) }
+    }
+}
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/rust/hg-core/src/re2/rust_re2.cpp Thu Apr 16 22:51:09 2020 +0530
@@ -0,0 +1,49 @@
+/*
+rust_re2.cpp
+
+C ABI export of Re2's C++ interface for Rust FFI.
+
+Copyright 2020 Valentin Gatien-Baron
+
+This software may be used and distributed according to the terms of the
+GNU General Public License version 2 or any later version.
+*/
+
+#include <re2/re2.h>
+using namespace re2;
+
+/* C ABI bridge; signatures must stay in sync with the `extern "C"`
+   declarations in rust/hg-core/src/re2/re2.rs. */
+extern "C" {
+	RE2* rust_re2_create(const char* data, size_t len) {
+		RE2::Options o;
+		o.set_encoding(RE2::Options::Encoding::EncodingLatin1);
+		o.set_log_errors(false);
+		o.set_max_mem(50000000);
+
+		return new RE2(StringPiece(data, len), o);
+	}
+
+	void rust_re2_destroy(RE2* re) {
+		delete re;
+	}
+
+	bool rust_re2_ok(RE2* re) {
+		return re->ok();
+	}
+
+	/* The returned pointer aliases the string owned by `re`; callers
+	   must copy it before destroying `re`. */
+	void rust_re2_error(RE2* re, const char** outdata, size_t* outlen) {
+		const std::string& e = re->error();
+		*outdata = e.data();
+		*outlen = e.length();
+	}
+
+	/* `data` is read-only: declare it const to match the Rust-side
+	   declaration (`*const u8`). */
+	bool rust_re2_match(RE2* re, const char* data, size_t len, int ianchor) {
+		const StringPiece sp = StringPiece(data, len);
+
+		/* 0 = UNANCHORED, 1 = ANCHOR_START, other = ANCHOR_BOTH */
+		RE2::Anchor anchor =
+			ianchor == 0 ? RE2::Anchor::UNANCHORED :
+			(ianchor == 1 ? RE2::Anchor::ANCHOR_START :
+			 RE2::Anchor::ANCHOR_BOTH);
+
+		return re->Match(sp, 0, len, anchor, NULL, 0);
+	}
+}
--- a/rust/hg-core/src/revlog.rs Mon Apr 13 16:30:13 2020 +0300
+++ b/rust/hg-core/src/revlog.rs Thu Apr 16 22:51:09 2020 +0530
@@ -5,6 +5,10 @@
// GNU General Public License version 2 or any later version.
//! Mercurial concepts for handling revision history
+pub mod node;
+pub mod nodemap;
+pub use node::{Node, NodeError, NodePrefix, NodePrefixRef};
+
/// Mercurial revision numbers
///
/// As noted in revlog.c, revision numbers are actually encoded in
@@ -36,3 +40,17 @@
ParentOutOfRange(Revision),
WorkingDirectoryUnsupported,
}
+
+/// The Mercurial Revlog Index
+///
+/// This is currently limited to the minimal interface that is needed for
+/// the [`nodemap`](nodemap/index.html) module
+pub trait RevlogIndex {
+    /// Total number of Revisions referenced in this index
+    fn len(&self) -> usize;
+
+    /// Return a reference to the Node or `None` if rev is out of bounds
+    ///
+    /// `NULL_REVISION` is not considered to be out of bounds.
+    // NOTE(review): presumably `node(NULL_REVISION)` returns the null
+    // node rather than `None` — confirm against implementors.
+    fn node(&self, rev: Revision) -> Option<&Node>;
+}
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/rust/hg-core/src/revlog/node.rs Thu Apr 16 22:51:09 2020 +0530
@@ -0,0 +1,429 @@
+// Copyright 2019-2020 Georges Racinet <georges.racinet@octobus.net>
+//
+// This software may be used and distributed according to the terms of the
+// GNU General Public License version 2 or any later version.
+
+//! Definitions and utilities for Revision nodes
+//!
+//! In Mercurial code base, it is customary to call "a node" the binary SHA
+//! of a revision.
+
+use hex::{self, FromHex, FromHexError};
+
+/// The length in bytes of a `Node`
+///
+/// This constant is meant to ease refactors of this module, and
+/// is private so that calling code does not expect all nodes to have
+/// the same size, should we support several formats concurrently in
+/// the future.
+const NODE_BYTES_LENGTH: usize = 20;
+
+/// The length in nybbles (hexadecimal digits) of a `Node`
+///
+/// See also `NODE_BYTES_LENGTH` about it being private.
+const NODE_NYBBLES_LENGTH: usize = 2 * NODE_BYTES_LENGTH;
+
+/// Private alias for readability and to ease future change
+type NodeData = [u8; NODE_BYTES_LENGTH];
+
+/// Binary revision SHA
+///
+/// ## Future changes of hash size
+///
+/// To accommodate future changes of hash size, Rust callers
+/// should use the conversion methods at the boundaries (FFI, actual
+/// computation of hashes and I/O) only, and only if required.
+///
+/// All other callers outside of unit tests should just handle `Node` values
+/// and never make any assumption on the actual length, using [`nybbles_len`]
+/// if they need a loop boundary.
+///
+/// All methods that create a `Node` either take a type that enforces
+/// the size or fail immediately at runtime with [`ExactLengthRequired`].
+///
+/// [`nybbles_len`]: #method.nybbles_len
+/// [`ExactLengthRequired`]: struct.NodeError#variant.ExactLengthRequired
+#[derive(Clone, Debug, PartialEq)]
+#[repr(transparent)]
+pub struct Node {
+    data: NodeData,
+}
+
+/// The node value for NULL_REVISION
+pub const NULL_NODE: Node = Node {
+ data: [0; NODE_BYTES_LENGTH],
+};
+
+impl From<NodeData> for Node {
+ fn from(data: NodeData) -> Node {
+ Node { data }
+ }
+}
+
+#[derive(Debug, PartialEq)]
+pub enum NodeError {
+ ExactLengthRequired(usize, String),
+ PrefixTooLong(String),
+ HexError(FromHexError, String),
+}
+
+/// Low level utility function, also for prefixes
+///
+/// Returns the `i`th half-byte of `s`: the high nibble of `s[i / 2]` for
+/// even `i`, the low nibble for odd `i`. Panics (slice indexing) if
+/// `i / 2` is out of bounds for `s`.
+fn get_nybble(s: &[u8], i: usize) -> u8 {
+    if i % 2 == 0 {
+        s[i / 2] >> 4
+    } else {
+        s[i / 2] & 0x0f
+    }
+}
+
+impl Node {
+ /// Retrieve the `i`th half-byte of the binary data.
+ ///
+ /// This is also the `i`th hexadecimal digit in numeric form,
+ /// also called a [nybble](https://en.wikipedia.org/wiki/Nibble).
+ pub fn get_nybble(&self, i: usize) -> u8 {
+ get_nybble(&self.data, i)
+ }
+
+ /// Length of the data, in nybbles
+ pub fn nybbles_len(&self) -> usize {
+ // public exposure as an instance method only, so that we can
+ // easily support several sizes of hashes if needed in the future.
+ NODE_NYBBLES_LENGTH
+ }
+
+ /// Convert from hexadecimal string representation
+ ///
+ /// Exact length is required.
+ ///
+ /// To be used in FFI and I/O only, in order to facilitate future
+ /// changes of hash format.
+ pub fn from_hex(hex: &str) -> Result<Node, NodeError> {
+ Ok(NodeData::from_hex(hex)
+ .map_err(|e| NodeError::from((e, hex)))?
+ .into())
+ }
+
+ /// Convert to hexadecimal string representation
+ ///
+ /// To be used in FFI and I/O only, in order to facilitate future
+ /// changes of hash format.
+ pub fn encode_hex(&self) -> String {
+ hex::encode(self.data)
+ }
+
+ /// Provide access to binary data
+ ///
+ /// This is needed by FFI layers, for instance to return expected
+ /// binary values to Python.
+ pub fn as_bytes(&self) -> &[u8] {
+ &self.data
+ }
+}
+
+impl<T: AsRef<str>> From<(FromHexError, T)> for NodeError {
+ fn from(err_offender: (FromHexError, T)) -> Self {
+ let (err, offender) = err_offender;
+ match err {
+ FromHexError::InvalidStringLength => {
+ NodeError::ExactLengthRequired(
+ NODE_NYBBLES_LENGTH,
+ offender.as_ref().to_owned(),
+ )
+ }
+ _ => NodeError::HexError(err, offender.as_ref().to_owned()),
+ }
+ }
+}
+
+/// The beginning of a binary revision SHA.
+///
+/// Since it can potentially come from an hexadecimal representation with
+/// odd length, it needs to carry around whether the last 4 bits are relevant
+/// or not.
+#[derive(Debug, PartialEq)]
+pub struct NodePrefix {
+ buf: Vec<u8>,
+ is_odd: bool,
+}
+
+impl NodePrefix {
+ /// Convert from hexadecimal string representation
+ ///
+ /// Similarly to `hex::decode`, can be used with Unicode string types
+ /// (`String`, `&str`) as well as bytes.
+ ///
+ /// To be used in FFI and I/O only, in order to facilitate future
+ /// changes of hash format.
+ pub fn from_hex(hex: impl AsRef<[u8]>) -> Result<Self, NodeError> {
+ let hex = hex.as_ref();
+ let len = hex.len();
+ if len > NODE_NYBBLES_LENGTH {
+ return Err(NodeError::PrefixTooLong(
+ String::from_utf8_lossy(hex).to_owned().to_string(),
+ ));
+ }
+
+ let is_odd = len % 2 == 1;
+ let even_part = if is_odd { &hex[..len - 1] } else { hex };
+ let mut buf: Vec<u8> = Vec::from_hex(&even_part)
+ .map_err(|e| (e, String::from_utf8_lossy(hex)))?;
+
+ if is_odd {
+ let latest_char = char::from(hex[len - 1]);
+ let latest_nybble = latest_char.to_digit(16).ok_or_else(|| {
+ (
+ FromHexError::InvalidHexCharacter {
+ c: latest_char,
+ index: len - 1,
+ },
+ String::from_utf8_lossy(hex),
+ )
+ })? as u8;
+ buf.push(latest_nybble << 4);
+ }
+ Ok(NodePrefix { buf, is_odd })
+ }
+
+ pub fn borrow(&self) -> NodePrefixRef {
+ NodePrefixRef {
+ buf: &self.buf,
+ is_odd: self.is_odd,
+ }
+ }
+}
+
+#[derive(Clone, Debug, PartialEq)]
+pub struct NodePrefixRef<'a> {
+ buf: &'a [u8],
+ is_odd: bool,
+}
+
+impl<'a> NodePrefixRef<'a> {
+ pub fn len(&self) -> usize {
+ if self.is_odd {
+ self.buf.len() * 2 - 1
+ } else {
+ self.buf.len() * 2
+ }
+ }
+
+ pub fn is_prefix_of(&self, node: &Node) -> bool {
+ if self.is_odd {
+ let buf = self.buf;
+ let last_pos = buf.len() - 1;
+ node.data.starts_with(buf.split_at(last_pos).0)
+ && node.data[last_pos] >> 4 == buf[last_pos] >> 4
+ } else {
+ node.data.starts_with(self.buf)
+ }
+ }
+
+ /// Retrieve the `i`th half-byte from the prefix.
+ ///
+ /// This is also the `i`th hexadecimal digit in numeric form,
+ /// also called a [nybble](https://en.wikipedia.org/wiki/Nibble).
+ pub fn get_nybble(&self, i: usize) -> u8 {
+ assert!(i < self.len());
+ get_nybble(self.buf, i)
+ }
+
+ /// Return the index first nybble that's different from `node`
+ ///
+ /// If the return value is `None` that means that `self` is
+ /// a prefix of `node`, but the current method is a bit slower
+ /// than `is_prefix_of`.
+ ///
+ /// Returned index is as in `get_nybble`, i.e., starting at 0.
+ pub fn first_different_nybble(&self, node: &Node) -> Option<usize> {
+ let buf = self.buf;
+ let until = if self.is_odd {
+ buf.len() - 1
+ } else {
+ buf.len()
+ };
+ for i in 0..until {
+ if buf[i] != node.data[i] {
+ if buf[i] & 0xf0 == node.data[i] & 0xf0 {
+ return Some(2 * i + 1);
+ } else {
+ return Some(2 * i);
+ }
+ }
+ }
+ if self.is_odd && buf[until] & 0xf0 != node.data[until] & 0xf0 {
+ Some(until * 2)
+ } else {
+ None
+ }
+ }
+}
+
+/// A shortcut for full `Node` references
+impl<'a> From<&'a Node> for NodePrefixRef<'a> {
+ fn from(node: &'a Node) -> Self {
+ NodePrefixRef {
+ buf: &node.data,
+ is_odd: false,
+ }
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+
+ fn sample_node() -> Node {
+ let mut data = [0; NODE_BYTES_LENGTH];
+ data.copy_from_slice(&[
+ 0x01, 0x23, 0x45, 0x67, 0x89, 0xab, 0xcd, 0xef, 0xfe, 0xdc, 0xba,
+ 0x98, 0x76, 0x54, 0x32, 0x10, 0xde, 0xad, 0xbe, 0xef,
+ ]);
+ data.into()
+ }
+
+ /// Pad an hexadecimal string to reach `NODE_NYBBLES_LENGTH`
+ ///
+ /// The padding is made with zeros
+ pub fn hex_pad_right(hex: &str) -> String {
+ let mut res = hex.to_string();
+ while res.len() < NODE_NYBBLES_LENGTH {
+ res.push('0');
+ }
+ res
+ }
+
+ fn sample_node_hex() -> String {
+ hex_pad_right("0123456789abcdeffedcba9876543210deadbeef")
+ }
+
+ #[test]
+ fn test_node_from_hex() {
+ assert_eq!(Node::from_hex(&sample_node_hex()), Ok(sample_node()));
+
+ let mut short = hex_pad_right("0123");
+ short.pop();
+ short.pop();
+ assert_eq!(
+ Node::from_hex(&short),
+ Err(NodeError::ExactLengthRequired(NODE_NYBBLES_LENGTH, short)),
+ );
+
+ let not_hex = hex_pad_right("012... oops");
+ assert_eq!(
+ Node::from_hex(¬_hex),
+ Err(NodeError::HexError(
+ FromHexError::InvalidHexCharacter { c: '.', index: 3 },
+ not_hex,
+ )),
+ );
+ }
+
+ #[test]
+ fn test_node_encode_hex() {
+ assert_eq!(sample_node().encode_hex(), sample_node_hex());
+ }
+
+ #[test]
+ fn test_prefix_from_hex() -> Result<(), NodeError> {
+ assert_eq!(
+ NodePrefix::from_hex("0e1")?,
+ NodePrefix {
+ buf: vec![14, 16],
+ is_odd: true
+ }
+ );
+ assert_eq!(
+ NodePrefix::from_hex("0e1a")?,
+ NodePrefix {
+ buf: vec![14, 26],
+ is_odd: false
+ }
+ );
+
+ // checking limit case
+ let node_as_vec = sample_node().data.iter().cloned().collect();
+ assert_eq!(
+ NodePrefix::from_hex(sample_node_hex())?,
+ NodePrefix {
+ buf: node_as_vec,
+ is_odd: false
+ }
+ );
+
+ Ok(())
+ }
+
+    #[test]
+    fn test_prefix_from_hex_errors() {
+        assert_eq!(
+            NodePrefix::from_hex("testgr"),
+            Err(NodeError::HexError(
+                FromHexError::InvalidHexCharacter { c: 't', index: 0 },
+                "testgr".to_string()
+            ))
+        );
+        let mut long = NULL_NODE.encode_hex();
+        long.push('c');
+        match NodePrefix::from_hex(&long)
+            .expect_err("should be refused as too long")
+        {
+            NodeError::PrefixTooLong(s) => assert_eq!(s, long),
+            // Pass format arguments to `panic!` directly: going through
+            // `format!` is redundant (clippy `panic-params`) and becomes
+            // a hard error in the Rust 2021 edition.
+            err => panic!("Should have been TooLong, got {:?}", err),
+        }
+    }
+
+ #[test]
+ fn test_is_prefix_of() -> Result<(), NodeError> {
+ let mut node_data = [0; NODE_BYTES_LENGTH];
+ node_data[0] = 0x12;
+ node_data[1] = 0xca;
+ let node = Node::from(node_data);
+ assert!(NodePrefix::from_hex("12")?.borrow().is_prefix_of(&node));
+ assert!(!NodePrefix::from_hex("1a")?.borrow().is_prefix_of(&node));
+ assert!(NodePrefix::from_hex("12c")?.borrow().is_prefix_of(&node));
+ assert!(!NodePrefix::from_hex("12d")?.borrow().is_prefix_of(&node));
+ Ok(())
+ }
+
+ #[test]
+ fn test_get_nybble() -> Result<(), NodeError> {
+ let prefix = NodePrefix::from_hex("dead6789cafe")?;
+ assert_eq!(prefix.borrow().get_nybble(0), 13);
+ assert_eq!(prefix.borrow().get_nybble(7), 9);
+ Ok(())
+ }
+
+ #[test]
+ fn test_first_different_nybble_even_prefix() {
+ let prefix = NodePrefix::from_hex("12ca").unwrap();
+ let prefref = prefix.borrow();
+ let mut node = Node::from([0; NODE_BYTES_LENGTH]);
+ assert_eq!(prefref.first_different_nybble(&node), Some(0));
+ node.data[0] = 0x13;
+ assert_eq!(prefref.first_different_nybble(&node), Some(1));
+ node.data[0] = 0x12;
+ assert_eq!(prefref.first_different_nybble(&node), Some(2));
+ node.data[1] = 0xca;
+ // now it is a prefix
+ assert_eq!(prefref.first_different_nybble(&node), None);
+ }
+
+ #[test]
+ fn test_first_different_nybble_odd_prefix() {
+ let prefix = NodePrefix::from_hex("12c").unwrap();
+ let prefref = prefix.borrow();
+ let mut node = Node::from([0; NODE_BYTES_LENGTH]);
+ assert_eq!(prefref.first_different_nybble(&node), Some(0));
+ node.data[0] = 0x13;
+ assert_eq!(prefref.first_different_nybble(&node), Some(1));
+ node.data[0] = 0x12;
+ assert_eq!(prefref.first_different_nybble(&node), Some(2));
+ node.data[1] = 0xca;
+ // now it is a prefix
+ assert_eq!(prefref.first_different_nybble(&node), None);
+ }
+}
+
+#[cfg(test)]
+pub use tests::hex_pad_right;
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/rust/hg-core/src/revlog/nodemap.rs Thu Apr 16 22:51:09 2020 +0530
@@ -0,0 +1,1122 @@
+// Copyright 2018-2020 Georges Racinet <georges.racinet@octobus.net>
+// and Mercurial contributors
+//
+// This software may be used and distributed according to the terms of the
+// GNU General Public License version 2 or any later version.
+//! Indexing facilities for fast retrieval of `Revision` from `Node`
+//!
+//! This provides a variation on the 16-ary radix tree that is
+//! provided as "nodetree" in revlog.c, ready for append-only persistence
+//! on disk.
+//!
+//! Following existing implicit conventions, the "nodemap" terminology
+//! is used in a more abstract context.
+
+use super::{
+ node::NULL_NODE, Node, NodeError, NodePrefix, NodePrefixRef, Revision,
+ RevlogIndex, NULL_REVISION,
+};
+
+use std::cmp::max;
+use std::fmt;
+use std::mem;
+use std::ops::Deref;
+use std::ops::Index;
+use std::slice;
+
+#[derive(Debug, PartialEq)]
+pub enum NodeMapError {
+    /// Several stored revisions match the queried node prefix
+    MultipleResults,
+    /// The given hexadecimal prefix could not be parsed (see `NodeError`)
+    InvalidNodePrefix(NodeError),
+    /// A `Revision` stored in the nodemap could not be found in the index
+    RevisionNotInIndex(Revision),
+}
+
+impl From<NodeError> for NodeMapError {
+    fn from(err: NodeError) -> Self {
+        NodeMapError::InvalidNodePrefix(err)
+    }
+}
+
+/// Mapping system from Mercurial nodes to revision numbers.
+///
+/// ## `RevlogIndex` and `NodeMap`
+///
+/// One way to think about their relationship is that
+/// the `NodeMap` is a prefix-oriented reverse index of the `Node` information
+/// carried by a [`RevlogIndex`].
+///
+/// Many of the methods in this trait take a `RevlogIndex` argument
+/// which is used for validation of their results. This index must naturally
+/// be the one the `NodeMap` is about, and it must be consistent.
+///
+/// Notably, the `NodeMap` must not store
+/// information about more `Revision` values than there are in the index.
+/// In these methods, if an encountered `Revision` is not in the index, a
+/// [`RevisionNotInIndex`] error is returned.
+///
+/// In insert operations, the rule is thus that the `RevlogIndex` must
+/// be updated first, and the `NodeMap` second.
+///
+/// [`RevisionNotInIndex`]: enum.NodeMapError.html#variant.RevisionNotInIndex
+/// [`RevlogIndex`]: ../trait.RevlogIndex.html
+pub trait NodeMap {
+    /// Find the unique `Revision` having the given `Node`
+    ///
+    /// If no Revision matches the given `Node`, `Ok(None)` is returned.
+    fn find_node(
+        &self,
+        index: &impl RevlogIndex,
+        node: &Node,
+    ) -> Result<Option<Revision>, NodeMapError> {
+        self.find_bin(index, node.into())
+    }
+
+    /// Find the unique Revision whose `Node` starts with a given binary prefix
+    ///
+    /// If no Revision matches the given prefix, `Ok(None)` is returned.
+    ///
+    /// If several Revisions match the given prefix, a [`MultipleResults`]
+    /// error is returned.
+    fn find_bin<'a>(
+        &self,
+        idx: &impl RevlogIndex,
+        prefix: NodePrefixRef<'a>,
+    ) -> Result<Option<Revision>, NodeMapError>;
+
+    /// Find the unique Revision whose `Node` hexadecimal string representation
+    /// starts with a given prefix
+    ///
+    /// If no Revision matches the given prefix, `Ok(None)` is returned.
+    ///
+    /// If several Revisions match the given prefix, a [`MultipleResults`]
+    /// error is returned.
+    fn find_hex(
+        &self,
+        idx: &impl RevlogIndex,
+        prefix: &str,
+    ) -> Result<Option<Revision>, NodeMapError> {
+        self.find_bin(idx, NodePrefix::from_hex(prefix)?.borrow())
+    }
+
+    /// Give the size of the shortest node prefix that determines
+    /// the revision uniquely.
+    ///
+    /// From a binary node prefix, if it is matched in the node map, this
+    /// returns the number of hexadecimal digits that would had sufficed
+    /// to find the revision uniquely.
+    ///
+    /// Returns `None` if no `Revision` could be found for the prefix.
+    ///
+    /// If several Revisions match the given prefix, a [`MultipleResults`]
+    /// error is returned.
+    fn unique_prefix_len_bin<'a>(
+        &self,
+        idx: &impl RevlogIndex,
+        node_prefix: NodePrefixRef<'a>,
+    ) -> Result<Option<usize>, NodeMapError>;
+
+    /// Same as `unique_prefix_len_bin`, with the hexadecimal representation
+    /// of the prefix as input.
+    fn unique_prefix_len_hex(
+        &self,
+        idx: &impl RevlogIndex,
+        prefix: &str,
+    ) -> Result<Option<usize>, NodeMapError> {
+        self.unique_prefix_len_bin(idx, NodePrefix::from_hex(prefix)?.borrow())
+    }
+
+    /// Same as `unique_prefix_len_bin`, with a full `Node` as input
+    fn unique_prefix_len_node(
+        &self,
+        idx: &impl RevlogIndex,
+        node: &Node,
+    ) -> Result<Option<usize>, NodeMapError> {
+        self.unique_prefix_len_bin(idx, node.into())
+    }
+}
+
+/// A `NodeMap` that can also record new (`Node`, `Revision`) pairs.
+pub trait MutableNodeMap: NodeMap {
+    /// Record `rev` as the revision for `node`.
+    ///
+    /// `index` must already know about `rev` (see the `NodeMap` trait
+    /// docs about update ordering).
+    fn insert<I: RevlogIndex>(
+        &mut self,
+        index: &I,
+        node: &Node,
+        rev: Revision,
+    ) -> Result<(), NodeMapError>;
+}
+
+/// Low level NodeTree [`Blocks`] elements
+///
+/// These are exactly as they are stored on persistent storage, except for
+/// endianness, which `Block::get`/`Block::set` convert (big endian on
+/// storage).
+type RawElement = i32;
+
+/// High level representation of values in NodeTree
+/// [`Blocks`](struct.Block.html)
+///
+/// This is the high level representation that most algorithms should
+/// use.
+#[derive(Clone, Debug, Eq, PartialEq)]
+enum Element {
+    Rev(Revision),
+    Block(usize),
+    None,
+}
+
+impl From<RawElement> for Element {
+    /// Conversion from low level representation, after endianness conversion.
+    ///
+    /// See [`Block`](struct.Block.html) for explanation about the encoding.
+    fn from(raw: RawElement) -> Element {
+        if raw >= 0 {
+            Element::Block(raw as usize)
+        } else if raw == -1 {
+            Element::None
+        } else {
+            // raw <= -2 encodes Rev(-raw - 2): revision `r` is stored as
+            // `-r - 2` so that Rev(0) does not collide with Block(0).
+            Element::Rev(-raw - 2)
+        }
+    }
+}
+
+impl From<Element> for RawElement {
+    /// Conversion to the low level representation, before endianness
+    /// conversion — the inverse of `From<RawElement> for Element`.
+    ///
+    /// See [`Block`](struct.Block.html) for explanation about the encoding.
+    fn from(element: Element) -> RawElement {
+        match element {
+            // An absent element is encoded as -1 ("-1 is the empty marker
+            // rather than 0", see `Block` docs); encoding it as 0 would
+            // incorrectly round-trip as `Block(0)`.
+            Element::None => -1,
+            Element::Block(i) => i as RawElement,
+            Element::Rev(rev) => -rev - 2,
+        }
+    }
+}
+
+/// A logical block of the `NodeTree`, packed with a fixed size.
+///
+/// These are always used in container types implementing `Index<Block>`,
+/// such as `&Block`
+///
+/// As an array of integers, its ith element encodes that the
+/// ith potential edge from the block, representing the ith hexadecimal digit
+/// (nybble) `i` is either:
+///
+/// - absent (value -1)
+/// - another `Block` in the same indexable container (value ≥ 0)
+/// - a `Revision` leaf (value ≤ -2)
+///
+/// Endianness has to be fixed for consistency on shared storage across
+/// different architectures.
+///
+/// A key difference with the C `nodetree` is that we need to be
+/// able to represent the [`Block`] at index 0, hence -1 is the empty marker
+/// rather than 0 and the `Revision` range upper limit of -2 instead of -1.
+///
+/// Another related difference is that `NULL_REVISION` (-1) is not
+/// represented at all, because we want an immutable empty nodetree
+/// to be valid.
+
+#[derive(Copy, Clone)]
+pub struct Block([u8; BLOCK_SIZE]);
+
+/// Not derivable for arrays of length >32 until const generics are stable
+impl PartialEq for Block {
+    fn eq(&self, other: &Self) -> bool {
+        &self.0[..] == &other.0[..]
+    }
+}
+
+/// Size of a `Block` in bytes: 16 elements (one per nybble value) of
+/// 4 bytes each (`mem::size_of::<RawElement>()`).
+pub const BLOCK_SIZE: usize = 64;
+
+impl Block {
+    /// Create a block with all 16 elements absent.
+    fn new() -> Self {
+        // -1 in 2's complement to create an absent node
+        let byte: u8 = 255;
+        Block([byte; BLOCK_SIZE])
+    }
+
+    /// Decode the element stored for `nybble` (0..16), converting from
+    /// the on-storage big-endian representation.
+    fn get(&self, nybble: u8) -> Element {
+        let index = nybble as usize * mem::size_of::<RawElement>();
+        Element::from(RawElement::from_be_bytes([
+            self.0[index],
+            self.0[index + 1],
+            self.0[index + 2],
+            self.0[index + 3],
+        ]))
+    }
+
+    /// Encode `element` for `nybble` (0..16) in big endian.
+    fn set(&mut self, nybble: u8, element: Element) {
+        let values = RawElement::to_be_bytes(element.into());
+        let index = nybble as usize * mem::size_of::<RawElement>();
+        self.0[index] = values[0];
+        self.0[index + 1] = values[1];
+        self.0[index + 2] = values[2];
+        self.0[index + 3] = values[3];
+    }
+}
+
+impl fmt::Debug for Block {
+ /// sparse representation for testing and debugging purposes
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ f.debug_map()
+ .entries((0..16).filter_map(|i| match self.get(i) {
+ Element::None => None,
+ element => Some((i, element)),
+ }))
+ .finish()
+ }
+}
+
+/// A mutable 16-radix tree with the root block logically at the end
+///
+/// Because of the append only nature of our node trees, we need to
+/// keep the original untouched and store new blocks separately.
+///
+/// The mutable root `Block` is kept apart so that we don't have to rebump
+/// it on each insertion.
+pub struct NodeTree {
+ readonly: Box<dyn Deref<Target = [Block]> + Send>,
+ growable: Vec<Block>,
+ root: Block,
+ masked_inner_blocks: usize,
+}
+
+impl Index<usize> for NodeTree {
+    type Output = Block;
+
+    /// Blocks are logically laid out as: readonly blocks, then growable
+    /// blocks, with the root block last (at index
+    /// `readonly.len() + growable.len()`, consistent with `len() - 1`).
+    fn index(&self, i: usize) -> &Block {
+        let ro_len = self.readonly.len();
+        if i < ro_len {
+            &self.readonly[i]
+        } else if i == ro_len + self.growable.len() {
+            &self.root
+        } else {
+            // panics (slice indexing) if `i` is past the root index
+            &self.growable[i - ro_len]
+        }
+    }
+}
+
+/// Return `None` unless the `Node` for `rev` has given prefix in `index`.
+fn has_prefix_or_none(
+ idx: &impl RevlogIndex,
+ prefix: NodePrefixRef,
+ rev: Revision,
+) -> Result<Option<Revision>, NodeMapError> {
+ idx.node(rev)
+ .ok_or_else(|| NodeMapError::RevisionNotInIndex(rev))
+ .map(|node| {
+ if prefix.is_prefix_of(node) {
+ Some(rev)
+ } else {
+ None
+ }
+ })
+}
+
+/// validate that the candidate's node starts indeed with given prefix,
+/// and treat ambiguities related to `NULL_REVISION`.
+///
+/// From the data in the NodeTree, one can only conclude that some
+/// revision is the only one for a *subprefix* of the one being looked up.
+fn validate_candidate(
+ idx: &impl RevlogIndex,
+ prefix: NodePrefixRef,
+ candidate: (Option<Revision>, usize),
+) -> Result<(Option<Revision>, usize), NodeMapError> {
+ let (rev, steps) = candidate;
+ if let Some(nz_nybble) = prefix.first_different_nybble(&NULL_NODE) {
+ rev.map_or(Ok((None, steps)), |r| {
+ has_prefix_or_none(idx, prefix, r)
+ .map(|opt| (opt, max(steps, nz_nybble + 1)))
+ })
+ } else {
+ // the prefix is only made of zeros; NULL_REVISION always matches it
+ // and any other *valid* result is an ambiguity
+ match rev {
+ None => Ok((Some(NULL_REVISION), steps + 1)),
+ Some(r) => match has_prefix_or_none(idx, prefix, r)? {
+ None => Ok((Some(NULL_REVISION), steps + 1)),
+ _ => Err(NodeMapError::MultipleResults),
+ },
+ }
+ }
+}
+
+impl NodeTree {
+ /// Initiate a NodeTree from an immutable slice-like of `Block`
+ ///
+ /// We keep `readonly` and clone its root block if it isn't empty.
+ fn new(readonly: Box<dyn Deref<Target = [Block]> + Send>) -> Self {
+ let root = readonly
+ .last()
+ .map(|b| b.clone())
+ .unwrap_or_else(|| Block::new());
+ NodeTree {
+ readonly: readonly,
+ growable: Vec::new(),
+ root: root,
+ masked_inner_blocks: 0,
+ }
+ }
+
+ /// Create from an opaque bunch of bytes
+ ///
+ /// The created `NodeTreeBytes` from `buffer`,
+ /// of which exactly `amount` bytes are used.
+ ///
+ /// - `buffer` could be derived from `PyBuffer` and `Mmap` objects.
+ /// - `offset` allows for the final file format to include fixed data
+ /// (generation number, behavioural flags)
+ /// - `amount` is expressed in bytes, and is not automatically derived from
+ /// `bytes`, so that a caller that manages them atomically can perform
+ /// temporary disk serializations and still rollback easily if needed.
+ /// First use-case for this would be to support Mercurial shell hooks.
+ ///
+ /// panics if `buffer` is smaller than `amount`
+ pub fn load_bytes(
+ bytes: Box<dyn Deref<Target = [u8]> + Send>,
+ amount: usize,
+ ) -> Self {
+ NodeTree::new(Box::new(NodeTreeBytes::new(bytes, amount)))
+ }
+
+ /// Retrieve added `Block` and the original immutable data
+ pub fn into_readonly_and_added(
+ self,
+ ) -> (Box<dyn Deref<Target = [Block]> + Send>, Vec<Block>) {
+ let mut vec = self.growable;
+ let readonly = self.readonly;
+ if readonly.last() != Some(&self.root) {
+ vec.push(self.root);
+ }
+ (readonly, vec)
+ }
+
+ /// Retrieve added `Blocks` as bytes, ready to be written to persistent
+ /// storage
+ pub fn into_readonly_and_added_bytes(
+ self,
+ ) -> (Box<dyn Deref<Target = [Block]> + Send>, Vec<u8>) {
+ let (readonly, vec) = self.into_readonly_and_added();
+ // Prevent running `v`'s destructor so we are in complete control
+ // of the allocation.
+ let vec = mem::ManuallyDrop::new(vec);
+
+ // Transmute the `Vec<Block>` to a `Vec<u8>`. Blocks are contiguous
+ // bytes, so this is perfectly safe.
+ let bytes = unsafe {
+ // Assert that `Block` hasn't been changed and has no padding
+ let _: [u8; 4 * BLOCK_SIZE] =
+ std::mem::transmute([Block::new(); 4]);
+
+ // /!\ Any use of `vec` after this is use-after-free.
+ // TODO: use `into_raw_parts` once stabilized
+ Vec::from_raw_parts(
+ vec.as_ptr() as *mut u8,
+ vec.len() * BLOCK_SIZE,
+ vec.capacity() * BLOCK_SIZE,
+ )
+ };
+ (readonly, bytes)
+ }
+
+ /// Total number of blocks
+ fn len(&self) -> usize {
+ self.readonly.len() + self.growable.len() + 1
+ }
+
+ /// Implemented for completeness
+ ///
+ /// A `NodeTree` always has at least the mutable root block.
+ #[allow(dead_code)]
+ fn is_empty(&self) -> bool {
+ false
+ }
+
+ /// Main working method for `NodeTree` searches
+ ///
+ /// The first returned value is the result of analysing `NodeTree` data
+ /// *alone*: whereas `None` guarantees that the given prefix is absent
+ /// from the `NodeTree` data (but still could match `NULL_NODE`), with
+ /// `Some(rev)`, it is to be understood that `rev` is the unique `Revision`
+ /// that could match the prefix. Actually, all that can be inferred from
+ /// the `NodeTree` data is that `rev` is the revision with the longest
+ /// common node prefix with the given prefix.
+ ///
+ /// The second returned value is the size of the smallest subprefix
+ /// of `prefix` that would give the same result, i.e. not the
+ /// `MultipleResults` error variant (again, using only the data of the
+ /// `NodeTree`).
+ fn lookup(
+ &self,
+ prefix: NodePrefixRef,
+ ) -> Result<(Option<Revision>, usize), NodeMapError> {
+ for (i, visit_item) in self.visit(prefix).enumerate() {
+ if let Some(opt) = visit_item.final_revision() {
+ return Ok((opt, i + 1));
+ }
+ }
+ Err(NodeMapError::MultipleResults)
+ }
+
+ fn visit<'n, 'p>(
+ &'n self,
+ prefix: NodePrefixRef<'p>,
+ ) -> NodeTreeVisitor<'n, 'p> {
+ NodeTreeVisitor {
+ nt: self,
+ prefix: prefix,
+ visit: self.len() - 1,
+ nybble_idx: 0,
+ done: false,
+ }
+ }
+ /// Return a mutable reference for `Block` at index `idx`.
+ ///
+ /// If `idx` lies in the immutable area, then the reference is to
+ /// a newly appended copy.
+ ///
+ /// Returns (new_idx, glen, mut_ref) where
+ ///
+ /// - `new_idx` is the index of the mutable `Block`
+ /// - `mut_ref` is a mutable reference to the mutable Block.
+ /// - `glen` is the new length of `self.growable`
+ ///
+ /// Note: the caller wouldn't be allowed to query `self.growable.len()`
+ /// itself because of the mutable borrow taken with the returned `Block`
+ fn mutable_block(&mut self, idx: usize) -> (usize, &mut Block, usize) {
+ let ro_blocks = &self.readonly;
+ let ro_len = ro_blocks.len();
+ let glen = self.growable.len();
+ if idx < ro_len {
+ self.masked_inner_blocks += 1;
+ // TODO OPTIM I think this makes two copies
+ self.growable.push(ro_blocks[idx].clone());
+ (glen + ro_len, &mut self.growable[glen], glen + 1)
+ } else if glen + ro_len == idx {
+ (idx, &mut self.root, glen)
+ } else {
+ (idx, &mut self.growable[idx - ro_len], glen)
+ }
+ }
+
+    /// Main insertion method
+    ///
+    /// This will dive in the node tree to find the deepest `Block` for
+    /// `node`, split it as much as needed and record `node` in there.
+    /// The method then backtracks, updating references in all the visited
+    /// blocks from the root.
+    ///
+    /// All the mutated `Block` are copied first to the growable part if
+    /// needed. That happens for those in the immutable part except the root.
+    pub fn insert<I: RevlogIndex>(
+        &mut self,
+        index: &I,
+        node: &Node,
+        rev: Revision,
+    ) -> Result<(), NodeMapError> {
+        let ro_len = &self.readonly.len(); // NOTE(review): binds a reference to a temporary usize; a plain `self.readonly.len()` would read better — confirm
+
+        let mut visit_steps: Vec<_> = self.visit(node.into()).collect();
+        let read_nybbles = visit_steps.len();
+        // visit_steps cannot be empty, since we always visit the root block
+        let deepest = visit_steps.pop().unwrap();
+
+        let (mut block_idx, mut block, mut glen) =
+            self.mutable_block(deepest.block_idx);
+
+        if let Element::Rev(old_rev) = deepest.element {
+            let old_node = index
+                .node(old_rev)
+                .ok_or_else(|| NodeMapError::RevisionNotInIndex(old_rev))?;
+            if old_node == node {
+                return Ok(()); // avoid creating lots of useless blocks
+            }
+
+            // Looping over the tail of nybbles in both nodes, creating
+            // new blocks until we find the difference
+            let mut new_block_idx = ro_len + glen;
+            let mut nybble = deepest.nybble;
+            for nybble_pos in read_nybbles..node.nybbles_len() {
+                block.set(nybble, Element::Block(new_block_idx));
+
+                let new_nybble = node.get_nybble(nybble_pos);
+                let old_nybble = old_node.get_nybble(nybble_pos);
+
+                if old_nybble == new_nybble {
+                    self.growable.push(Block::new()); // still a common prefix: chain one more empty block
+                    block = &mut self.growable[glen];
+                    glen += 1;
+                    new_block_idx += 1;
+                    nybble = new_nybble;
+                } else {
+                    let mut new_block = Block::new(); // first differing nybble: both revisions fit here
+                    new_block.set(old_nybble, Element::Rev(old_rev));
+                    new_block.set(new_nybble, Element::Rev(rev));
+                    self.growable.push(new_block);
+                    break;
+                }
+            }
+        } else {
+            // Free slot in the deepest block: no splitting has to be done
+            block.set(deepest.nybble, Element::Rev(rev));
+        }
+
+        // Backtrack over visit steps to update references
+        while let Some(visited) = visit_steps.pop() {
+            let to_write = Element::Block(block_idx);
+            if visit_steps.is_empty() {
+                self.root.set(visited.nybble, to_write);
+                break;
+            }
+            let (new_idx, block, _) = self.mutable_block(visited.block_idx);
+            if block.get(visited.nybble) == to_write {
+                break; // reference already correct: stop backtracking
+            }
+            block.set(visited.nybble, to_write);
+            block_idx = new_idx;
+        }
+        Ok(())
+    }
+
+    /// Make the whole `NodeTree` logically empty, without touching the
+    /// immutable part.
+    pub fn invalidate_all(&mut self) {
+        self.root = Block::new();
+        self.growable = Vec::new();
+        self.masked_inner_blocks = self.readonly.len();
+    }
+
+    /// Return the number of blocks in the readonly part that are currently
+    /// masked in the mutable part.
+    ///
+    /// The `NodeTree` structure has no efficient way to know how many blocks
+    /// are already unreachable in the readonly part.
+    ///
+    /// After a call to `invalidate_all()`, the returned number can be actually
+    /// bigger than the whole readonly part, a conventional way to mean that
+    /// all the readonly blocks have been masked. This is what is really
+    /// useful to the caller and does not require to know how many were
+    /// actually unreachable to begin with.
+    pub fn masked_readonly_blocks(&self) -> usize {
+        if let Some(readonly_root) = self.readonly.last() {
+            if readonly_root == &self.root {
+                return 0; // root identical to the readonly one: nothing is masked
+            }
+        } else {
+            return 0; // empty readonly part: nothing can be masked
+        }
+        self.masked_inner_blocks + 1 // +1 for the (now shadowed) readonly root itself
+    }
+}
+
+pub struct NodeTreeBytes {
+    buffer: Box<dyn Deref<Target = [u8]> + Send>, // owned byte storage (e.g. a memory map)
+    len_in_blocks: usize, // number of whole `Block`s contained in `buffer`
+}
+
+impl NodeTreeBytes {
+    fn new(
+        buffer: Box<dyn Deref<Target = [u8]> + Send>,
+        amount: usize,
+    ) -> Self {
+        assert!(buffer.len() >= amount); // panics if the buffer is shorter than `amount`
+        let len_in_blocks = amount / BLOCK_SIZE; // any trailing partial block is dropped
+        NodeTreeBytes {
+            buffer,
+            len_in_blocks,
+        }
+    }
+}
+
+impl Deref for NodeTreeBytes {
+    type Target = [Block];
+
+    fn deref(&self) -> &[Block] {
+        unsafe { // SAFETY(review): reinterprets raw bytes as `Block`s; assumes `Block` is exactly BLOCK_SIZE bytes and the buffer is suitably aligned — TODO confirm
+            slice::from_raw_parts(
+                (&self.buffer).as_ptr() as *const Block,
+                self.len_in_blocks,
+            )
+        }
+    }
+}
+
+struct NodeTreeVisitor<'n, 'p> {
+    nt: &'n NodeTree,
+    prefix: NodePrefixRef<'p>,
+    visit: usize, // index of the block to read at the next step
+    nybble_idx: usize, // position of the next nybble to consume from `prefix`
+    done: bool, // set once a terminal (non-Block) element has been yielded
+}
+
+#[derive(Debug, PartialEq, Clone)]
+struct NodeTreeVisitItem {
+    block_idx: usize, // index of the block that was read
+    nybble: u8, // nybble used to index into that block
+    element: Element, // what was found at that slot
+}
+
+impl<'n, 'p> Iterator for NodeTreeVisitor<'n, 'p> {
+    type Item = NodeTreeVisitItem;
+
+    fn next(&mut self) -> Option<Self::Item> {
+        if self.done || self.nybble_idx >= self.prefix.len() {
+            return None; // terminal element already yielded, or prefix exhausted
+        }
+
+        let nybble = self.prefix.get_nybble(self.nybble_idx);
+        self.nybble_idx += 1;
+
+        let visit = self.visit;
+        let element = self.nt[visit].get(nybble);
+        if let Element::Block(idx) = element {
+            self.visit = idx; // descend into the referenced block at the next step
+        } else {
+            self.done = true; // `Rev` or `None`: the walk is over
+        }
+
+        Some(NodeTreeVisitItem {
+            block_idx: visit,
+            nybble: nybble,
+            element: element,
+        })
+    }
+}
+
+impl NodeTreeVisitItem {
+    // Return `Some(opt)` if this item is final, with `opt` being the
+    // `Revision` that it may represent.
+    //
+    // If the item is not terminal, return `None`
+    fn final_revision(&self) -> Option<Option<Revision>> {
+        match self.element {
+            Element::Block(_) => None,
+            Element::Rev(r) => Some(Some(r)),
+            Element::None => Some(None),
+        }
+    }
+}
+
+impl From<Vec<Block>> for NodeTree {
+    fn from(vec: Vec<Block>) -> Self {
+        Self::new(Box::new(vec)) // the given blocks become the immutable part
+    }
+}
+
+impl fmt::Debug for NodeTree {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        let readonly: &[Block] = &*self.readonly; // deref to a plain slice so Debug applies
+        write!(
+            f,
+            "readonly: {:?}, growable: {:?}, root: {:?}",
+            readonly, self.growable, self.root
+        )
+    }
+}
+
+impl Default for NodeTree {
+    /// Create a fully mutable empty NodeTree
+    fn default() -> Self {
+        NodeTree::new(Box::new(Vec::new()))
+    }
+}
+
+impl NodeMap for NodeTree {
+    fn find_bin<'a>(
+        &self,
+        idx: &impl RevlogIndex,
+        prefix: NodePrefixRef<'a>,
+    ) -> Result<Option<Revision>, NodeMapError> {
+        validate_candidate(idx, prefix.clone(), self.lookup(prefix)?) // resolve, then validate against the index (helper defined earlier in this file)
+            .map(|(opt, _shortest)| opt) // keep only the revision
+    }
+
+    fn unique_prefix_len_bin<'a>(
+        &self,
+        idx: &impl RevlogIndex,
+        prefix: NodePrefixRef<'a>,
+    ) -> Result<Option<usize>, NodeMapError> {
+        validate_candidate(idx, prefix.clone(), self.lookup(prefix)?) // same resolution as `find_bin`
+            .map(|(opt, shortest)| opt.map(|_rev| shortest)) // keep the shortest unambiguous length instead
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use super::NodeMapError::*;
+    use super::*;
+    use crate::revlog::node::{hex_pad_right, Node};
+    use std::collections::HashMap;
+
+    /// Creates a `Block` using a syntax close to the `Debug` output
+    macro_rules! block {
+        {$($nybble:tt : $variant:ident($val:tt)),*} => (
+            {
+                let mut block = Block::new();
+                $(block.set($nybble, Element::$variant($val)));*;
+                block
+            }
+        )
+    }
+
+    #[test]
+    fn test_block_debug() {
+        let mut block = Block::new();
+        block.set(1, Element::Rev(3));
+        block.set(10, Element::Block(0));
+        assert_eq!(format!("{:?}", block), "{1: Rev(3), 10: Block(0)}");
+    }
+
+    #[test]
+    fn test_block_macro() {
+        let block = block! {5: Block(2)};
+        assert_eq!(format!("{:?}", block), "{5: Block(2)}");
+
+        let block = block! {13: Rev(15), 5: Block(2)};
+        assert_eq!(format!("{:?}", block), "{5: Block(2), 13: Rev(15)}");
+    }
+
+    #[test]
+    fn test_raw_block() {
+        let mut raw = [255u8; 64];
+
+        let mut counter = 0;
+        for val in [0, 15, -2, -1, -3].iter() {
+            for byte in RawElement::to_be_bytes(*val).iter() { // serialize each raw element big-endian
+                raw[counter] = *byte;
+                counter += 1;
+            }
+        }
+        let block = Block(raw);
+        assert_eq!(block.get(0), Element::Block(0));
+        assert_eq!(block.get(1), Element::Block(15));
+        assert_eq!(block.get(3), Element::None);
+        assert_eq!(block.get(2), Element::Rev(0));
+        assert_eq!(block.get(4), Element::Rev(1));
+    }
+
+    type TestIndex = HashMap<Revision, Node>;
+
+    impl RevlogIndex for TestIndex {
+        fn node(&self, rev: Revision) -> Option<&Node> {
+            self.get(&rev)
+        }
+
+        fn len(&self) -> usize {
+            self.len() // resolves to the inherent HashMap::len, not this trait method (no recursion)
+        }
+    }
+
+    /// Pad hexadecimal Node prefix with zeros on the right
+    ///
+    /// This avoids having to repeatedly write very long hexadecimal
+    /// strings for test data, and brings actual hash size independency.
+    #[cfg(test)] // NOTE(review): redundant, the whole module is already #[cfg(test)]
+    fn pad_node(hex: &str) -> Node {
+        Node::from_hex(&hex_pad_right(hex)).unwrap()
+    }
+
+    /// Pad hexadecimal Node prefix with zeros on the right, then insert
+    fn pad_insert(idx: &mut TestIndex, rev: Revision, hex: &str) {
+        idx.insert(rev, pad_node(hex));
+    }
+
+    fn sample_nodetree() -> NodeTree {
+        NodeTree::from(vec![
+            block![0: Rev(9)],
+            block![0: Rev(0), 1: Rev(9)],
+            block![0: Block(1), 1:Rev(1)],
+        ])
+    }
+
+    #[test]
+    fn test_nt_debug() {
+        let nt = sample_nodetree();
+        assert_eq!(
+            format!("{:?}", nt),
+            "readonly: \
+             [{0: Rev(9)}, {0: Rev(0), 1: Rev(9)}, {0: Block(1), 1: Rev(1)}], \
+             growable: [], \
+             root: {0: Block(1), 1: Rev(1)}",
+        );
+    }
+
+    #[test]
+    fn test_immutable_find_simplest() -> Result<(), NodeMapError> {
+        let mut idx: TestIndex = HashMap::new();
+        pad_insert(&mut idx, 1, "1234deadcafe");
+
+        let nt = NodeTree::from(vec![block! {1: Rev(1)}]);
+        assert_eq!(nt.find_hex(&idx, "1")?, Some(1));
+        assert_eq!(nt.find_hex(&idx, "12")?, Some(1));
+        assert_eq!(nt.find_hex(&idx, "1234de")?, Some(1));
+        assert_eq!(nt.find_hex(&idx, "1a")?, None);
+        assert_eq!(nt.find_hex(&idx, "ab")?, None);
+
+        // and with full binary Nodes
+        assert_eq!(nt.find_node(&idx, idx.get(&1).unwrap())?, Some(1));
+        let unknown = Node::from_hex(&hex_pad_right("3d")).unwrap();
+        assert_eq!(nt.find_node(&idx, &unknown)?, None);
+        Ok(())
+    }
+
+    #[test]
+    fn test_immutable_find_one_jump() {
+        let mut idx = TestIndex::new();
+        pad_insert(&mut idx, 9, "012");
+        pad_insert(&mut idx, 0, "00a");
+
+        let nt = sample_nodetree();
+
+        assert_eq!(nt.find_hex(&idx, "0"), Err(MultipleResults));
+        assert_eq!(nt.find_hex(&idx, "01"), Ok(Some(9)));
+        assert_eq!(nt.find_hex(&idx, "00"), Err(MultipleResults));
+        assert_eq!(nt.find_hex(&idx, "00a"), Ok(Some(0)));
+        assert_eq!(nt.unique_prefix_len_hex(&idx, "00a"), Ok(Some(3)));
+        assert_eq!(nt.find_hex(&idx, "000"), Ok(Some(NULL_REVISION)));
+    }
+
+    #[test]
+    fn test_mutated_find() -> Result<(), NodeMapError> {
+        let mut idx = TestIndex::new();
+        pad_insert(&mut idx, 9, "012");
+        pad_insert(&mut idx, 0, "00a");
+        pad_insert(&mut idx, 2, "cafe");
+        pad_insert(&mut idx, 3, "15");
+        pad_insert(&mut idx, 1, "10");
+
+        let nt = NodeTree {
+            readonly: sample_nodetree().readonly,
+            growable: vec![block![0: Rev(1), 5: Rev(3)]],
+            root: block![0: Block(1), 1:Block(3), 12: Rev(2)],
+            masked_inner_blocks: 1,
+        };
+        assert_eq!(nt.find_hex(&idx, "10")?, Some(1));
+        assert_eq!(nt.find_hex(&idx, "c")?, Some(2));
+        assert_eq!(nt.unique_prefix_len_hex(&idx, "c")?, Some(1));
+        assert_eq!(nt.find_hex(&idx, "00"), Err(MultipleResults));
+        assert_eq!(nt.find_hex(&idx, "000")?, Some(NULL_REVISION));
+        assert_eq!(nt.unique_prefix_len_hex(&idx, "000")?, Some(3));
+        assert_eq!(nt.find_hex(&idx, "01")?, Some(9));
+        assert_eq!(nt.masked_readonly_blocks(), 2);
+        Ok(())
+    }
+
+    struct TestNtIndex {
+        index: TestIndex,
+        nt: NodeTree,
+    }
+
+    impl TestNtIndex {
+        fn new() -> Self {
+            TestNtIndex {
+                index: HashMap::new(),
+                nt: NodeTree::default(),
+            }
+        }
+
+        fn insert(
+            &mut self,
+            rev: Revision,
+            hex: &str,
+        ) -> Result<(), NodeMapError> {
+            let node = pad_node(hex);
+            self.index.insert(rev, node.clone());
+            self.nt.insert(&self.index, &node, rev)?;
+            Ok(())
+        }
+
+        fn find_hex(
+            &self,
+            prefix: &str,
+        ) -> Result<Option<Revision>, NodeMapError> {
+            self.nt.find_hex(&self.index, prefix)
+        }
+
+        fn unique_prefix_len_hex(
+            &self,
+            prefix: &str,
+        ) -> Result<Option<usize>, NodeMapError> {
+            self.nt.unique_prefix_len_hex(&self.index, prefix)
+        }
+
+        /// Drain `added` and restart a new one
+        fn commit(self) -> Self {
+            let mut as_vec: Vec<Block> =
+                self.nt.readonly.iter().map(|block| block.clone()).collect();
+            as_vec.extend(self.nt.growable);
+            as_vec.push(self.nt.root);
+
+            Self {
+                index: self.index,
+                nt: NodeTree::from(as_vec).into(), // NOTE(review): `.into()` is a no-op here (NodeTree -> NodeTree)
+            }
+        }
+    }
+
+    #[test]
+    fn test_insert_full_mutable() -> Result<(), NodeMapError> {
+        let mut idx = TestNtIndex::new();
+        idx.insert(0, "1234")?;
+        assert_eq!(idx.find_hex("1")?, Some(0));
+        assert_eq!(idx.find_hex("12")?, Some(0));
+
+        // let's trigger a simple split
+        idx.insert(1, "1a34")?;
+        assert_eq!(idx.nt.growable.len(), 1);
+        assert_eq!(idx.find_hex("12")?, Some(0));
+        assert_eq!(idx.find_hex("1a")?, Some(1));
+
+        // reinserting is a no-op
+        idx.insert(1, "1a34")?;
+        assert_eq!(idx.nt.growable.len(), 1);
+        assert_eq!(idx.find_hex("12")?, Some(0));
+        assert_eq!(idx.find_hex("1a")?, Some(1));
+
+        idx.insert(2, "1a01")?;
+        assert_eq!(idx.nt.growable.len(), 2);
+        assert_eq!(idx.find_hex("1a"), Err(NodeMapError::MultipleResults));
+        assert_eq!(idx.find_hex("12")?, Some(0));
+        assert_eq!(idx.find_hex("1a3")?, Some(1));
+        assert_eq!(idx.find_hex("1a0")?, Some(2));
+        assert_eq!(idx.find_hex("1a12")?, None);
+
+        // now let's make it split and create more than one additional block
+        idx.insert(3, "1a345")?;
+        assert_eq!(idx.nt.growable.len(), 4);
+        assert_eq!(idx.find_hex("1a340")?, Some(1));
+        assert_eq!(idx.find_hex("1a345")?, Some(3));
+        assert_eq!(idx.find_hex("1a341")?, None);
+
+        // there's no readonly block to mask
+        assert_eq!(idx.nt.masked_readonly_blocks(), 0);
+        Ok(())
+    }
+
+    #[test]
+    fn test_unique_prefix_len_zero_prefix() {
+        let mut idx = TestNtIndex::new();
+        idx.insert(0, "00000abcd").unwrap();
+
+        assert_eq!(idx.find_hex("000"), Err(NodeMapError::MultipleResults));
+        // in the nodetree proper, this will be found at the first nybble
+        // yet the correct answer for unique_prefix_len is not 1, nor 1+1,
+        // but the first difference with `NULL_NODE`
+        assert_eq!(idx.unique_prefix_len_hex("00000a"), Ok(Some(6)));
+        assert_eq!(idx.unique_prefix_len_hex("00000ab"), Ok(Some(6)));
+
+        // same with odd result
+        idx.insert(1, "00123").unwrap();
+        assert_eq!(idx.unique_prefix_len_hex("001"), Ok(Some(3)));
+        assert_eq!(idx.unique_prefix_len_hex("0012"), Ok(Some(3)));
+
+        // these are unchanged of course
+        assert_eq!(idx.unique_prefix_len_hex("00000a"), Ok(Some(6)));
+        assert_eq!(idx.unique_prefix_len_hex("00000ab"), Ok(Some(6)));
+    }
+
+    #[test]
+    fn test_insert_extreme_splitting() -> Result<(), NodeMapError> {
+        // check that the splitting loop is long enough
+        let mut nt_idx = TestNtIndex::new();
+        let nt = &mut nt_idx.nt;
+        let idx = &mut nt_idx.index;
+
+        let node0_hex = hex_pad_right("444444");
+        let mut node1_hex = hex_pad_right("444444").clone(); // NOTE(review): `.clone()` is redundant, the value is already owned
+        node1_hex.pop();
+        node1_hex.push('5');
+        let node0 = Node::from_hex(&node0_hex).unwrap();
+        let node1 = Node::from_hex(&node1_hex).unwrap();
+
+        idx.insert(0, node0.clone());
+        nt.insert(idx, &node0, 0)?;
+        idx.insert(1, node1.clone());
+        nt.insert(idx, &node1, 1)?;
+
+        assert_eq!(nt.find_bin(idx, (&node0).into())?, Some(0));
+        assert_eq!(nt.find_bin(idx, (&node1).into())?, Some(1));
+        Ok(())
+    }
+
+    #[test]
+    fn test_insert_partly_immutable() -> Result<(), NodeMapError> {
+        let mut idx = TestNtIndex::new();
+        idx.insert(0, "1234")?;
+        idx.insert(1, "1235")?;
+        idx.insert(2, "131")?;
+        idx.insert(3, "cafe")?;
+        let mut idx = idx.commit();
+        assert_eq!(idx.find_hex("1234")?, Some(0));
+        assert_eq!(idx.find_hex("1235")?, Some(1));
+        assert_eq!(idx.find_hex("131")?, Some(2));
+        assert_eq!(idx.find_hex("cafe")?, Some(3));
+        // we did not add anything since init from readonly
+        assert_eq!(idx.nt.masked_readonly_blocks(), 0);
+
+        idx.insert(4, "123A")?;
+        assert_eq!(idx.find_hex("1234")?, Some(0));
+        assert_eq!(idx.find_hex("1235")?, Some(1));
+        assert_eq!(idx.find_hex("131")?, Some(2));
+        assert_eq!(idx.find_hex("cafe")?, Some(3));
+        assert_eq!(idx.find_hex("123A")?, Some(4));
+        // we masked blocks for all prefixes of "123", including the root
+        assert_eq!(idx.nt.masked_readonly_blocks(), 4);
+
+        eprintln!("{:?}", idx.nt);
+        idx.insert(5, "c0")?;
+        assert_eq!(idx.find_hex("cafe")?, Some(3));
+        assert_eq!(idx.find_hex("c0")?, Some(5));
+        assert_eq!(idx.find_hex("c1")?, None);
+        assert_eq!(idx.find_hex("1234")?, Some(0));
+        // inserting "c0" is just splitting the 'c' slot of the mutable root,
+        // it doesn't mask anything
+        assert_eq!(idx.nt.masked_readonly_blocks(), 4);
+
+        Ok(())
+    }
+
+    #[test]
+    fn test_invalidate_all() -> Result<(), NodeMapError> {
+        let mut idx = TestNtIndex::new();
+        idx.insert(0, "1234")?;
+        idx.insert(1, "1235")?;
+        idx.insert(2, "131")?;
+        idx.insert(3, "cafe")?;
+        let mut idx = idx.commit();
+
+        idx.nt.invalidate_all();
+
+        assert_eq!(idx.find_hex("1234")?, None);
+        assert_eq!(idx.find_hex("1235")?, None);
+        assert_eq!(idx.find_hex("131")?, None);
+        assert_eq!(idx.find_hex("cafe")?, None);
+        // all the readonly blocks have been masked, this is the
+        // conventional expected response
+        assert_eq!(idx.nt.masked_readonly_blocks(), idx.nt.readonly.len() + 1);
+        Ok(())
+    }
+
+    #[test]
+    fn test_into_added_empty() {
+        assert!(sample_nodetree().into_readonly_and_added().1.is_empty());
+        assert!(sample_nodetree()
+            .into_readonly_and_added_bytes()
+            .1
+            .is_empty());
+    }
+
+    #[test]
+    fn test_into_added_bytes() -> Result<(), NodeMapError> {
+        let mut idx = TestNtIndex::new();
+        idx.insert(0, "1234")?;
+        let mut idx = idx.commit();
+        idx.insert(4, "cafe")?;
+        let (_, bytes) = idx.nt.into_readonly_and_added_bytes();
+
+        // only the root block has been changed
+        assert_eq!(bytes.len(), BLOCK_SIZE);
+        // big endian for -2
+        assert_eq!(&bytes[4..2 * 4], [255, 255, 255, 254]);
+        // big endian for -6
+        assert_eq!(&bytes[12 * 4..13 * 4], [255, 255, 255, 250]);
+        Ok(())
+    }
+}
--- a/rust/hg-core/src/utils.rs Mon Apr 13 16:30:13 2020 +0300
+++ b/rust/hg-core/src/utils.rs Thu Apr 16 22:51:09 2020 +0530
@@ -7,8 +7,12 @@
//! Contains useful functions, traits, structs, etc. for use in core.
+use crate::utils::hg_path::HgPath;
+use std::{io::Write, ops::Deref};
+
pub mod files;
pub mod hg_path;
+pub mod path_auditor;
/// Useful until rust/issues/56345 is stable
///
@@ -111,3 +115,54 @@
}
}
}
+
+pub trait Escaped {
+    /// Return bytes escaped for display to the user
+    fn escaped_bytes(&self) -> Vec<u8>;
+}
+
+impl Escaped for u8 {
+    fn escaped_bytes(&self) -> Vec<u8> {
+        let mut acc = vec![];
+        match self {
+            c @ b'\'' | c @ b'\\' => {
+                acc.push(b'\\'); // quote and backslash get a single leading backslash
+                acc.push(*c);
+            }
+            b'\t' => {
+                acc.extend(br"\\t"); // NOTE(review): raw string, so this emits TWO backslashes then 't', unlike the single-backslash escapes above — confirm intended
+            }
+            b'\n' => {
+                acc.extend(br"\\n"); // same double-backslash form as the tab case
+            }
+            b'\r' => {
+                acc.extend(br"\\r"); // same double-backslash form as the tab case
+            }
+            c if (*c < b' ' || *c >= 127) => {
+                write!(acc, "\\x{:x}", self).unwrap(); // NOTE(review): `{:x}` has no zero-padding, so bytes < 0x10 render as e.g. `\x5`; `{:02x}` would give conventional `\xNN` — verify
+            }
+            c => {
+                acc.push(*c); // printable ASCII passes through unchanged
+            }
+        }
+        acc
+    }
+}
+
+impl<'a, T: Escaped> Escaped for &'a [T] {
+    fn escaped_bytes(&self) -> Vec<u8> {
+        self.iter().flat_map(|item| item.escaped_bytes()).collect() // escape element-wise and concatenate
+    }
+}
+
+impl<T: Escaped> Escaped for Vec<T> {
+    fn escaped_bytes(&self) -> Vec<u8> {
+        self.deref().escaped_bytes() // delegate to the slice impl
+    }
+}
+
+impl<'a> Escaped for &'a HgPath {
+    fn escaped_bytes(&self) -> Vec<u8> {
+        self.as_bytes().escaped_bytes() // escape the raw path bytes
+    }
+}
--- a/rust/hg-core/src/utils/files.rs Mon Apr 13 16:30:13 2020 +0300
+++ b/rust/hg-core/src/utils/files.rs Thu Apr 16 22:51:09 2020 +0530
@@ -9,11 +9,18 @@
//! Functions for fiddling with files.
-use crate::utils::hg_path::{HgPath, HgPathBuf};
+use crate::utils::{
+ hg_path::{path_to_hg_path_buf, HgPath, HgPathBuf, HgPathError},
+ path_auditor::PathAuditor,
+ replace_slice,
+};
+use lazy_static::lazy_static;
+use same_file::is_same_file;
+use std::borrow::ToOwned;
+use std::fs::Metadata;
use std::iter::FusedIterator;
-
-use std::fs::Metadata;
-use std::path::Path;
+use std::ops::Deref;
+use std::path::{Path, PathBuf};
pub fn get_path_from_bytes(bytes: &[u8]) -> &Path {
let os_str;
@@ -62,6 +69,28 @@
impl<'a> FusedIterator for Ancestors<'a> {}
+/// An iterator over a repository path yielding `(ancestor, base name)` pairs for itself and its ancestors.
+#[derive(Copy, Clone, Debug)]
+pub(crate) struct AncestorsWithBase<'a> {
+    next: Option<(&'a HgPath, &'a HgPath)>,
+}
+
+impl<'a> Iterator for AncestorsWithBase<'a> {
+    type Item = (&'a HgPath, &'a HgPath);
+
+    fn next(&mut self) -> Option<Self::Item> {
+        let next = self.next; // yield the current pair, then advance to the next split
+        self.next = match self.next {
+            Some((s, _)) if s.is_empty() => None, // reached the root: stop after this item
+            Some((s, _)) => Some(s.split_filename()),
+            None => None,
+        };
+        next
+    }
+}
+
+impl<'a> FusedIterator for AncestorsWithBase<'a> {}
+
/// Returns an iterator yielding ancestor directories of the given repository
/// path.
///
@@ -77,6 +106,25 @@
dirs
}
+/// Returns an iterator yielding `(ancestor directory, tail component)`
+/// pairs for the given repository path.
+///
+/// The path is separated by '/', and must not start with '/'.
+///
+/// The path itself isn't included unless it is b"" (meaning the root
+/// directory.)
+pub(crate) fn find_dirs_with_base<'a>(
+    path: &'a HgPath,
+) -> AncestorsWithBase<'a> {
+    let mut dirs = AncestorsWithBase {
+        next: Some((path, HgPath::new(b""))),
+    };
+    if !path.is_empty() {
+        dirs.next(); // skip itself
+    }
+    dirs
+}
+
/// TODO more than ASCII?
pub fn normalize_case(path: &HgPath) -> HgPathBuf {
#[cfg(windows)] // NTFS compares via upper()
@@ -85,6 +133,41 @@
path.to_ascii_lowercase()
}
+lazy_static! {
+    static ref IGNORED_CHARS: Vec<Vec<u8>> = { // UTF-8 encodings of zero-width/bidi control codepoints plus the BOM
+        [
+            0x200c, 0x200d, 0x200e, 0x200f, 0x202a, 0x202b, 0x202c, 0x202d,
+            0x202e, 0x206a, 0x206b, 0x206c, 0x206d, 0x206e, 0x206f, 0xfeff,
+        ]
+        .iter()
+        .map(|code| {
+            std::char::from_u32(*code)
+                .unwrap()
+                .encode_utf8(&mut [0; 3])
+                .bytes()
+                .collect()
+        })
+        .collect()
+    };
+}
+
+fn hfs_ignore_clean(bytes: &[u8]) -> Vec<u8> {
+    let mut buf = bytes.to_owned();
+    let needs_escaping = bytes.iter().any(|b| *b == b'\xe2' || *b == b'\xef'); // 0xE2/0xEF are the UTF-8 lead bytes of every IGNORED_CHARS entry (cheap pre-filter; name says "escaping" but this gates *removal*)
+    if needs_escaping {
+        for forbidden in IGNORED_CHARS.iter() {
+            replace_slice(&mut buf, forbidden, &[]) // strip the ignored sequence entirely
+        }
+        buf
+    } else {
+        buf
+    }
+}
+
+pub fn lower_clean(bytes: &[u8]) -> Vec<u8> {
+    hfs_ignore_clean(&bytes.to_ascii_lowercase()) // ASCII-lowercase, then drop HFS+-ignored codepoints
+}
+
#[derive(Eq, PartialEq, Ord, PartialOrd, Copy, Clone)]
pub struct HgMetadata {
pub st_dev: u64,
@@ -111,9 +194,66 @@
}
}
+/// Returns the canonical path of `name`, given `cwd` and `root`
+pub fn canonical_path(
+    root: impl AsRef<Path>,
+    cwd: impl AsRef<Path>,
+    name: impl AsRef<Path>,
+) -> Result<PathBuf, HgPathError> {
+    // TODO add missing normalization for other platforms
+    let root = root.as_ref();
+    let cwd = cwd.as_ref();
+    let name = name.as_ref();
+
+    let name = if !name.is_absolute() {
+        root.join(&cwd).join(&name) // note: if `cwd` is absolute, Path::join discards `root`
+    } else {
+        name.to_owned()
+    };
+    let auditor = PathAuditor::new(&root);
+    if name != root && name.starts_with(&root) {
+        let name = name.strip_prefix(&root).unwrap(); // safe: starts_with was just checked
+        auditor.audit_path(path_to_hg_path_buf(name)?)?;
+        return Ok(name.to_owned());
+    } else if name == root {
+        return Ok("".into()); // the root itself canonicalizes to the empty path
+    } else {
+        // Determine whether `name' is in the hierarchy at or beneath `root',
+        // by iterating name=name.parent() until it returns `None` (can't
+        // check name == '/', because that doesn't work on windows).
+        let mut name = name.deref();
+        let original_name = name.to_owned();
+        loop {
+            let same = is_same_file(&name, &root).unwrap_or(false); // I/O errors count as "different"
+            if same {
+                if name == original_name {
+                    // `name` was actually the same as root (maybe a symlink)
+                    return Ok("".into());
+                }
+                // `name` is a symlink to root, so `original_name` is under
+                // root
+                let rel_path = original_name.strip_prefix(&name).unwrap();
+                auditor.audit_path(path_to_hg_path_buf(&rel_path)?)?;
+                return Ok(rel_path.to_owned());
+            }
+            name = match name.parent() {
+                None => break,
+                Some(p) => p,
+            };
+        }
+        // TODO hint to the user about using --cwd
+        // Bubble up the responsibility to Python for now
+        Err(HgPathError::NotUnderRoot {
+            path: original_name.to_owned(),
+            root: root.to_owned(),
+        })
+    }
+}
+
#[cfg(test)]
mod tests {
use super::*;
+ use pretty_assertions::assert_eq;
#[test]
fn find_dirs_some() {
@@ -133,4 +273,112 @@
assert_eq!(dirs.next(), None);
assert_eq!(dirs.next(), None);
}
+
+    #[test]
+    fn test_find_dirs_with_base_some() {
+        let mut dirs = super::find_dirs_with_base(HgPath::new(b"foo/bar/baz"));
+        assert_eq!(
+            dirs.next(),
+            Some((HgPath::new(b"foo/bar"), HgPath::new(b"baz")))
+        );
+        assert_eq!(
+            dirs.next(),
+            Some((HgPath::new(b"foo"), HgPath::new(b"bar")))
+        );
+        assert_eq!(dirs.next(), Some((HgPath::new(b""), HgPath::new(b"foo"))));
+        assert_eq!(dirs.next(), None);
+        assert_eq!(dirs.next(), None);
+    }
+
+    #[test]
+    fn test_find_dirs_with_base_empty() {
+        let mut dirs = super::find_dirs_with_base(HgPath::new(b""));
+        assert_eq!(dirs.next(), Some((HgPath::new(b""), HgPath::new(b""))));
+        assert_eq!(dirs.next(), None);
+        assert_eq!(dirs.next(), None);
+    }
+
+    #[test]
+    fn test_canonical_path() {
+        let root = Path::new("/repo");
+        let cwd = Path::new("/dir");
+        let name = Path::new("filename");
+        assert_eq!(
+            canonical_path(root, cwd, name),
+            Err(HgPathError::NotUnderRoot {
+                path: PathBuf::from("/dir/filename"),
+                root: root.to_path_buf()
+            })
+        );
+
+        let root = Path::new("/repo");
+        let cwd = Path::new("/");
+        let name = Path::new("filename");
+        assert_eq!(
+            canonical_path(root, cwd, name),
+            Err(HgPathError::NotUnderRoot {
+                path: PathBuf::from("/filename"),
+                root: root.to_path_buf()
+            })
+        );
+
+        let root = Path::new("/repo");
+        let cwd = Path::new("/");
+        let name = Path::new("repo/filename");
+        assert_eq!(
+            canonical_path(root, cwd, name),
+            Ok(PathBuf::from("filename"))
+        );
+
+        let root = Path::new("/repo");
+        let cwd = Path::new("/repo");
+        let name = Path::new("filename");
+        assert_eq!(
+            canonical_path(root, cwd, name),
+            Ok(PathBuf::from("filename"))
+        );
+
+        let root = Path::new("/repo");
+        let cwd = Path::new("/repo/subdir");
+        let name = Path::new("filename");
+        assert_eq!(
+            canonical_path(root, cwd, name),
+            Ok(PathBuf::from("subdir/filename"))
+        );
+    }
+
+    #[test]
+    fn test_canonical_path_not_rooted() {
+        use std::fs::create_dir;
+        use tempfile::tempdir;
+
+        let base_dir = tempdir().unwrap();
+        let base_dir_path = base_dir.path();
+        let beneath_repo = base_dir_path.join("a");
+        let root = base_dir_path.join("a/b");
+        let out_of_repo = base_dir_path.join("c");
+        let under_repo_symlink = out_of_repo.join("d"); // "d" resolves inside the repo through the symlink below
+
+        create_dir(&beneath_repo).unwrap();
+        create_dir(&root).unwrap();
+
+        // TODO make portable
+        std::os::unix::fs::symlink(&root, &out_of_repo).unwrap();
+
+        assert_eq!(
+            canonical_path(&root, Path::new(""), out_of_repo),
+            Ok(PathBuf::from(""))
+        );
+        assert_eq!(
+            canonical_path(&root, Path::new(""), &beneath_repo),
+            Err(HgPathError::NotUnderRoot {
+                path: beneath_repo.to_owned(),
+                root: root.to_owned()
+            })
+        );
+        assert_eq!(
+            canonical_path(&root, Path::new(""), &under_repo_symlink),
+            Ok(PathBuf::from("d"))
+        );
+    }
}
--- a/rust/hg-core/src/utils/hg_path.rs Mon Apr 13 16:30:13 2020 +0300
+++ b/rust/hg-core/src/utils/hg_path.rs Thu Apr 16 22:51:09 2020 +0530
@@ -15,12 +15,35 @@
 pub enum HgPathError {
     /// Bytes from the invalid `HgPath`
     LeadingSlash(Vec<u8>),
-    /// Bytes and index of the second slash
-    ConsecutiveSlashes(Vec<u8>, usize),
-    /// Bytes and index of the null byte
-    ContainsNullByte(Vec<u8>, usize),
+    ConsecutiveSlashes {
+        bytes: Vec<u8>,
+        second_slash_index: usize,
+    },
+    ContainsNullByte {
+        bytes: Vec<u8>,
+        null_byte_index: usize,
+    },
     /// Bytes
     DecodeError(Vec<u8>),
+    /// The rest come from audit errors
+    EndsWithSlash(HgPathBuf),
+    ContainsIllegalComponent(HgPathBuf),
+    /// Path is inside the `.hg` folder
+    InsideDotHg(HgPathBuf),
+    IsInsideNestedRepo {
+        path: HgPathBuf,
+        nested_repo: HgPathBuf,
+    },
+    TraversesSymbolicLink {
+        path: HgPathBuf,
+        symlink: HgPathBuf,
+    },
+    NotFsCompliant(HgPathBuf), // could not be turned into a filesystem path (see the ToString impl)
+    /// `path` is the smallest invalid path
+    NotUnderRoot {
+        path: PathBuf,
+        root: PathBuf,
+    },
 }
 impl ToString for HgPathError {
@@ -29,17 +52,55 @@
             HgPathError::LeadingSlash(bytes) => {
                 format!("Invalid HgPath '{:?}': has a leading slash.", bytes)
             }
-            HgPathError::ConsecutiveSlashes(bytes, pos) => format!(
-                "Invalid HgPath '{:?}': consecutive slahes at pos {}.",
+            HgPathError::ConsecutiveSlashes {
+                bytes,
+                second_slash_index: pos,
+            } => format!(
+                "Invalid HgPath '{:?}': consecutive slashes at pos {}.",
                 bytes, pos
             ),
-            HgPathError::ContainsNullByte(bytes, pos) => format!(
+            HgPathError::ContainsNullByte {
+                bytes,
+                null_byte_index: pos,
+            } => format!(
                 "Invalid HgPath '{:?}': contains null byte at pos {}.",
                 bytes, pos
             ),
             HgPathError::DecodeError(bytes) => {
                 format!("Invalid HgPath '{:?}': could not be decoded.", bytes)
             }
+            HgPathError::EndsWithSlash(path) => {
+                format!("Audit failed for '{}': ends with a slash.", path)
+            }
+            HgPathError::ContainsIllegalComponent(path) => format!(
+                "Audit failed for '{}': contains an illegal component.",
+                path
+            ),
+            HgPathError::InsideDotHg(path) => format!(
+                "Audit failed for '{}': is inside the '.hg' folder.",
+                path
+            ),
+            HgPathError::IsInsideNestedRepo {
+                path,
+                nested_repo: nested,
+            } => format!(
+                "Audit failed for '{}': is inside a nested repository '{}'.",
+                path, nested
+            ),
+            HgPathError::TraversesSymbolicLink { path, symlink } => format!(
+                "Audit failed for '{}': traverses symbolic link '{}'.",
+                path, symlink
+            ),
+            HgPathError::NotFsCompliant(path) => format!(
+                "Audit failed for '{}': cannot be turned into a \
+                 filesystem path.",
+                path
+            ),
+            HgPathError::NotUnderRoot { path, root } => format!( // `.display()` because PathBuf does not implement Display
+                "Audit failed for '{}': not under root {}.",
+                path.display(),
+                root.display()
+            ),
         }
     }
 }
@@ -112,10 +173,40 @@
     pub fn contains(&self, other: u8) -> bool {
         self.inner.contains(&other)
     }
-    pub fn starts_with(&self, needle: impl AsRef<HgPath>) -> bool {
+    pub fn starts_with(&self, needle: impl AsRef<Self>) -> bool {
         self.inner.starts_with(needle.as_ref().as_bytes())
     }
-    pub fn join<T: ?Sized + AsRef<HgPath>>(&self, other: &T) -> HgPathBuf {
+    pub fn trim_trailing_slash(&self) -> &Self { // removes at most one trailing '/'
+        Self::new(if self.inner.last() == Some(&b'/') {
+            &self.inner[..self.inner.len() - 1]
+        } else {
+            &self.inner[..]
+        })
+    }
+    /// Returns a tuple of slices `(base, filename)` resulting from the split
+    /// at the rightmost `/`, if any.
+    ///
+    /// # Examples:
+    ///
+    /// ```
+    /// use hg::utils::hg_path::HgPath;
+    ///
+    /// let path = HgPath::new(b"cool/hg/path").split_filename();
+    /// assert_eq!(path, (HgPath::new(b"cool/hg"), HgPath::new(b"path")));
+    ///
+    /// let path = HgPath::new(b"pathwithoutsep").split_filename();
+    /// assert_eq!(path, (HgPath::new(b""), HgPath::new(b"pathwithoutsep")));
+    /// ```
+    pub fn split_filename(&self) -> (&Self, &Self) {
+        match &self.inner.iter().rposition(|c| *c == b'/') {
+            None => (HgPath::new(""), &self), // no separator: whole path is the filename
+            Some(size) => (
+                HgPath::new(&self.inner[..*size]),
+                HgPath::new(&self.inner[*size + 1..]),
+            ),
+        }
+    }
+    pub fn join<T: ?Sized + AsRef<Self>>(&self, other: &T) -> HgPathBuf {
         let mut inner = self.inner.to_owned();
         if inner.len() != 0 && inner.last() != Some(&b'/') {
             inner.push(b'/');
@@ -123,21 +214,103 @@
         inner.extend(other.as_ref().bytes());
         HgPathBuf::from_bytes(&inner)
     }
+    pub fn parent(&self) -> &Self {
+        let inner = self.as_bytes();
+        HgPath::new(match inner.iter().rposition(|b| *b == b'/') {
+            Some(pos) => &inner[..pos],
+            None => &[], // no '/': parent is the empty (root) path
+        })
+    }
     /// Given a base directory, returns the slice of `self` relative to the
     /// base directory. If `base` is not a directory (does not end with a
     /// `b'/'`), returns `None`.
-    pub fn relative_to(&self, base: impl AsRef<HgPath>) -> Option<&HgPath> {
+    pub fn relative_to(&self, base: impl AsRef<Self>) -> Option<&Self> {
         let base = base.as_ref();
         if base.is_empty() {
             return Some(self);
         }
         let is_dir = base.as_bytes().ends_with(b"/");
         if is_dir && self.starts_with(base) {
-            Some(HgPath::new(&self.inner[base.len()..]))
+            Some(Self::new(&self.inner[base.len()..]))
         } else {
             None
         }
     }
+
+ #[cfg(windows)]
+ /// Copied from the Python stdlib's `os.path.splitdrive` implementation.
+ ///
+ /// Split a pathname into drive/UNC sharepoint and relative path
+ /// specifiers. Returns a 2-tuple (drive_or_unc, path); either part may
+ /// be empty.
+ ///
+ /// If you assign
+ /// result = split_drive(p)
+ /// It is always true that:
+ /// result[0] + result[1] == p
+ ///
+ /// If the path contained a drive letter, drive_or_unc will contain
+ /// everything up to and including the colon.
+ /// e.g. split_drive("c:/dir") returns ("c:", "/dir")
+ ///
+ /// If the path contained a UNC path, the drive_or_unc will contain the
+ /// host name and share up to but not including the fourth directory
+ /// separator character.
+ /// e.g. split_drive("//host/computer/dir") returns ("//host/computer",
+ /// "/dir")
+ ///
+ /// Paths cannot contain both a drive letter and a UNC path.
+ pub fn split_drive<'a>(&self) -> (&HgPath, &HgPath) {
+ let bytes = self.as_bytes();
+ let is_sep = |b| std::path::is_separator(b as char);
+
+ if self.len() < 2 {
+ (HgPath::new(b""), &self)
+ } else if is_sep(bytes[0])
+ && is_sep(bytes[1])
+ && (self.len() == 2 || !is_sep(bytes[2]))
+ {
+ // Is a UNC path:
+ // vvvvvvvvvvvvvvvvvvvv drive letter or UNC path
+ // \\machine\mountpoint\directory\etc\...
+ // directory ^^^^^^^^^^^^^^^
+
+ let machine_end_index = bytes[2..].iter().position(|b| is_sep(*b));
+ let mountpoint_start_index = if let Some(i) = machine_end_index {
+ i + 2
+ } else {
+ return (HgPath::new(b""), &self);
+ };
+
+ match bytes[mountpoint_start_index + 1..]
+ .iter()
+ .position(|b| is_sep(*b))
+ {
+ // A UNC path can't have two slashes in a row
+ // (after the initial two)
+ Some(0) => (HgPath::new(b""), &self),
+ Some(i) => {
+ let (a, b) =
+ bytes.split_at(mountpoint_start_index + 1 + i);
+ (HgPath::new(a), HgPath::new(b))
+ }
+ None => (&self, HgPath::new(b"")),
+ }
+ } else if bytes[1] == b':' {
+ // Drive path c:\directory
+ let (a, b) = bytes.split_at(2);
+ (HgPath::new(a), HgPath::new(b))
+ } else {
+ (HgPath::new(b""), &self)
+ }
+ }
+
+ #[cfg(unix)]
+ /// Split a pathname into drive and path. On Posix, drive is always empty.
+ pub fn split_drive(&self) -> (&HgPath, &HgPath) {
+ (HgPath::new(b""), &self)
+ }
+
/// Checks for errors in the path, short-circuiting at the first one.
/// This generates fine-grained errors useful for debugging.
/// To simply check if the path is valid during tests, use `is_valid`.
@@ -154,17 +327,17 @@
for (index, byte) in bytes.iter().enumerate() {
match byte {
0 => {
- return Err(HgPathError::ContainsNullByte(
- bytes.to_vec(),
- index,
- ))
+ return Err(HgPathError::ContainsNullByte {
+ bytes: bytes.to_vec(),
+ null_byte_index: index,
+ })
}
b'/' => {
if previous_byte.is_some() && previous_byte == Some(b'/') {
- return Err(HgPathError::ConsecutiveSlashes(
- bytes.to_vec(),
- index,
- ));
+ return Err(HgPathError::ConsecutiveSlashes {
+ bytes: bytes.to_vec(),
+ second_slash_index: index,
+ });
}
}
_ => (),
@@ -348,6 +521,7 @@
#[cfg(test)]
mod tests {
use super::*;
+ use pretty_assertions::assert_eq;
#[test]
fn test_path_states() {
@@ -356,11 +530,17 @@
HgPath::new(b"/").check_state()
);
assert_eq!(
- Err(HgPathError::ConsecutiveSlashes(b"a/b//c".to_vec(), 4)),
+ Err(HgPathError::ConsecutiveSlashes {
+ bytes: b"a/b//c".to_vec(),
+ second_slash_index: 4
+ }),
HgPath::new(b"a/b//c").check_state()
);
assert_eq!(
- Err(HgPathError::ContainsNullByte(b"a/b/\0c".to_vec(), 4)),
+ Err(HgPathError::ContainsNullByte {
+ bytes: b"a/b/\0c".to_vec(),
+ null_byte_index: 4
+ }),
HgPath::new(b"a/b/\0c").check_state()
);
// TODO test HgPathError::DecodeError for the Windows implementation.
@@ -473,4 +653,116 @@
let base = HgPath::new(b"ends/");
assert_eq!(Some(HgPath::new(b"with/dir/")), path.relative_to(base));
}
+
+ #[test]
+ #[cfg(unix)]
+ fn test_split_drive() {
+ // Taken from the Python stdlib's tests
+ assert_eq!(
+ HgPath::new(br"/foo/bar").split_drive(),
+ (HgPath::new(b""), HgPath::new(br"/foo/bar"))
+ );
+ assert_eq!(
+ HgPath::new(br"foo:bar").split_drive(),
+ (HgPath::new(b""), HgPath::new(br"foo:bar"))
+ );
+ assert_eq!(
+ HgPath::new(br":foo:bar").split_drive(),
+ (HgPath::new(b""), HgPath::new(br":foo:bar"))
+ );
+ // Also try NT paths; should not split them
+ assert_eq!(
+ HgPath::new(br"c:\foo\bar").split_drive(),
+ (HgPath::new(b""), HgPath::new(br"c:\foo\bar"))
+ );
+ assert_eq!(
+ HgPath::new(b"c:/foo/bar").split_drive(),
+ (HgPath::new(b""), HgPath::new(br"c:/foo/bar"))
+ );
+ assert_eq!(
+ HgPath::new(br"\\conky\mountpoint\foo\bar").split_drive(),
+ (
+ HgPath::new(b""),
+ HgPath::new(br"\\conky\mountpoint\foo\bar")
+ )
+ );
+ }
+
+ #[test]
+ #[cfg(windows)]
+ fn test_split_drive() {
+ assert_eq!(
+ HgPath::new(br"c:\foo\bar").split_drive(),
+ (HgPath::new(br"c:"), HgPath::new(br"\foo\bar"))
+ );
+ assert_eq!(
+ HgPath::new(b"c:/foo/bar").split_drive(),
+ (HgPath::new(br"c:"), HgPath::new(br"/foo/bar"))
+ );
+ assert_eq!(
+ HgPath::new(br"\\conky\mountpoint\foo\bar").split_drive(),
+ (
+ HgPath::new(br"\\conky\mountpoint"),
+ HgPath::new(br"\foo\bar")
+ )
+ );
+ assert_eq!(
+ HgPath::new(br"//conky/mountpoint/foo/bar").split_drive(),
+ (
+ HgPath::new(br"//conky/mountpoint"),
+ HgPath::new(br"/foo/bar")
+ )
+ );
+ assert_eq!(
+ HgPath::new(br"\\\conky\mountpoint\foo\bar").split_drive(),
+ (
+ HgPath::new(br""),
+ HgPath::new(br"\\\conky\mountpoint\foo\bar")
+ )
+ );
+ assert_eq!(
+ HgPath::new(br"///conky/mountpoint/foo/bar").split_drive(),
+ (
+ HgPath::new(br""),
+ HgPath::new(br"///conky/mountpoint/foo/bar")
+ )
+ );
+ assert_eq!(
+ HgPath::new(br"\\conky\\mountpoint\foo\bar").split_drive(),
+ (
+ HgPath::new(br""),
+ HgPath::new(br"\\conky\\mountpoint\foo\bar")
+ )
+ );
+ assert_eq!(
+ HgPath::new(br"//conky//mountpoint/foo/bar").split_drive(),
+ (
+ HgPath::new(br""),
+ HgPath::new(br"//conky//mountpoint/foo/bar")
+ )
+ );
+ // UNC part containing U+0130
+ assert_eq!(
+ HgPath::new(b"//conky/MOUNTPO\xc4\xb0NT/foo/bar").split_drive(),
+ (
+ HgPath::new(b"//conky/MOUNTPO\xc4\xb0NT"),
+ HgPath::new(br"/foo/bar")
+ )
+ );
+ }
+
+ #[test]
+ fn test_parent() {
+ let path = HgPath::new(b"");
+ assert_eq!(path.parent(), path);
+
+ let path = HgPath::new(b"a");
+ assert_eq!(path.parent(), HgPath::new(b""));
+
+ let path = HgPath::new(b"a/b");
+ assert_eq!(path.parent(), HgPath::new(b"a"));
+
+ let path = HgPath::new(b"a/other/b");
+ assert_eq!(path.parent(), HgPath::new(b"a/other"));
+ }
}
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/rust/hg-core/src/utils/path_auditor.rs Thu Apr 16 22:51:09 2020 +0530
@@ -0,0 +1,232 @@
+// path_auditor.rs
+//
+// Copyright 2020
+// Raphaël Gomès <rgomes@octobus.net>,
+//
+// This software may be used and distributed according to the terms of the
+// GNU General Public License version 2 or any later version.
+
+use crate::utils::{
+ files::lower_clean,
+ find_slice_in_slice,
+ hg_path::{hg_path_to_path_buf, HgPath, HgPathBuf, HgPathError},
+};
+use std::collections::HashSet;
+use std::path::{Path, PathBuf};
+use std::sync::{Mutex, RwLock};
+
+/// Ensures that a path is valid for use in the repository i.e. does not use
+/// any banned components, does not traverse a symlink, etc.
+#[derive(Debug, Default)]
+pub struct PathAuditor {
+ audited: Mutex<HashSet<HgPathBuf>>,
+ audited_dirs: RwLock<HashSet<HgPathBuf>>,
+ root: PathBuf,
+}
+
+impl PathAuditor {
+ pub fn new(root: impl AsRef<Path>) -> Self {
+ Self {
+ root: root.as_ref().to_owned(),
+ ..Default::default()
+ }
+ }
+ pub fn audit_path(
+ &self,
+ path: impl AsRef<HgPath>,
+ ) -> Result<(), HgPathError> {
+ // TODO windows "localpath" normalization
+ let path = path.as_ref();
+ if path.is_empty() {
+ return Ok(());
+ }
+ // TODO case normalization
+ if self.audited.lock().unwrap().contains(path) {
+ return Ok(());
+ }
+ // AIX ignores "/" at end of path, others raise EISDIR.
+ let last_byte = path.as_bytes()[path.len() - 1];
+ if last_byte == b'/' || last_byte == b'\\' {
+ return Err(HgPathError::EndsWithSlash(path.to_owned()));
+ }
+ let parts: Vec<_> = path
+ .as_bytes()
+ .split(|b| std::path::is_separator(*b as char))
+ .collect();
+
+ let first_component = lower_clean(parts[0]);
+ let first_component = first_component.as_slice();
+ if !path.split_drive().0.is_empty()
+ || (first_component == b".hg"
+ || first_component == b".hg."
+ || first_component == b"")
+ || parts.iter().any(|c| c == b"..")
+ {
+ return Err(HgPathError::InsideDotHg(path.to_owned()));
+ }
+
+ // Windows shortname aliases
+ for part in parts.iter() {
+ if part.contains(&b'~') {
+ let mut split = part.splitn(2, |b| *b == b'~');
+ let first =
+ split.next().unwrap().to_owned().to_ascii_uppercase();
+ let last = split.next().unwrap();
+ if last.iter().all(u8::is_ascii_digit)
+ && (first == b"HG" || first == b"HG8B6C")
+ {
+ return Err(HgPathError::ContainsIllegalComponent(
+ path.to_owned(),
+ ));
+ }
+ }
+ }
+ let lower_path = lower_clean(path.as_bytes());
+ if find_slice_in_slice(&lower_path, b".hg").is_some() {
+ let lower_parts: Vec<_> = path
+ .as_bytes()
+ .split(|b| std::path::is_separator(*b as char))
+ .collect();
+ for pattern in [b".hg".to_vec(), b".hg.".to_vec()].iter() {
+ if let Some(pos) = lower_parts[1..]
+ .iter()
+ .position(|part| part == &pattern.as_slice())
+ {
+ let base = lower_parts[..=pos]
+ .iter()
+ .fold(HgPathBuf::new(), |acc, p| {
+ acc.join(HgPath::new(p))
+ });
+ return Err(HgPathError::IsInsideNestedRepo {
+ path: path.to_owned(),
+ nested_repo: base,
+ });
+ }
+ }
+ }
+
+ let parts = &parts[..parts.len().saturating_sub(1)];
+
+ // We don't want to add "foo/bar/baz" to `audited_dirs` before checking
+ // if there's a "foo/.hg" directory. This also means we won't
+ // accidentally traverse a symlink into some other filesystem (which
+ // is potentially expensive to access).
+ for index in 0..parts.len() {
+ let prefix = &parts[..index + 1].join(&b'/');
+ let prefix = HgPath::new(prefix);
+ if self.audited_dirs.read().unwrap().contains(prefix) {
+ continue;
+ }
+ self.check_filesystem(&prefix, &path)?;
+ self.audited_dirs.write().unwrap().insert(prefix.to_owned());
+ }
+
+ self.audited.lock().unwrap().insert(path.to_owned());
+
+ Ok(())
+ }
+
+ pub fn check_filesystem(
+ &self,
+ prefix: impl AsRef<HgPath>,
+ path: impl AsRef<HgPath>,
+ ) -> Result<(), HgPathError> {
+ let prefix = prefix.as_ref();
+ let path = path.as_ref();
+ let current_path = self.root.join(
+ hg_path_to_path_buf(prefix)
+ .map_err(|_| HgPathError::NotFsCompliant(path.to_owned()))?,
+ );
+ match std::fs::symlink_metadata(&current_path) {
+ Err(e) => {
+ // EINVAL can be raised as invalid path syntax under win32.
+ if e.kind() != std::io::ErrorKind::NotFound
+ && e.kind() != std::io::ErrorKind::InvalidInput
+ && e.raw_os_error() != Some(20)
+ {
+ // Rust does not yet have an `ErrorKind` for
+ // `NotADirectory` (errno 20)
+ // It happens if the dirstate contains `foo/bar` and
+ // foo is not a directory
+ return Err(HgPathError::NotFsCompliant(path.to_owned()));
+ }
+ }
+ Ok(meta) => {
+ if meta.file_type().is_symlink() {
+ return Err(HgPathError::TraversesSymbolicLink {
+ path: path.to_owned(),
+ symlink: prefix.to_owned(),
+ });
+ }
+ if meta.file_type().is_dir()
+ && current_path.join(".hg").is_dir()
+ {
+ return Err(HgPathError::IsInsideNestedRepo {
+ path: path.to_owned(),
+ nested_repo: prefix.to_owned(),
+ });
+ }
+ }
+ };
+
+ Ok(())
+ }
+
+ pub fn check(&self, path: impl AsRef<HgPath>) -> bool {
+ self.audit_path(path).is_ok()
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+ use crate::utils::files::get_path_from_bytes;
+ use crate::utils::hg_path::path_to_hg_path_buf;
+
+ #[test]
+ fn test_path_auditor() {
+ let auditor = PathAuditor::new(get_path_from_bytes(b"/tmp"));
+
+ let path = HgPath::new(b".hg/00changelog.i");
+ assert_eq!(
+ auditor.audit_path(path),
+ Err(HgPathError::InsideDotHg(path.to_owned()))
+ );
+ let path = HgPath::new(b"this/is/nested/.hg/thing.txt");
+ assert_eq!(
+ auditor.audit_path(path),
+ Err(HgPathError::IsInsideNestedRepo {
+ path: path.to_owned(),
+ nested_repo: HgPathBuf::from_bytes(b"this/is/nested")
+ })
+ );
+
+ use std::fs::{create_dir, File};
+ use tempfile::tempdir;
+
+ let base_dir = tempdir().unwrap();
+ let base_dir_path = base_dir.path();
+ let a = base_dir_path.join("a");
+ let b = base_dir_path.join("b");
+ create_dir(&a).unwrap();
+ let in_a_path = a.join("in_a");
+ File::create(in_a_path).unwrap();
+
+ // TODO make portable
+ std::os::unix::fs::symlink(&a, &b).unwrap();
+
+ let buf = b.join("in_a").components().skip(2).collect::<PathBuf>();
+ eprintln!("buf: {}", buf.display());
+ let path = path_to_hg_path_buf(buf).unwrap();
+ assert_eq!(
+ auditor.audit_path(&path),
+ Err(HgPathError::TraversesSymbolicLink {
+ path: path,
+ symlink: path_to_hg_path_buf(
+ b.components().skip(2).collect::<PathBuf>()
+ )
+ .unwrap()
+ })
+ );
+ }
+}
--- a/rust/hg-core/tests/test_missing_ancestors.rs Mon Apr 13 16:30:13 2020 +0300
+++ b/rust/hg-core/tests/test_missing_ancestors.rs Thu Apr 16 22:51:09 2020 +0530
@@ -1,8 +1,9 @@
use hg::testing::VecGraph;
use hg::Revision;
use hg::*;
-use rand::distributions::{Distribution, LogNormal, Uniform};
+use rand::distributions::{Distribution, Uniform};
use rand::{thread_rng, Rng, RngCore, SeedableRng};
+use rand_distr::LogNormal;
use std::cmp::min;
use std::collections::HashSet;
use std::env;
@@ -191,7 +192,7 @@
let mu = mu_opt.unwrap_or(1.1);
let sigma = sigma_opt.unwrap_or(0.8);
- let log_normal = LogNormal::new(mu, sigma);
+ let log_normal = LogNormal::new(mu, sigma).unwrap();
let nb = min(maxrev as usize, log_normal.sample(rng).floor() as usize);
let dist = Uniform::from(NULL_REVISION..maxrev);
--- a/rust/hg-cpython/Cargo.toml Mon Apr 13 16:30:13 2020 +0300
+++ b/rust/hg-cpython/Cargo.toml Thu Apr 16 22:51:09 2020 +0530
@@ -10,6 +10,7 @@
[features]
default = ["python27"]
+with-re2 = ["hg-core/with-re2"]
# Features to build an extension module:
python27 = ["cpython/python27-sys", "cpython/extension-module-2-7"]
@@ -21,9 +22,11 @@
python3-bin = ["cpython/python3-sys"]
[dependencies]
-hg-core = { path = "../hg-core" }
+hg-core = { path = "../hg-core"}
libc = '*'
+log = "0.4.8"
+simple_logger = "1.6.0"
[dependencies.cpython]
-version = "0.3"
+version = "0.4.1"
default-features = false
--- a/rust/hg-cpython/src/cindex.rs Mon Apr 13 16:30:13 2020 +0300
+++ b/rust/hg-cpython/src/cindex.rs Thu Apr 16 22:51:09 2020 +0530
@@ -10,15 +10,25 @@
//! Ideally, we should use an Index entirely implemented in Rust,
//! but this will take some time to get there.
-use cpython::{exc::ImportError, PyClone, PyErr, PyObject, PyResult, Python};
+use cpython::{
+ exc::ImportError, ObjectProtocol, PyClone, PyErr, PyObject, PyResult,
+ PyTuple, Python, PythonObject,
+};
+use hg::revlog::{Node, RevlogIndex};
use hg::{Graph, GraphError, Revision, WORKING_DIRECTORY_REVISION};
use libc::c_int;
-const REVLOG_CABI_VERSION: c_int = 1;
+const REVLOG_CABI_VERSION: c_int = 2;
#[repr(C)]
pub struct Revlog_CAPI {
abi_version: c_int,
+ index_length:
+ unsafe extern "C" fn(index: *mut revlog_capi::RawPyObject) -> c_int,
+ index_node: unsafe extern "C" fn(
+ index: *mut revlog_capi::RawPyObject,
+ rev: c_int,
+ ) -> *const Node,
index_parents: unsafe extern "C" fn(
index: *mut revlog_capi::RawPyObject,
rev: c_int,
@@ -90,6 +100,15 @@
pub fn inner(&self) -> &PyObject {
&self.index
}
+
+ pub fn append(&mut self, py: Python, tup: PyTuple) -> PyResult<PyObject> {
+ self.index.call_method(
+ py,
+ "append",
+ PyTuple::new(py, &[tup.into_object()]),
+ None,
+ )
+ }
}
impl Clone for Index {
@@ -131,3 +150,30 @@
}
}
}
+
+impl RevlogIndex for Index {
+ /// Note C return type is Py_ssize_t (hence signed), but we shall
+ /// force it to unsigned, because it's a length
+ fn len(&self) -> usize {
+ unsafe { (self.capi.index_length)(self.index.as_ptr()) as usize }
+ }
+
+ fn node<'a>(&'a self, rev: Revision) -> Option<&'a Node> {
+ let raw = unsafe {
+ (self.capi.index_node)(self.index.as_ptr(), rev as c_int)
+ };
+ if raw.is_null() {
+ None
+ } else {
+ // TODO it would be much better for the C layer to give us
+ // a length, since the hash length will change in the near
+ // future, but that's probably out of scope for the nodemap
+ // patch series.
+ //
+ // The root of that unsafety relies in the signature of
+ // `capi.index_node()` itself: returning a `Node` pointer
+ // whereas it's a `char *` in the C counterpart.
+ Some(unsafe { &*raw })
+ }
+ }
+}
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/rust/hg-cpython/src/debug.rs Thu Apr 16 22:51:09 2020 +0530
@@ -0,0 +1,26 @@
+// debug.rs
+//
+// Copyright 2020 Raphaël Gomès <rgomes@octobus.net>
+//
+// This software may be used and distributed according to the terms of the
+// GNU General Public License version 2 or any later version.
+
+//! Module to get debug information about Rust extensions.
+use cpython::{PyDict, PyModule, PyResult, Python};
+
+/// Create the module, with `__package__` given from parent
+pub fn init_module(py: Python, package: &str) -> PyResult<PyModule> {
+ let dotted_name = &format!("{}.debug", package);
+ let m = PyModule::new(py, dotted_name)?;
+
+ m.add(py, "__package__", package)?;
+ m.add(py, "__doc__", "Rust debugging information")?;
+
+ m.add(py, "re2_installed", cfg!(feature = "with-re2"))?;
+
+ let sys = PyModule::import(py, "sys")?;
+ let sys_modules: PyDict = sys.get(py, "modules")?.extract(py)?;
+ sys_modules.set_item(py, dotted_name, &m)?;
+
+ Ok(m)
+}
--- a/rust/hg-cpython/src/dirstate.rs Mon Apr 13 16:30:13 2020 +0300
+++ b/rust/hg-cpython/src/dirstate.rs Thu Apr 16 22:51:09 2020 +0530
@@ -14,12 +14,15 @@
mod dirstate_map;
mod non_normal_entries;
mod status;
-use crate::dirstate::{
- dirs_multiset::Dirs, dirstate_map::DirstateMap, status::status_wrapper,
+use crate::{
+ dirstate::{
+ dirs_multiset::Dirs, dirstate_map::DirstateMap, status::status_wrapper,
+ },
+ exceptions,
};
use cpython::{
- exc, PyBytes, PyDict, PyErr, PyModule, PyObject, PyResult, PySequence,
- Python,
+ exc, PyBytes, PyDict, PyErr, PyList, PyModule, PyObject, PyResult,
+ PySequence, Python,
};
use hg::{
utils::hg_path::HgPathBuf, DirstateEntry, DirstateParseError, EntryState,
@@ -104,9 +107,16 @@
let dotted_name = &format!("{}.dirstate", package);
let m = PyModule::new(py, dotted_name)?;
+ simple_logger::init_by_env();
+
m.add(py, "__package__", package)?;
m.add(py, "__doc__", "Dirstate - Rust implementation")?;
+ m.add(
+ py,
+ "FallbackError",
+ py.get_type::<exceptions::FallbackError>(),
+ )?;
m.add_class::<Dirs>(py)?;
m.add_class::<DirstateMap>(py)?;
m.add(
@@ -118,9 +128,12 @@
dmap: DirstateMap,
root_dir: PyObject,
matcher: PyObject,
- list_clean: bool,
+ ignorefiles: PyList,
+ check_exec: bool,
last_normal_time: i64,
- check_exec: bool
+ list_clean: bool,
+ list_ignored: bool,
+ list_unknown: bool
)
),
)?;
--- a/rust/hg-cpython/src/dirstate/copymap.rs Mon Apr 13 16:30:13 2020 +0300
+++ b/rust/hg-cpython/src/dirstate/copymap.rs Thu Apr 16 22:51:09 2020 +0530
@@ -8,11 +8,12 @@
//! Bindings for `hg::dirstate::dirstate_map::CopyMap` provided by the
//! `hg-core` package.
-use cpython::{PyBytes, PyClone, PyDict, PyObject, PyResult, Python};
+use cpython::{
+ PyBytes, PyClone, PyDict, PyObject, PyResult, Python, UnsafePyLeaked,
+};
use std::cell::RefCell;
use crate::dirstate::dirstate_map::DirstateMap;
-use crate::ref_sharing::PyLeaked;
use hg::{utils::hg_path::HgPathBuf, CopyMapIter};
py_class!(pub class CopyMap |py| {
@@ -104,14 +105,14 @@
py_shared_iterator!(
CopyMapKeysIterator,
- PyLeaked<CopyMapIter<'static>>,
+ UnsafePyLeaked<CopyMapIter<'static>>,
CopyMap::translate_key,
Option<PyBytes>
);
py_shared_iterator!(
CopyMapItemsIterator,
- PyLeaked<CopyMapIter<'static>>,
+ UnsafePyLeaked<CopyMapIter<'static>>,
CopyMap::translate_key_value,
Option<(PyBytes, PyBytes)>
);
--- a/rust/hg-cpython/src/dirstate/dirs_multiset.rs Mon Apr 13 16:30:13 2020 +0300
+++ b/rust/hg-cpython/src/dirstate/dirs_multiset.rs Thu Apr 16 22:51:09 2020 +0530
@@ -13,11 +13,10 @@
use cpython::{
exc, ObjectProtocol, PyBytes, PyClone, PyDict, PyErr, PyObject, PyResult,
- Python,
+ Python, UnsafePyLeaked,
};
use crate::dirstate::extract_dirstate;
-use crate::ref_sharing::{PyLeaked, PySharedRefCell};
use hg::{
utils::hg_path::{HgPath, HgPathBuf},
DirsMultiset, DirsMultisetIter, DirstateMapError, DirstateParseError,
@@ -25,7 +24,7 @@
};
py_class!(pub class Dirs |py| {
- data inner: PySharedRefCell<DirsMultiset>;
+ @shared data inner: DirsMultiset;
// `map` is either a `dict` or a flat iterator (usually a `set`, sometimes
// a `list`)
@@ -65,14 +64,11 @@
})?
};
- Self::create_instance(
- py,
- PySharedRefCell::new(inner),
- )
+ Self::create_instance(py, inner)
}
def addpath(&self, path: PyObject) -> PyResult<PyObject> {
- self.inner_shared(py).borrow_mut()?.add_path(
+ self.inner(py).borrow_mut().add_path(
HgPath::new(path.extract::<PyBytes>(py)?.data(py)),
).and(Ok(py.None())).or_else(|e| {
match e {
@@ -90,7 +86,7 @@
}
def delpath(&self, path: PyObject) -> PyResult<PyObject> {
- self.inner_shared(py).borrow_mut()?.delete_path(
+ self.inner(py).borrow_mut().delete_path(
HgPath::new(path.extract::<PyBytes>(py)?.data(py)),
)
.and(Ok(py.None()))
@@ -109,7 +105,7 @@
})
}
def __iter__(&self) -> PyResult<DirsMultisetKeysIterator> {
- let leaked_ref = self.inner_shared(py).leak_immutable();
+ let leaked_ref = self.inner(py).leak_immutable();
DirsMultisetKeysIterator::from_inner(
py,
unsafe { leaked_ref.map(py, |o| o.iter()) },
@@ -117,17 +113,15 @@
}
def __contains__(&self, item: PyObject) -> PyResult<bool> {
- Ok(self.inner_shared(py).borrow().contains(HgPath::new(
+ Ok(self.inner(py).borrow().contains(HgPath::new(
item.extract::<PyBytes>(py)?.data(py).as_ref(),
)))
}
});
-py_shared_ref!(Dirs, DirsMultiset, inner, inner_shared);
-
impl Dirs {
pub fn from_inner(py: Python, d: DirsMultiset) -> PyResult<Self> {
- Self::create_instance(py, PySharedRefCell::new(d))
+ Self::create_instance(py, d)
}
fn translate_key(
@@ -140,7 +134,7 @@
py_shared_iterator!(
DirsMultisetKeysIterator,
- PyLeaked<DirsMultisetIter<'static>>,
+ UnsafePyLeaked<DirsMultisetIter<'static>>,
Dirs::translate_key,
Option<PyBytes>
);
--- a/rust/hg-cpython/src/dirstate/dirstate_map.rs Mon Apr 13 16:30:13 2020 +0300
+++ b/rust/hg-cpython/src/dirstate/dirstate_map.rs Thu Apr 16 22:51:09 2020 +0530
@@ -15,13 +15,15 @@
use cpython::{
exc, ObjectProtocol, PyBool, PyBytes, PyClone, PyDict, PyErr, PyList,
PyObject, PyResult, PyString, PyTuple, Python, PythonObject, ToPyObject,
+ UnsafePyLeaked,
};
use crate::{
dirstate::copymap::{CopyMap, CopyMapItemsIterator, CopyMapKeysIterator},
- dirstate::non_normal_entries::NonNormalEntries,
+ dirstate::non_normal_entries::{
+ NonNormalEntries, NonNormalEntriesIterator,
+ },
dirstate::{dirs_multiset::Dirs, make_dirstate_tuple},
- ref_sharing::{PyLeaked, PySharedRefCell},
};
use hg::{
utils::hg_path::{HgPath, HgPathBuf},
@@ -43,18 +45,15 @@
// All attributes also have to have a separate refcount data attribute for
// leaks, with all methods that go along for reference sharing.
py_class!(pub class DirstateMap |py| {
- data inner: PySharedRefCell<RustDirstateMap>;
+ @shared data inner: RustDirstateMap;
def __new__(_cls, _root: PyObject) -> PyResult<Self> {
let inner = RustDirstateMap::default();
- Self::create_instance(
- py,
- PySharedRefCell::new(inner),
- )
+ Self::create_instance(py, inner)
}
def clear(&self) -> PyResult<PyObject> {
- self.inner_shared(py).borrow_mut()?.clear();
+ self.inner(py).borrow_mut().clear();
Ok(py.None())
}
@@ -64,7 +63,7 @@
default: Option<PyObject> = None
) -> PyResult<Option<PyObject>> {
let key = key.extract::<PyBytes>(py)?;
- match self.inner_shared(py).borrow().get(HgPath::new(key.data(py))) {
+ match self.inner(py).borrow().get(HgPath::new(key.data(py))) {
Some(entry) => {
Ok(Some(make_dirstate_tuple(py, entry)?))
},
@@ -81,7 +80,7 @@
size: PyObject,
mtime: PyObject
) -> PyResult<PyObject> {
- self.inner_shared(py).borrow_mut()?.add_file(
+ self.inner(py).borrow_mut().add_file(
HgPath::new(f.extract::<PyBytes>(py)?.data(py)),
oldstate.extract::<PyBytes>(py)?.data(py)[0]
.try_into()
@@ -109,7 +108,7 @@
oldstate: PyObject,
size: PyObject
) -> PyResult<PyObject> {
- self.inner_shared(py).borrow_mut()?
+ self.inner(py).borrow_mut()
.remove_file(
HgPath::new(f.extract::<PyBytes>(py)?.data(py)),
oldstate.extract::<PyBytes>(py)?.data(py)[0]
@@ -133,7 +132,7 @@
f: PyObject,
oldstate: PyObject
) -> PyResult<PyBool> {
- self.inner_shared(py).borrow_mut()?
+ self.inner(py).borrow_mut()
.drop_file(
HgPath::new(f.extract::<PyBytes>(py)?.data(py)),
oldstate.extract::<PyBytes>(py)?.data(py)[0]
@@ -164,13 +163,13 @@
))
})
.collect();
- self.inner_shared(py).borrow_mut()?
+ self.inner(py).borrow_mut()
.clear_ambiguous_times(files?, now.extract(py)?);
Ok(py.None())
}
def other_parent_entries(&self) -> PyResult<PyObject> {
- let mut inner_shared = self.inner_shared(py).borrow_mut()?;
+ let mut inner_shared = self.inner(py).borrow_mut();
let (_, other_parent) =
inner_shared.get_non_normal_other_parent_entries();
@@ -178,8 +177,7 @@
locals.set_item(
py,
"other_parent",
- other_parent.as_ref()
- .unwrap()
+ other_parent
.iter()
.map(|v| PyBytes::new(py, v.as_ref()))
.collect::<Vec<PyBytes>>()
@@ -196,11 +194,9 @@
def non_normal_entries_contains(&self, key: PyObject) -> PyResult<bool> {
let key = key.extract::<PyBytes>(py)?;
Ok(self
- .inner_shared(py)
- .borrow_mut()?
+ .inner(py)
+ .borrow_mut()
.get_non_normal_other_parent_entries().0
- .as_ref()
- .unwrap()
.contains(HgPath::new(key.data(py))))
}
@@ -211,11 +207,10 @@
&format!(
"NonNormalEntries: {:?}",
self
- .inner_shared(py)
- .borrow_mut()?
+ .inner(py)
+ .borrow_mut()
.get_non_normal_other_parent_entries().0
- .as_ref()
- .unwrap().iter().map(|o| o))
+ .iter().map(|o| o))
)
)
}
@@ -223,8 +218,8 @@
def non_normal_entries_remove(&self, key: PyObject) -> PyResult<PyObject> {
let key = key.extract::<PyBytes>(py)?;
self
- .inner_shared(py)
- .borrow_mut()?
+ .inner(py)
+ .borrow_mut()
.non_normal_entries_remove(HgPath::new(key.data(py)));
Ok(py.None())
}
@@ -239,21 +234,37 @@
.collect();
let res = self
- .inner_shared(py)
- .borrow_mut()?
+ .inner(py)
+ .borrow_mut()
.non_normal_entries_union(other?);
let ret = PyList::new(py, &[]);
- for (i, filename) in res.iter().enumerate() {
+ for filename in res.iter() {
let as_pystring = PyBytes::new(py, filename.as_bytes());
- ret.insert_item(py, i, as_pystring.into_object());
+ ret.append(py, as_pystring.into_object());
}
Ok(ret)
}
+ def non_normal_entries_iter(&self) -> PyResult<NonNormalEntriesIterator> {
+ // Make sure the sets are defined before we no longer have a mutable
+ // reference to the dmap.
+ self.inner(py)
+ .borrow_mut()
+ .set_non_normal_other_parent_entries(false);
+
+ let leaked_ref = self.inner(py).leak_immutable();
+
+ NonNormalEntriesIterator::from_inner(py, unsafe {
+ leaked_ref.map(py, |o| {
+ o.get_non_normal_other_parent_entries_panic().0.iter()
+ })
+ })
+ }
+
def hastrackeddir(&self, d: PyObject) -> PyResult<PyBool> {
let d = d.extract::<PyBytes>(py)?;
- Ok(self.inner_shared(py).borrow_mut()?
+ Ok(self.inner(py).borrow_mut()
.has_tracked_dir(HgPath::new(d.data(py)))
.map_err(|e| {
PyErr::new::<exc::ValueError, _>(py, e.to_string())
@@ -263,7 +274,7 @@
def hasdir(&self, d: PyObject) -> PyResult<PyBool> {
let d = d.extract::<PyBytes>(py)?;
- Ok(self.inner_shared(py).borrow_mut()?
+ Ok(self.inner(py).borrow_mut()
.has_dir(HgPath::new(d.data(py)))
.map_err(|e| {
PyErr::new::<exc::ValueError, _>(py, e.to_string())
@@ -272,7 +283,7 @@
}
def parents(&self, st: PyObject) -> PyResult<PyTuple> {
- self.inner_shared(py).borrow_mut()?
+ self.inner(py).borrow_mut()
.parents(st.extract::<PyBytes>(py)?.data(py))
.and_then(|d| {
Ok((PyBytes::new(py, &d.p1), PyBytes::new(py, &d.p2))
@@ -290,13 +301,13 @@
let p1 = extract_node_id(py, &p1)?;
let p2 = extract_node_id(py, &p2)?;
- self.inner_shared(py).borrow_mut()?
+ self.inner(py).borrow_mut()
.set_parents(&DirstateParents { p1, p2 });
Ok(py.None())
}
def read(&self, st: PyObject) -> PyResult<Option<PyObject>> {
- match self.inner_shared(py).borrow_mut()?
+ match self.inner(py).borrow_mut()
.read(st.extract::<PyBytes>(py)?.data(py))
{
Ok(Some(parents)) => Ok(Some(
@@ -323,7 +334,7 @@
p2: extract_node_id(py, &p2)?,
};
- match self.inner_shared(py).borrow_mut()?.pack(parents, now) {
+ match self.inner(py).borrow_mut().pack(parents, now) {
Ok(packed) => Ok(PyBytes::new(py, &packed)),
Err(_) => Err(PyErr::new::<exc::OSError, _>(
py,
@@ -335,7 +346,7 @@
def filefoldmapasdict(&self) -> PyResult<PyDict> {
let dict = PyDict::new(py);
for (key, value) in
- self.inner_shared(py).borrow_mut()?.build_file_fold_map().iter()
+ self.inner(py).borrow_mut().build_file_fold_map().iter()
{
dict.set_item(py, key.as_ref().to_vec(), value.as_ref().to_vec())?;
}
@@ -343,20 +354,18 @@
}
def __len__(&self) -> PyResult<usize> {
- Ok(self.inner_shared(py).borrow().len())
+ Ok(self.inner(py).borrow().len())
}
def __contains__(&self, key: PyObject) -> PyResult<bool> {
let key = key.extract::<PyBytes>(py)?;
- Ok(self.inner_shared(py)
- .borrow()
- .contains_key(HgPath::new(key.data(py))))
+ Ok(self.inner(py).borrow().contains_key(HgPath::new(key.data(py))))
}
def __getitem__(&self, key: PyObject) -> PyResult<PyObject> {
let key = key.extract::<PyBytes>(py)?;
let key = HgPath::new(key.data(py));
- match self.inner_shared(py).borrow().get(key) {
+ match self.inner(py).borrow().get(key) {
Some(entry) => {
Ok(make_dirstate_tuple(py, entry)?)
},
@@ -368,7 +377,7 @@
}
def keys(&self) -> PyResult<DirstateMapKeysIterator> {
- let leaked_ref = self.inner_shared(py).leak_immutable();
+ let leaked_ref = self.inner(py).leak_immutable();
DirstateMapKeysIterator::from_inner(
py,
unsafe { leaked_ref.map(py, |o| o.iter()) },
@@ -376,7 +385,7 @@
}
def items(&self) -> PyResult<DirstateMapItemsIterator> {
- let leaked_ref = self.inner_shared(py).leak_immutable();
+ let leaked_ref = self.inner(py).leak_immutable();
DirstateMapItemsIterator::from_inner(
py,
unsafe { leaked_ref.map(py, |o| o.iter()) },
@@ -384,7 +393,7 @@
}
def __iter__(&self) -> PyResult<DirstateMapKeysIterator> {
- let leaked_ref = self.inner_shared(py).leak_immutable();
+ let leaked_ref = self.inner(py).leak_immutable();
DirstateMapKeysIterator::from_inner(
py,
unsafe { leaked_ref.map(py, |o| o.iter()) },
@@ -393,14 +402,14 @@
def getdirs(&self) -> PyResult<Dirs> {
// TODO don't copy, share the reference
- self.inner_shared(py).borrow_mut()?.set_dirs()
+ self.inner(py).borrow_mut().set_dirs()
.map_err(|e| {
PyErr::new::<exc::ValueError, _>(py, e.to_string())
})?;
Dirs::from_inner(
py,
DirsMultiset::from_dirstate(
- &self.inner_shared(py).borrow(),
+ &self.inner(py).borrow(),
Some(EntryState::Removed),
)
.map_err(|e| {
@@ -410,14 +419,14 @@
}
def getalldirs(&self) -> PyResult<Dirs> {
// TODO don't copy, share the reference
- self.inner_shared(py).borrow_mut()?.set_all_dirs()
+ self.inner(py).borrow_mut().set_all_dirs()
.map_err(|e| {
PyErr::new::<exc::ValueError, _>(py, e.to_string())
})?;
Dirs::from_inner(
py,
DirsMultiset::from_dirstate(
- &self.inner_shared(py).borrow(),
+ &self.inner(py).borrow(),
None,
).map_err(|e| {
PyErr::new::<exc::ValueError, _>(py, e.to_string())
@@ -428,7 +437,7 @@
// TODO all copymap* methods, see docstring above
def copymapcopy(&self) -> PyResult<PyDict> {
let dict = PyDict::new(py);
- for (key, value) in self.inner_shared(py).borrow().copy_map.iter() {
+ for (key, value) in self.inner(py).borrow().copy_map.iter() {
dict.set_item(
py,
PyBytes::new(py, key.as_ref()),
@@ -440,10 +449,7 @@
def copymapgetitem(&self, key: PyObject) -> PyResult<PyBytes> {
let key = key.extract::<PyBytes>(py)?;
- match self.inner_shared(py)
- .borrow()
- .copy_map
- .get(HgPath::new(key.data(py))) {
+ match self.inner(py).borrow().copy_map.get(HgPath::new(key.data(py))) {
Some(copy) => Ok(PyBytes::new(py, copy.as_ref())),
None => Err(PyErr::new::<exc::KeyError, _>(
py,
@@ -456,12 +462,12 @@
}
def copymaplen(&self) -> PyResult<usize> {
- Ok(self.inner_shared(py).borrow().copy_map.len())
+ Ok(self.inner(py).borrow().copy_map.len())
}
def copymapcontains(&self, key: PyObject) -> PyResult<bool> {
let key = key.extract::<PyBytes>(py)?;
Ok(self
- .inner_shared(py)
+ .inner(py)
.borrow()
.copy_map
.contains_key(HgPath::new(key.data(py))))
@@ -473,7 +479,7 @@
) -> PyResult<Option<PyObject>> {
let key = key.extract::<PyBytes>(py)?;
match self
- .inner_shared(py)
+ .inner(py)
.borrow()
.copy_map
.get(HgPath::new(key.data(py)))
@@ -491,7 +497,7 @@
) -> PyResult<PyObject> {
let key = key.extract::<PyBytes>(py)?;
let value = value.extract::<PyBytes>(py)?;
- self.inner_shared(py).borrow_mut()?.copy_map.insert(
+ self.inner(py).borrow_mut().copy_map.insert(
HgPathBuf::from_bytes(key.data(py)),
HgPathBuf::from_bytes(value.data(py)),
);
@@ -504,8 +510,8 @@
) -> PyResult<Option<PyObject>> {
let key = key.extract::<PyBytes>(py)?;
match self
- .inner_shared(py)
- .borrow_mut()?
+ .inner(py)
+ .borrow_mut()
.copy_map
.remove(HgPath::new(key.data(py)))
{
@@ -515,7 +521,7 @@
}
def copymapiter(&self) -> PyResult<CopyMapKeysIterator> {
- let leaked_ref = self.inner_shared(py).leak_immutable();
+ let leaked_ref = self.inner(py).leak_immutable();
CopyMapKeysIterator::from_inner(
py,
unsafe { leaked_ref.map(py, |o| o.copy_map.iter()) },
@@ -523,7 +529,7 @@
}
def copymapitemsiter(&self) -> PyResult<CopyMapItemsIterator> {
- let leaked_ref = self.inner_shared(py).leak_immutable();
+ let leaked_ref = self.inner(py).leak_immutable();
CopyMapItemsIterator::from_inner(
py,
unsafe { leaked_ref.map(py, |o| o.copy_map.iter()) },
@@ -537,7 +543,7 @@
&'a self,
py: Python<'a>,
) -> Ref<'a, RustDirstateMap> {
- self.inner_shared(py).borrow()
+ self.inner(py).borrow()
}
fn translate_key(
py: Python,
@@ -557,18 +563,16 @@
}
}
-py_shared_ref!(DirstateMap, RustDirstateMap, inner, inner_shared);
-
py_shared_iterator!(
DirstateMapKeysIterator,
- PyLeaked<StateMapIter<'static>>,
+ UnsafePyLeaked<StateMapIter<'static>>,
DirstateMap::translate_key,
Option<PyBytes>
);
py_shared_iterator!(
DirstateMapItemsIterator,
- PyLeaked<StateMapIter<'static>>,
+ UnsafePyLeaked<StateMapIter<'static>>,
DirstateMap::translate_key_value,
Option<(PyBytes, PyObject)>
);
--- a/rust/hg-cpython/src/dirstate/non_normal_entries.rs Mon Apr 13 16:30:13 2020 +0300
+++ b/rust/hg-cpython/src/dirstate/non_normal_entries.rs Thu Apr 16 22:51:09 2020 +0530
@@ -6,11 +6,15 @@
// GNU General Public License version 2 or any later version.
use cpython::{
- exc::NotImplementedError, CompareOp, ObjectProtocol, PyErr, PyList,
- PyObject, PyResult, PyString, Python, PythonObject, ToPyObject,
+ exc::NotImplementedError, CompareOp, ObjectProtocol, PyBytes, PyClone,
+ PyErr, PyList, PyObject, PyResult, PyString, Python, PythonObject,
+ ToPyObject, UnsafePyLeaked,
};
use crate::dirstate::DirstateMap;
+use hg::utils::hg_path::HgPathBuf;
+use std::cell::RefCell;
+use std::collections::hash_set;
py_class!(pub class NonNormalEntries |py| {
data dmap: DirstateMap;
@@ -34,6 +38,10 @@
def __repr__(&self) -> PyResult<PyString> {
self.dmap(py).non_normal_entries_display(py)
}
+
+ def __iter__(&self) -> PyResult<NonNormalEntriesIterator> {
+ self.dmap(py).non_normal_entries_iter(py)
+ }
});
impl NonNormalEntries {
@@ -49,4 +57,20 @@
}
Ok(true)
}
+
+ fn translate_key(
+ py: Python,
+ key: &HgPathBuf,
+ ) -> PyResult<Option<PyBytes>> {
+ Ok(Some(PyBytes::new(py, key.as_ref())))
+ }
}
+
+type NonNormalEntriesIter<'a> = hash_set::Iter<'a, HgPathBuf>;
+
+py_shared_iterator!(
+ NonNormalEntriesIterator,
+ UnsafePyLeaked<NonNormalEntriesIter<'static>>,
+ NonNormalEntries::translate_key,
+ Option<PyBytes>
+);
--- a/rust/hg-cpython/src/dirstate/status.rs Mon Apr 13 16:30:13 2020 +0300
+++ b/rust/hg-cpython/src/dirstate/status.rs Thu Apr 16 22:51:09 2020 +0530
@@ -9,33 +9,34 @@
//! `hg-core` crate. From Python, this will be seen as
//! `rustext.dirstate.status`.
-use crate::dirstate::DirstateMap;
-use cpython::exc::ValueError;
+use crate::{dirstate::DirstateMap, exceptions::FallbackError};
use cpython::{
- ObjectProtocol, PyBytes, PyErr, PyList, PyObject, PyResult, PyTuple,
- Python, PythonObject, ToPyObject,
+ exc::ValueError, ObjectProtocol, PyBytes, PyErr, PyList, PyObject,
+ PyResult, PyTuple, Python, PythonObject, ToPyObject,
};
-use hg::utils::hg_path::HgPathBuf;
use hg::{
- matchers::{AlwaysMatcher, FileMatcher},
- status,
- utils::{files::get_path_from_bytes, hg_path::HgPath},
- StatusResult,
+ matchers::{AlwaysMatcher, FileMatcher, IncludeMatcher},
+ parse_pattern_syntax, status,
+ utils::{
+ files::{get_bytes_from_path, get_path_from_bytes},
+ hg_path::{HgPath, HgPathBuf},
+ },
+ BadMatch, DirstateStatus, IgnorePattern, PatternFileWarning, StatusError,
+ StatusOptions,
};
-use std::borrow::Borrow;
+use std::borrow::{Borrow, Cow};
/// This will be useless once trait impls for collection are added to `PyBytes`
/// upstream.
-fn collect_pybytes_list<P: AsRef<HgPath>>(
+fn collect_pybytes_list(
py: Python,
- collection: &[P],
+ collection: &[impl AsRef<HgPath>],
) -> PyList {
let list = PyList::new(py, &[]);
- for (i, path) in collection.iter().enumerate() {
- list.insert_item(
+ for path in collection.iter() {
+ list.append(
py,
- i,
PyBytes::new(py, path.as_ref().as_bytes()).into_object(),
)
}
@@ -43,34 +44,100 @@
list
}
+fn collect_bad_matches(
+ py: Python,
+ collection: &[(impl AsRef<HgPath>, BadMatch)],
+) -> PyResult<PyList> {
+ let list = PyList::new(py, &[]);
+
+ let os = py.import("os")?;
+ let get_error_message = |code: i32| -> PyResult<_> {
+ os.call(
+ py,
+ "strerror",
+ PyTuple::new(py, &[code.to_py_object(py).into_object()]),
+ None,
+ )
+ };
+
+ for (path, bad_match) in collection.iter() {
+ let message = match bad_match {
+ BadMatch::OsError(code) => get_error_message(*code)?,
+ BadMatch::BadType(bad_type) => format!(
+ "unsupported file type (type is {})",
+ bad_type.to_string()
+ )
+ .to_py_object(py)
+ .into_object(),
+ };
+ list.append(
+ py,
+ (PyBytes::new(py, path.as_ref().as_bytes()), message)
+ .to_py_object(py)
+ .into_object(),
+ )
+ }
+
+ Ok(list)
+}
+
+fn handle_fallback(py: Python, err: StatusError) -> PyErr {
+ match err {
+ StatusError::Pattern(e) => {
+ let as_string = e.to_string();
+ log::trace!("Rust status fallback: `{}`", &as_string);
+
+ PyErr::new::<FallbackError, _>(py, &as_string)
+ }
+ e => PyErr::new::<ValueError, _>(py, e.to_string()),
+ }
+}
+
pub fn status_wrapper(
py: Python,
dmap: DirstateMap,
matcher: PyObject,
root_dir: PyObject,
- list_clean: bool,
+ ignore_files: PyList,
+ check_exec: bool,
last_normal_time: i64,
- check_exec: bool,
-) -> PyResult<(PyList, PyList, PyList, PyList, PyList, PyList, PyList)> {
+ list_clean: bool,
+ list_ignored: bool,
+ list_unknown: bool,
+) -> PyResult<PyTuple> {
let bytes = root_dir.extract::<PyBytes>(py)?;
let root_dir = get_path_from_bytes(bytes.data(py));
let dmap: DirstateMap = dmap.to_py_object(py);
let dmap = dmap.get_inner(py);
+ let ignore_files: PyResult<Vec<_>> = ignore_files
+ .iter(py)
+ .map(|b| {
+ let file = b.extract::<PyBytes>(py)?;
+ Ok(get_path_from_bytes(file.data(py)).to_owned())
+ })
+ .collect();
+ let ignore_files = ignore_files?;
+
match matcher.get_type(py).name(py).borrow() {
"alwaysmatcher" => {
let matcher = AlwaysMatcher;
- let (lookup, status_res) = status(
+ let ((lookup, status_res), warnings) = status(
&dmap,
&matcher,
&root_dir,
- list_clean,
- last_normal_time,
- check_exec,
+ ignore_files,
+ StatusOptions {
+ check_exec,
+ last_normal_time,
+ list_clean,
+ list_ignored,
+ list_unknown,
+ },
)
- .map_err(|e| PyErr::new::<ValueError, _>(py, e.to_string()))?;
- build_response(lookup, status_res, py)
+ .map_err(|e| handle_fallback(py, e))?;
+ build_response(py, lookup, status_res, warnings)
}
"exactmatcher" => {
let files = matcher.call_method(
@@ -92,16 +159,78 @@
let files = files?;
let matcher = FileMatcher::new(&files)
.map_err(|e| PyErr::new::<ValueError, _>(py, e.to_string()))?;
- let (lookup, status_res) = status(
+ let ((lookup, status_res), warnings) = status(
&dmap,
&matcher,
&root_dir,
- list_clean,
- last_normal_time,
- check_exec,
+ ignore_files,
+ StatusOptions {
+ check_exec,
+ last_normal_time,
+ list_clean,
+ list_ignored,
+ list_unknown,
+ },
)
- .map_err(|e| PyErr::new::<ValueError, _>(py, e.to_string()))?;
- build_response(lookup, status_res, py)
+ .map_err(|e| handle_fallback(py, e))?;
+ build_response(py, lookup, status_res, warnings)
+ }
+ "includematcher" => {
+ // Get the patterns from Python even though most of them are
+ // redundant with those we will parse later on, as they include
+ // those passed from the command line.
+ let ignore_patterns: PyResult<Vec<_>> = matcher
+ .getattr(py, "_kindpats")?
+ .iter(py)?
+ .map(|k| {
+ let k = k?;
+ let syntax = parse_pattern_syntax(
+ &[
+ k.get_item(py, 0)?
+ .extract::<PyBytes>(py)?
+ .data(py),
+ &b":"[..],
+ ]
+ .concat(),
+ )
+ .map_err(|e| {
+ handle_fallback(py, StatusError::Pattern(e))
+ })?;
+ let pattern = k.get_item(py, 1)?.extract::<PyBytes>(py)?;
+ let pattern = pattern.data(py);
+ let source = k.get_item(py, 2)?.extract::<PyBytes>(py)?;
+ let source = get_path_from_bytes(source.data(py));
+ let new = IgnorePattern::new(syntax, pattern, source);
+ Ok(new)
+ })
+ .collect();
+
+ let ignore_patterns = ignore_patterns?;
+ let mut all_warnings = vec![];
+
+ let (matcher, warnings) =
+ IncludeMatcher::new(ignore_patterns, &root_dir)
+ .map_err(|e| handle_fallback(py, e.into()))?;
+ all_warnings.extend(warnings);
+
+ let ((lookup, status_res), warnings) = status(
+ &dmap,
+ &matcher,
+ &root_dir,
+ ignore_files,
+ StatusOptions {
+ check_exec,
+ last_normal_time,
+ list_clean,
+ list_ignored,
+ list_unknown,
+ },
+ )
+ .map_err(|e| handle_fallback(py, e))?;
+
+ all_warnings.extend(warnings);
+
+ build_response(py, lookup, status_res, all_warnings)
}
e => {
return Err(PyErr::new::<ValueError, _>(
@@ -113,17 +242,56 @@
}
fn build_response(
- lookup: Vec<&HgPath>,
- status_res: StatusResult,
py: Python,
-) -> PyResult<(PyList, PyList, PyList, PyList, PyList, PyList, PyList)> {
+ lookup: Vec<Cow<HgPath>>,
+ status_res: DirstateStatus,
+ warnings: Vec<PatternFileWarning>,
+) -> PyResult<PyTuple> {
let modified = collect_pybytes_list(py, status_res.modified.as_ref());
let added = collect_pybytes_list(py, status_res.added.as_ref());
let removed = collect_pybytes_list(py, status_res.removed.as_ref());
let deleted = collect_pybytes_list(py, status_res.deleted.as_ref());
let clean = collect_pybytes_list(py, status_res.clean.as_ref());
+ let ignored = collect_pybytes_list(py, status_res.ignored.as_ref());
+ let unknown = collect_pybytes_list(py, status_res.unknown.as_ref());
let lookup = collect_pybytes_list(py, lookup.as_ref());
- let unknown = PyList::new(py, &[]);
+ let bad = collect_bad_matches(py, status_res.bad.as_ref())?;
+ let py_warnings = PyList::new(py, &[]);
+ for warning in warnings.iter() {
+ // We use duck-typing on the Python side for dispatch, good enough for
+ // now.
+ match warning {
+ PatternFileWarning::InvalidSyntax(file, syn) => {
+ py_warnings.append(
+ py,
+ (
+ PyBytes::new(py, &get_bytes_from_path(&file)),
+ PyBytes::new(py, syn),
+ )
+ .to_py_object(py)
+ .into_object(),
+ );
+ }
+ PatternFileWarning::NoSuchFile(file) => py_warnings.append(
+ py,
+ PyBytes::new(py, &get_bytes_from_path(&file)).into_object(),
+ ),
+ }
+ }
- Ok((lookup, modified, added, removed, deleted, unknown, clean))
+ Ok(PyTuple::new(
+ py,
+ &[
+ lookup.into_object(),
+ modified.into_object(),
+ added.into_object(),
+ removed.into_object(),
+ deleted.into_object(),
+ clean.into_object(),
+ ignored.into_object(),
+ unknown.into_object(),
+ py_warnings.into_object(),
+ bad.into_object(),
+ ][..],
+ ))
}
--- a/rust/hg-cpython/src/exceptions.rs Mon Apr 13 16:30:13 2020 +0300
+++ b/rust/hg-cpython/src/exceptions.rs Thu Apr 16 22:51:09 2020 +0530
@@ -13,7 +13,7 @@
//!
//! [`GraphError`]: struct.GraphError.html
use cpython::{
- exc::{IOError, RuntimeError, ValueError},
+ exc::{RuntimeError, ValueError},
py_exception, PyErr, Python,
};
use hg;
@@ -39,34 +39,6 @@
}
}
-py_exception!(rustext, PatternError, RuntimeError);
-py_exception!(rustext, PatternFileError, RuntimeError);
py_exception!(rustext, HgPathPyError, RuntimeError);
-
-impl PatternError {
- pub fn pynew(py: Python, inner: hg::PatternError) -> PyErr {
- match inner {
- hg::PatternError::UnsupportedSyntax(m) => {
- PatternError::new(py, ("PatternError", m))
- }
- }
- }
-}
-
-impl PatternFileError {
- pub fn pynew(py: Python, inner: hg::PatternFileError) -> PyErr {
- match inner {
- hg::PatternFileError::IO(e) => {
- let value = (e.raw_os_error().unwrap_or(2), e.to_string());
- PyErr::new::<IOError, _>(py, value)
- }
- hg::PatternFileError::Pattern(e, l) => match e {
- hg::PatternError::UnsupportedSyntax(m) => {
- PatternFileError::new(py, ("PatternFileError", m, l))
- }
- },
- }
- }
-}
-
+py_exception!(rustext, FallbackError, RuntimeError);
py_exception!(shared_ref, AlreadyBorrowed, RuntimeError);
--- a/rust/hg-cpython/src/filepatterns.rs Mon Apr 13 16:30:13 2020 +0300
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,133 +0,0 @@
-// filepatterns.rs
-//
-// Copyright 2019, Georges Racinet <gracinet@anybox.fr>,
-// Raphaël Gomès <rgomes@octobus.net>
-//
-// This software may be used and distributed according to the terms of the
-// GNU General Public License version 2 or any later version.
-
-//! Bindings for the `hg::filepatterns` module provided by the
-//! `hg-core` crate. From Python, this will be seen as `rustext.filepatterns`
-//! and can be used as replacement for the the pure `filepatterns` Python
-//! module.
-use crate::exceptions::{PatternError, PatternFileError};
-use cpython::{
- PyBytes, PyDict, PyModule, PyObject, PyResult, PyTuple, Python, ToPyObject,
-};
-use hg::utils::files;
-use hg::{build_single_regex, read_pattern_file, LineNumber, PatternTuple};
-use std::path::PathBuf;
-
-/// Rust does not like functions with different return signatures.
-/// The 3-tuple version is always returned by the hg-core function,
-/// the (potential) conversion is handled at this level since it is not likely
-/// to have any measurable impact on performance.
-///
-/// The Python implementation passes a function reference for `warn` instead
-/// of a boolean that is used to emit warnings while parsing. The Rust
-/// implementation chooses to accumulate the warnings and propagate them to
-/// Python upon completion. See the `readpatternfile` function in `match.py`
-/// for more details.
-fn read_pattern_file_wrapper(
- py: Python,
- file_path: PyObject,
- warn: bool,
- source_info: bool,
-) -> PyResult<PyTuple> {
- let bytes = file_path.extract::<PyBytes>(py)?;
- let path = files::get_path_from_bytes(bytes.data(py));
- match read_pattern_file(path, warn) {
- Ok((patterns, warnings)) => {
- if source_info {
- let itemgetter = |x: &PatternTuple| {
- (PyBytes::new(py, &x.0), x.1, PyBytes::new(py, &x.2))
- };
- let results: Vec<(PyBytes, LineNumber, PyBytes)> =
- patterns.iter().map(itemgetter).collect();
- return Ok((results, warnings_to_py_bytes(py, &warnings))
- .to_py_object(py));
- }
- let itemgetter = |x: &PatternTuple| PyBytes::new(py, &x.0);
- let results: Vec<PyBytes> =
- patterns.iter().map(itemgetter).collect();
- Ok(
- (results, warnings_to_py_bytes(py, &warnings))
- .to_py_object(py),
- )
- }
- Err(e) => Err(PatternFileError::pynew(py, e)),
- }
-}
-
-fn warnings_to_py_bytes(
- py: Python,
- warnings: &[(PathBuf, Vec<u8>)],
-) -> Vec<(PyBytes, PyBytes)> {
- warnings
- .iter()
- .map(|(path, syn)| {
- (
- PyBytes::new(py, &files::get_bytes_from_path(path)),
- PyBytes::new(py, syn),
- )
- })
- .collect()
-}
-
-fn build_single_regex_wrapper(
- py: Python,
- kind: PyObject,
- pat: PyObject,
- globsuffix: PyObject,
-) -> PyResult<PyBytes> {
- match build_single_regex(
- kind.extract::<PyBytes>(py)?.data(py),
- pat.extract::<PyBytes>(py)?.data(py),
- globsuffix.extract::<PyBytes>(py)?.data(py),
- ) {
- Ok(regex) => Ok(PyBytes::new(py, ®ex)),
- Err(e) => Err(PatternError::pynew(py, e)),
- }
-}
-
-pub fn init_module(py: Python, package: &str) -> PyResult<PyModule> {
- let dotted_name = &format!("{}.filepatterns", package);
- let m = PyModule::new(py, dotted_name)?;
-
- m.add(py, "__package__", package)?;
- m.add(
- py,
- "__doc__",
- "Patterns files parsing - Rust implementation",
- )?;
- m.add(
- py,
- "build_single_regex",
- py_fn!(
- py,
- build_single_regex_wrapper(
- kind: PyObject,
- pat: PyObject,
- globsuffix: PyObject
- )
- ),
- )?;
- m.add(
- py,
- "read_pattern_file",
- py_fn!(
- py,
- read_pattern_file_wrapper(
- file_path: PyObject,
- warn: bool,
- source_info: bool
- )
- ),
- )?;
- m.add(py, "PatternError", py.get_type::<PatternError>())?;
- let sys = PyModule::import(py, "sys")?;
- let sys_modules: PyDict = sys.get(py, "modules")?.extract(py)?;
- sys_modules.set_item(py, dotted_name, &m)?;
-
- Ok(m)
-}
--- a/rust/hg-cpython/src/lib.rs Mon Apr 13 16:30:13 2020 +0300
+++ b/rust/hg-cpython/src/lib.rs Thu Apr 16 22:51:09 2020 +0530
@@ -30,10 +30,10 @@
#[macro_use]
pub mod ref_sharing;
pub mod dagops;
+pub mod debug;
pub mod dirstate;
pub mod discovery;
pub mod exceptions;
-pub mod filepatterns;
pub mod parsers;
pub mod revlog;
pub mod utils;
@@ -48,30 +48,16 @@
let dotted_name: String = m.get(py, "__name__")?.extract(py)?;
m.add(py, "ancestor", ancestors::init_module(py, &dotted_name)?)?;
m.add(py, "dagop", dagops::init_module(py, &dotted_name)?)?;
+ m.add(py, "debug", debug::init_module(py, &dotted_name)?)?;
m.add(py, "discovery", discovery::init_module(py, &dotted_name)?)?;
m.add(py, "dirstate", dirstate::init_module(py, &dotted_name)?)?;
m.add(py, "revlog", revlog::init_module(py, &dotted_name)?)?;
m.add(
py,
- "filepatterns",
- filepatterns::init_module(py, &dotted_name)?,
- )?;
- m.add(
- py,
"parsers",
parsers::init_parsers_module(py, &dotted_name)?,
)?;
m.add(py, "GraphError", py.get_type::<exceptions::GraphError>())?;
- m.add(
- py,
- "PatternFileError",
- py.get_type::<exceptions::PatternFileError>(),
- )?;
- m.add(
- py,
- "PatternError",
- py.get_type::<exceptions::PatternError>(),
- )?;
Ok(())
});
--- a/rust/hg-cpython/src/ref_sharing.rs Mon Apr 13 16:30:13 2020 +0300
+++ b/rust/hg-cpython/src/ref_sharing.rs Thu Apr 16 22:51:09 2020 +0530
@@ -22,413 +22,6 @@
//! Macros for use in the `hg-cpython` bridge library.
-use crate::exceptions::AlreadyBorrowed;
-use cpython::{exc, PyClone, PyErr, PyObject, PyResult, Python};
-use std::cell::{Ref, RefCell, RefMut};
-use std::ops::{Deref, DerefMut};
-use std::sync::atomic::{AtomicUsize, Ordering};
-
-/// Manages the shared state between Python and Rust
-///
-/// `PySharedState` is owned by `PySharedRefCell`, and is shared across its
-/// derived references. The consistency of these references are guaranteed
-/// as follows:
-///
-/// - The immutability of `py_class!` object fields. Any mutation of
-/// `PySharedRefCell` is allowed only through its `borrow_mut()`.
-/// - The `py: Python<'_>` token, which makes sure that any data access is
-/// synchronized by the GIL.
-/// - The underlying `RefCell`, which prevents `PySharedRefCell` data from
-/// being directly borrowed or leaked while it is mutably borrowed.
-/// - The `borrow_count`, which is the number of references borrowed from
-/// `PyLeaked`. Just like `RefCell`, mutation is prohibited while `PyLeaked`
-/// is borrowed.
-/// - The `generation` counter, which increments on `borrow_mut()`. `PyLeaked`
-/// reference is valid only if the `current_generation()` equals to the
-/// `generation` at the time of `leak_immutable()`.
-#[derive(Debug, Default)]
-struct PySharedState {
- // The counter variable could be Cell<usize> since any operation on
- // PySharedState is synchronized by the GIL, but being "atomic" makes
- // PySharedState inherently Sync. The ordering requirement doesn't
- // matter thanks to the GIL.
- borrow_count: AtomicUsize,
- generation: AtomicUsize,
-}
-
-impl PySharedState {
- fn borrow_mut<'a, T>(
- &'a self,
- py: Python<'a>,
- pyrefmut: RefMut<'a, T>,
- ) -> PyResult<RefMut<'a, T>> {
- match self.current_borrow_count(py) {
- 0 => {
- // Note that this wraps around to the same value if mutably
- // borrowed more than usize::MAX times, which wouldn't happen
- // in practice.
- self.generation.fetch_add(1, Ordering::Relaxed);
- Ok(pyrefmut)
- }
- _ => Err(AlreadyBorrowed::new(
- py,
- "Cannot borrow mutably while immutably borrowed",
- )),
- }
- }
-
- /// Return a reference to the wrapped data and its state with an
- /// artificial static lifetime.
- /// We need to be protected by the GIL for thread-safety.
- ///
- /// # Safety
- ///
- /// This is highly unsafe since the lifetime of the given data can be
- /// extended. Do not call this function directly.
- unsafe fn leak_immutable<T>(
- &self,
- _py: Python,
- data: Ref<T>,
- ) -> (&'static T, &'static PySharedState) {
- let ptr: *const T = &*data;
- let state_ptr: *const PySharedState = self;
- (&*ptr, &*state_ptr)
- }
-
- fn current_borrow_count(&self, _py: Python) -> usize {
- self.borrow_count.load(Ordering::Relaxed)
- }
-
- fn increase_borrow_count(&self, _py: Python) {
- // Note that this wraps around if there are more than usize::MAX
- // borrowed references, which shouldn't happen due to memory limit.
- self.borrow_count.fetch_add(1, Ordering::Relaxed);
- }
-
- fn decrease_borrow_count(&self, _py: Python) {
- let prev_count = self.borrow_count.fetch_sub(1, Ordering::Relaxed);
- assert!(prev_count > 0);
- }
-
- fn current_generation(&self, _py: Python) -> usize {
- self.generation.load(Ordering::Relaxed)
- }
-}
-
-/// Helper to keep the borrow count updated while the shared object is
-/// immutably borrowed without using the `RefCell` interface.
-struct BorrowPyShared<'a> {
- py: Python<'a>,
- py_shared_state: &'a PySharedState,
-}
-
-impl<'a> BorrowPyShared<'a> {
- fn new(
- py: Python<'a>,
- py_shared_state: &'a PySharedState,
- ) -> BorrowPyShared<'a> {
- py_shared_state.increase_borrow_count(py);
- BorrowPyShared {
- py,
- py_shared_state,
- }
- }
-}
-
-impl Drop for BorrowPyShared<'_> {
- fn drop(&mut self) {
- self.py_shared_state.decrease_borrow_count(self.py);
- }
-}
-
-/// `RefCell` wrapper to be safely used in conjunction with `PySharedState`.
-///
-/// This object can be stored in a `py_class!` object as a data field. Any
-/// operation is allowed through the `PySharedRef` interface.
-#[derive(Debug)]
-pub struct PySharedRefCell<T> {
- inner: RefCell<T>,
- py_shared_state: PySharedState,
-}
-
-impl<T> PySharedRefCell<T> {
- pub fn new(value: T) -> PySharedRefCell<T> {
- Self {
- inner: RefCell::new(value),
- py_shared_state: PySharedState::default(),
- }
- }
-
- fn borrow<'a>(&'a self, _py: Python<'a>) -> Ref<'a, T> {
- // py_shared_state isn't involved since
- // - inner.borrow() would fail if self is mutably borrowed,
- // - and inner.borrow_mut() would fail while self is borrowed.
- self.inner.borrow()
- }
-
- // TODO: maybe this should be named as try_borrow_mut(), and use
- // inner.try_borrow_mut(). The current implementation panics if
- // self.inner has been borrowed, but returns error if py_shared_state
- // refuses to borrow.
- fn borrow_mut<'a>(&'a self, py: Python<'a>) -> PyResult<RefMut<'a, T>> {
- self.py_shared_state.borrow_mut(py, self.inner.borrow_mut())
- }
-}
-
-/// Sharable data member of type `T` borrowed from the `PyObject`.
-pub struct PySharedRef<'a, T> {
- py: Python<'a>,
- owner: &'a PyObject,
- data: &'a PySharedRefCell<T>,
-}
-
-impl<'a, T> PySharedRef<'a, T> {
- /// # Safety
- ///
- /// The `data` must be owned by the `owner`. Otherwise, the leak count
- /// would get wrong.
- pub unsafe fn new(
- py: Python<'a>,
- owner: &'a PyObject,
- data: &'a PySharedRefCell<T>,
- ) -> Self {
- Self { py, owner, data }
- }
-
- pub fn borrow(&self) -> Ref<'a, T> {
- self.data.borrow(self.py)
- }
-
- pub fn borrow_mut(&self) -> PyResult<RefMut<'a, T>> {
- self.data.borrow_mut(self.py)
- }
-
- /// Returns a leaked reference.
- ///
- /// # Panics
- ///
- /// Panics if this is mutably borrowed.
- pub fn leak_immutable(&self) -> PyLeaked<&'static T> {
- let state = &self.data.py_shared_state;
- // make sure self.data isn't mutably borrowed; otherwise the
- // generation number can't be trusted.
- let data_ref = self.borrow();
- unsafe {
- let (static_ref, static_state_ref) =
- state.leak_immutable(self.py, data_ref);
- PyLeaked::new(self.py, self.owner, static_ref, static_state_ref)
- }
- }
-}
-
-/// Allows a `py_class!` generated struct to share references to one of its
-/// data members with Python.
-///
-/// # Parameters
-///
-/// * `$name` is the same identifier used in for `py_class!` macro call.
-/// * `$inner_struct` is the identifier of the underlying Rust struct
-/// * `$data_member` is the identifier of the data member of `$inner_struct`
-/// that will be shared.
-/// * `$shared_accessor` is the function name to be generated, which allows
-/// safe access to the data member.
-///
-/// # Safety
-///
-/// `$data_member` must persist while the `$name` object is alive. In other
-/// words, it must be an accessor to a data field of the Python object.
-///
-/// # Example
-///
-/// ```
-/// struct MyStruct {
-/// inner: Vec<u32>;
-/// }
-///
-/// py_class!(pub class MyType |py| {
-/// data inner: PySharedRefCell<MyStruct>;
-/// });
-///
-/// py_shared_ref!(MyType, MyStruct, inner, inner_shared);
-/// ```
-macro_rules! py_shared_ref {
- (
- $name: ident,
- $inner_struct: ident,
- $data_member: ident,
- $shared_accessor: ident
- ) => {
- impl $name {
- /// Returns a safe reference to the shared `$data_member`.
- ///
- /// This function guarantees that `PySharedRef` is created with
- /// the valid `self` and `self.$data_member(py)` pair.
- fn $shared_accessor<'a>(
- &'a self,
- py: Python<'a>,
- ) -> $crate::ref_sharing::PySharedRef<'a, $inner_struct> {
- use cpython::PythonObject;
- use $crate::ref_sharing::PySharedRef;
- let owner = self.as_object();
- let data = self.$data_member(py);
- unsafe { PySharedRef::new(py, owner, data) }
- }
- }
- };
-}
-
-/// Manage immutable references to `PyObject` leaked into Python iterators.
-///
-/// This reference will be invalidated once the original value is mutably
-/// borrowed.
-pub struct PyLeaked<T> {
- inner: PyObject,
- data: Option<T>,
- py_shared_state: &'static PySharedState,
- /// Generation counter of data `T` captured when PyLeaked is created.
- generation: usize,
-}
-
-// DO NOT implement Deref for PyLeaked<T>! Dereferencing PyLeaked
-// without taking Python GIL wouldn't be safe. Also, the underling reference
-// is invalid if generation != py_shared_state.generation.
-
-impl<T> PyLeaked<T> {
- /// # Safety
- ///
- /// The `py_shared_state` must be owned by the `inner` Python object.
- fn new(
- py: Python,
- inner: &PyObject,
- data: T,
- py_shared_state: &'static PySharedState,
- ) -> Self {
- Self {
- inner: inner.clone_ref(py),
- data: Some(data),
- py_shared_state,
- generation: py_shared_state.current_generation(py),
- }
- }
-
- /// Immutably borrows the wrapped value.
- ///
- /// Borrowing fails if the underlying reference has been invalidated.
- pub fn try_borrow<'a>(
- &'a self,
- py: Python<'a>,
- ) -> PyResult<PyLeakedRef<'a, T>> {
- self.validate_generation(py)?;
- Ok(PyLeakedRef {
- _borrow: BorrowPyShared::new(py, self.py_shared_state),
- data: self.data.as_ref().unwrap(),
- })
- }
-
- /// Mutably borrows the wrapped value.
- ///
- /// Borrowing fails if the underlying reference has been invalidated.
- ///
- /// Typically `T` is an iterator. If `T` is an immutable reference,
- /// `get_mut()` is useless since the inner value can't be mutated.
- pub fn try_borrow_mut<'a>(
- &'a mut self,
- py: Python<'a>,
- ) -> PyResult<PyLeakedRefMut<'a, T>> {
- self.validate_generation(py)?;
- Ok(PyLeakedRefMut {
- _borrow: BorrowPyShared::new(py, self.py_shared_state),
- data: self.data.as_mut().unwrap(),
- })
- }
-
- /// Converts the inner value by the given function.
- ///
- /// Typically `T` is a static reference to a container, and `U` is an
- /// iterator of that container.
- ///
- /// # Panics
- ///
- /// Panics if the underlying reference has been invalidated.
- ///
- /// This is typically called immediately after the `PyLeaked` is obtained.
- /// In which case, the reference must be valid and no panic would occur.
- ///
- /// # Safety
- ///
- /// The lifetime of the object passed in to the function `f` is cheated.
- /// It's typically a static reference, but is valid only while the
- /// corresponding `PyLeaked` is alive. Do not copy it out of the
- /// function call.
- pub unsafe fn map<U>(
- mut self,
- py: Python,
- f: impl FnOnce(T) -> U,
- ) -> PyLeaked<U> {
- // Needs to test the generation value to make sure self.data reference
- // is still intact.
- self.validate_generation(py)
- .expect("map() over invalidated leaked reference");
-
- // f() could make the self.data outlive. That's why map() is unsafe.
- // In order to make this function safe, maybe we'll need a way to
- // temporarily restrict the lifetime of self.data and translate the
- // returned object back to Something<'static>.
- let new_data = f(self.data.take().unwrap());
- PyLeaked {
- inner: self.inner.clone_ref(py),
- data: Some(new_data),
- py_shared_state: self.py_shared_state,
- generation: self.generation,
- }
- }
-
- fn validate_generation(&self, py: Python) -> PyResult<()> {
- if self.py_shared_state.current_generation(py) == self.generation {
- Ok(())
- } else {
- Err(PyErr::new::<exc::RuntimeError, _>(
- py,
- "Cannot access to leaked reference after mutation",
- ))
- }
- }
-}
-
-/// Immutably borrowed reference to a leaked value.
-pub struct PyLeakedRef<'a, T> {
- _borrow: BorrowPyShared<'a>,
- data: &'a T,
-}
-
-impl<T> Deref for PyLeakedRef<'_, T> {
- type Target = T;
-
- fn deref(&self) -> &T {
- self.data
- }
-}
-
-/// Mutably borrowed reference to a leaked value.
-pub struct PyLeakedRefMut<'a, T> {
- _borrow: BorrowPyShared<'a>,
- data: &'a mut T,
-}
-
-impl<T> Deref for PyLeakedRefMut<'_, T> {
- type Target = T;
-
- fn deref(&self) -> &T {
- self.data
- }
-}
-
-impl<T> DerefMut for PyLeakedRefMut<'_, T> {
- fn deref_mut(&mut self) -> &mut T {
- self.data
- }
-}
-
/// Defines a `py_class!` that acts as a Python iterator over a Rust iterator.
///
/// TODO: this is a bit awkward to use, and a better (more complicated)
@@ -437,12 +30,18 @@
/// # Parameters
///
/// * `$name` is the identifier to give to the resulting Rust struct.
-/// * `$leaked` corresponds to `$leaked` in the matching `py_shared_ref!` call.
+/// * `$leaked` corresponds to `UnsafePyLeaked` in the matching `@shared data`
+/// declaration.
/// * `$iterator_type` is the type of the Rust iterator.
/// * `$success_func` is a function for processing the Rust `(key, value)`
/// tuple on iteration success, turning it into something Python understands.
/// * `$success_func` is the return type of `$success_func`
///
+/// # Safety
+///
+/// `$success_func` may take a reference, but its lifetime may be cheated.
+/// Do not copy it out of the function call.
+///
/// # Example
///
/// ```
@@ -451,7 +50,7 @@
/// }
///
/// py_class!(pub class MyType |py| {
-/// data inner: PySharedRefCell<MyStruct>;
+/// @shared data inner: MyStruct;
///
/// def __iter__(&self) -> PyResult<MyTypeItemsIterator> {
/// let leaked_ref = self.inner_shared(py).leak_immutable();
@@ -475,11 +74,9 @@
/// }
/// }
///
-/// py_shared_ref!(MyType, MyStruct, inner, MyTypeLeakedRef);
-///
/// py_shared_iterator!(
/// MyTypeItemsIterator,
-/// PyLeaked<HashMap<'static, Vec<u8>, Vec<u8>>>,
+/// UnsafePyLeaked<HashMap<'static, Vec<u8>, Vec<u8>>>,
/// MyType::translate_key_value,
/// Option<(PyBytes, PyBytes)>
/// );
@@ -496,9 +93,10 @@
def __next__(&self) -> PyResult<$success_type> {
let mut leaked = self.inner(py).borrow_mut();
- let mut iter = leaked.try_borrow_mut(py)?;
+ let mut iter = unsafe { leaked.try_borrow_mut(py)? };
match iter.next() {
None => Ok(None),
+ // res may be a reference of cheated 'static lifetime
Some(res) => $success_func(py, res),
}
}
@@ -521,116 +119,3 @@
}
};
}
-
-#[cfg(test)]
-#[cfg(any(feature = "python27-bin", feature = "python3-bin"))]
-mod test {
- use super::*;
- use cpython::{GILGuard, Python};
-
- py_class!(class Owner |py| {
- data string: PySharedRefCell<String>;
- });
- py_shared_ref!(Owner, String, string, string_shared);
-
- fn prepare_env() -> (GILGuard, Owner) {
- let gil = Python::acquire_gil();
- let py = gil.python();
- let owner =
- Owner::create_instance(py, PySharedRefCell::new("new".to_owned()))
- .unwrap();
- (gil, owner)
- }
-
- #[test]
- fn test_leaked_borrow() {
- let (gil, owner) = prepare_env();
- let py = gil.python();
- let leaked = owner.string_shared(py).leak_immutable();
- let leaked_ref = leaked.try_borrow(py).unwrap();
- assert_eq!(*leaked_ref, "new");
- }
-
- #[test]
- fn test_leaked_borrow_mut() {
- let (gil, owner) = prepare_env();
- let py = gil.python();
- let leaked = owner.string_shared(py).leak_immutable();
- let mut leaked_iter = unsafe { leaked.map(py, |s| s.chars()) };
- let mut leaked_ref = leaked_iter.try_borrow_mut(py).unwrap();
- assert_eq!(leaked_ref.next(), Some('n'));
- assert_eq!(leaked_ref.next(), Some('e'));
- assert_eq!(leaked_ref.next(), Some('w'));
- assert_eq!(leaked_ref.next(), None);
- }
-
- #[test]
- fn test_leaked_borrow_after_mut() {
- let (gil, owner) = prepare_env();
- let py = gil.python();
- let leaked = owner.string_shared(py).leak_immutable();
- owner.string_shared(py).borrow_mut().unwrap().clear();
- assert!(leaked.try_borrow(py).is_err());
- }
-
- #[test]
- fn test_leaked_borrow_mut_after_mut() {
- let (gil, owner) = prepare_env();
- let py = gil.python();
- let leaked = owner.string_shared(py).leak_immutable();
- let mut leaked_iter = unsafe { leaked.map(py, |s| s.chars()) };
- owner.string_shared(py).borrow_mut().unwrap().clear();
- assert!(leaked_iter.try_borrow_mut(py).is_err());
- }
-
- #[test]
- #[should_panic(expected = "map() over invalidated leaked reference")]
- fn test_leaked_map_after_mut() {
- let (gil, owner) = prepare_env();
- let py = gil.python();
- let leaked = owner.string_shared(py).leak_immutable();
- owner.string_shared(py).borrow_mut().unwrap().clear();
- let _leaked_iter = unsafe { leaked.map(py, |s| s.chars()) };
- }
-
- #[test]
- fn test_borrow_mut_while_leaked_ref() {
- let (gil, owner) = prepare_env();
- let py = gil.python();
- assert!(owner.string_shared(py).borrow_mut().is_ok());
- let leaked = owner.string_shared(py).leak_immutable();
- {
- let _leaked_ref = leaked.try_borrow(py).unwrap();
- assert!(owner.string_shared(py).borrow_mut().is_err());
- {
- let _leaked_ref2 = leaked.try_borrow(py).unwrap();
- assert!(owner.string_shared(py).borrow_mut().is_err());
- }
- assert!(owner.string_shared(py).borrow_mut().is_err());
- }
- assert!(owner.string_shared(py).borrow_mut().is_ok());
- }
-
- #[test]
- fn test_borrow_mut_while_leaked_ref_mut() {
- let (gil, owner) = prepare_env();
- let py = gil.python();
- assert!(owner.string_shared(py).borrow_mut().is_ok());
- let leaked = owner.string_shared(py).leak_immutable();
- let mut leaked_iter = unsafe { leaked.map(py, |s| s.chars()) };
- {
- let _leaked_ref = leaked_iter.try_borrow_mut(py).unwrap();
- assert!(owner.string_shared(py).borrow_mut().is_err());
- }
- assert!(owner.string_shared(py).borrow_mut().is_ok());
- }
-
- #[test]
- #[should_panic(expected = "mutably borrowed")]
- fn test_leak_while_borrow_mut() {
- let (gil, owner) = prepare_env();
- let py = gil.python();
- let _mut_ref = owner.string_shared(py).borrow_mut();
- owner.string_shared(py).leak_immutable();
- }
-}
--- a/rust/hg-cpython/src/revlog.rs Mon Apr 13 16:30:13 2020 +0300
+++ b/rust/hg-cpython/src/revlog.rs Thu Apr 16 22:51:09 2020 +0530
@@ -1,16 +1,25 @@
// revlog.rs
//
-// Copyright 2019 Georges Racinet <georges.racinet@octobus.net>
+// Copyright 2019-2020 Georges Racinet <georges.racinet@octobus.net>
//
// This software may be used and distributed according to the terms of the
// GNU General Public License version 2 or any later version.
-use crate::cindex;
+use crate::{
+ cindex,
+ utils::{node_from_py_bytes, node_from_py_object},
+};
use cpython::{
- ObjectProtocol, PyClone, PyDict, PyModule, PyObject, PyResult, PyTuple,
- Python, PythonObject, ToPyObject,
+ buffer::{Element, PyBuffer},
+ exc::{IndexError, ValueError},
+ ObjectProtocol, PyBytes, PyClone, PyDict, PyErr, PyModule, PyObject,
+ PyResult, PyString, PyTuple, Python, PythonObject, ToPyObject,
};
-use hg::Revision;
+use hg::{
+ nodemap::{Block, NodeMapError, NodeTree},
+ revlog::{nodemap::NodeMap, RevlogIndex},
+ NodeError, Revision,
+};
use std::cell::RefCell;
/// Return a Struct implementing the Graph trait
@@ -26,10 +35,13 @@
py_class!(pub class MixedIndex |py| {
data cindex: RefCell<cindex::Index>;
+ data nt: RefCell<Option<NodeTree>>;
+ data docket: RefCell<Option<PyObject>>;
+ // Holds a reference to the mmap'ed persistent nodemap data
+ data mmap: RefCell<Option<PyBuffer>>;
def __new__(_cls, cindex: PyObject) -> PyResult<MixedIndex> {
- Self::create_instance(py, RefCell::new(
- cindex::Index::new(py, cindex)?))
+ Self::new(py, cindex)
}
/// Compatibility layer used for Python consumers needing access to the C index
@@ -43,8 +55,99 @@
Ok(self.cindex(py).borrow().inner().clone_ref(py))
}
+ // Index API involving nodemap, as defined in mercurial/pure/parsers.py
+ /// Return Revision if found, raises a bare `error.RevlogError`
+ /// in case of ambiguity, same as C version does
+ def get_rev(&self, node: PyBytes) -> PyResult<Option<Revision>> {
+ let opt = self.get_nodetree(py)?.borrow();
+ let nt = opt.as_ref().unwrap();
+ let idx = &*self.cindex(py).borrow();
+ let node = node_from_py_bytes(py, &node)?;
+ nt.find_bin(idx, (&node).into()).map_err(|e| nodemap_error(py, e))
+ }
+
+ /// same as `get_rev()` but raises a bare `error.RevlogError` if node
+ /// is not found.
+ ///
+ /// No need to repeat `node` in the exception, `mercurial/revlog.py`
+ /// will catch and rewrap with it
+ def rev(&self, node: PyBytes) -> PyResult<Revision> {
+ self.get_rev(py, node)?.ok_or_else(|| revlog_error(py))
+ }
+
+ /// return True if the node exists in the index
+ def has_node(&self, node: PyBytes) -> PyResult<bool> {
+ self.get_rev(py, node).map(|opt| opt.is_some())
+ }
+
+ /// find length of shortest hex nodeid of a binary ID
+ def shortest(&self, node: PyBytes) -> PyResult<usize> {
+ let opt = self.get_nodetree(py)?.borrow();
+ let nt = opt.as_ref().unwrap();
+ let idx = &*self.cindex(py).borrow();
+ match nt.unique_prefix_len_node(idx, &node_from_py_bytes(py, &node)?)
+ {
+ Ok(Some(l)) => Ok(l),
+ Ok(None) => Err(revlog_error(py)),
+ Err(e) => Err(nodemap_error(py, e)),
+ }
+ }
+
+ def partialmatch(&self, node: PyObject) -> PyResult<Option<PyBytes>> {
+ let opt = self.get_nodetree(py)?.borrow();
+ let nt = opt.as_ref().unwrap();
+ let idx = &*self.cindex(py).borrow();
+
+ let node_as_string = if cfg!(feature = "python3-sys") {
+ node.cast_as::<PyString>(py)?.to_string(py)?.to_string()
+ }
+ else {
+ let node = node.extract::<PyBytes>(py)?;
+ String::from_utf8_lossy(node.data(py)).to_string()
+ };
+
+ nt.find_hex(idx, &node_as_string)
+ // TODO make an inner API returning the node directly
+ .map(|opt| opt.map(
+ |rev| PyBytes::new(py, idx.node(rev).unwrap().as_bytes())))
+ .map_err(|e| nodemap_error(py, e))
+
+ }
+
+ /// append an index entry
+ def append(&self, tup: PyTuple) -> PyResult<PyObject> {
+ if tup.len(py) < 8 {
+ // this is better than the panic promised by tup.get_item()
+ return Err(
+ PyErr::new::<IndexError, _>(py, "tuple index out of range"))
+ }
+ let node_bytes = tup.get_item(py, 7).extract(py)?;
+ let node = node_from_py_object(py, &node_bytes)?;
+
+ let mut idx = self.cindex(py).borrow_mut();
+ let rev = idx.len() as Revision;
+
+ idx.append(py, tup)?;
+ self.get_nodetree(py)?.borrow_mut().as_mut().unwrap()
+ .insert(&*idx, &node, rev)
+ .map_err(|e| nodemap_error(py, e))?;
+ Ok(py.None())
+ }
+
+ def __delitem__(&self, key: PyObject) -> PyResult<()> {
+ // __delitem__ is both for `del idx[r]` and `del idx[r1:r2]`
+ self.cindex(py).borrow().inner().del_item(py, key)?;
+ let mut opt = self.get_nodetree(py)?.borrow_mut();
+ let mut nt = opt.as_mut().unwrap();
+ nt.invalidate_all();
+ self.fill_nodemap(py, &mut nt)?;
+ Ok(())
+ }
+
+ //
// Reforwarded C index API
+ //
// index_methods (tp_methods). Same ordering as in revlog.c
@@ -58,8 +161,12 @@
self.call_cindex(py, "commonancestorsheads", args, kw)
}
- /// clear the index caches
+ /// Clear the index caches and inner py_class data.
+ /// It is Python's responsibility to call `update_nodemap_data` again.
def clearcaches(&self, *args, **kw) -> PyResult<PyObject> {
+ self.nt(py).borrow_mut().take();
+ self.docket(py).borrow_mut().take();
+ self.mmap(py).borrow_mut().take();
self.call_cindex(py, "clearcaches", args, kw)
}
@@ -68,21 +175,6 @@
self.call_cindex(py, "get", args, kw)
}
- /// return `rev` associated with a node or None
- def get_rev(&self, *args, **kw) -> PyResult<PyObject> {
- self.call_cindex(py, "get_rev", args, kw)
- }
-
- /// return True if the node exist in the index
- def has_node(&self, *args, **kw) -> PyResult<PyObject> {
- self.call_cindex(py, "has_node", args, kw)
- }
-
- /// return `rev` associated with a node or raise RevlogError
- def rev(&self, *args, **kw) -> PyResult<PyObject> {
- self.call_cindex(py, "rev", args, kw)
- }
-
/// compute phases
def computephasesmapsets(&self, *args, **kw) -> PyResult<PyObject> {
self.call_cindex(py, "computephasesmapsets", args, kw)
@@ -123,21 +215,6 @@
self.call_cindex(py, "slicechunktodensity", args, kw)
}
- /// append an index entry
- def append(&self, *args, **kw) -> PyResult<PyObject> {
- self.call_cindex(py, "append", args, kw)
- }
-
- /// match a potentially ambiguous node ID
- def partialmatch(&self, *args, **kw) -> PyResult<PyObject> {
- self.call_cindex(py, "partialmatch", args, kw)
- }
-
- /// find length of shortest hex nodeid of a binary ID
- def shortest(&self, *args, **kw) -> PyResult<PyObject> {
- self.call_cindex(py, "shortest", args, kw)
- }
-
/// stats for the index
def stats(&self, *args, **kw) -> PyResult<PyObject> {
self.call_cindex(py, "stats", args, kw)
@@ -158,7 +235,7 @@
// `index_getitem` does not handle conversion from PyLong,
// which expressions such as [e for e in index] internally use.
// Note that we don't seem to have a direct way to call
- // PySequence_GetItem (does the job), which would be better for
+ // PySequence_GetItem (does the job), which would possibly be better
// for performance
let key = match key.extract::<Revision>(py) {
Ok(rev) => rev.to_py_object(py).into_object(),
@@ -171,10 +248,6 @@
self.cindex(py).borrow().inner().set_item(py, key, value)
}
- def __delitem__(&self, key: PyObject) -> PyResult<()> {
- self.cindex(py).borrow().inner().del_item(py, key)
- }
-
def __contains__(&self, item: PyObject) -> PyResult<bool> {
// ObjectProtocol does not seem to provide contains(), so
// this is an equivalent implementation of the index_contains()
@@ -195,10 +268,66 @@
}
}
+ def nodemap_data_all(&self) -> PyResult<PyBytes> {
+ self.inner_nodemap_data_all(py)
+ }
+
+ def nodemap_data_incremental(&self) -> PyResult<PyObject> {
+ self.inner_nodemap_data_incremental(py)
+ }
+ def update_nodemap_data(
+ &self,
+ docket: PyObject,
+ nm_data: PyObject
+ ) -> PyResult<PyObject> {
+ self.inner_update_nodemap_data(py, docket, nm_data)
+ }
+
});
impl MixedIndex {
+ fn new(py: Python, cindex: PyObject) -> PyResult<MixedIndex> {
+ Self::create_instance(
+ py,
+ RefCell::new(cindex::Index::new(py, cindex)?),
+ RefCell::new(None),
+ RefCell::new(None),
+ RefCell::new(None),
+ )
+ }
+
+ /// This is scaffolding at this point, but it could also become
+ /// a way to start a persistent nodemap or perform a
+ /// vacuum / repack operation
+ fn fill_nodemap(
+ &self,
+ py: Python,
+ nt: &mut NodeTree,
+ ) -> PyResult<PyObject> {
+ let index = self.cindex(py).borrow();
+ for r in 0..index.len() {
+ let rev = r as Revision;
+ // in this case node() won't ever return None
+ nt.insert(&*index, index.node(rev).unwrap(), rev)
+ .map_err(|e| nodemap_error(py, e))?
+ }
+ Ok(py.None())
+ }
+
+ fn get_nodetree<'a>(
+ &'a self,
+ py: Python<'a>,
+ ) -> PyResult<&'a RefCell<Option<NodeTree>>> {
+ if self.nt(py).borrow().is_none() {
+ let readonly = Box::new(Vec::new());
+ let mut nt = NodeTree::load_bytes(readonly, 0);
+ self.fill_nodemap(py, &mut nt)?;
+ self.nt(py).borrow_mut().replace(nt);
+ }
+ Ok(self.nt(py))
+ }
+
/// forward a method call to the underlying C index
fn call_cindex(
&self,
@@ -216,6 +345,138 @@
pub fn clone_cindex(&self, py: Python) -> cindex::Index {
self.cindex(py).borrow().clone_ref(py)
}
+
+ /// Returns the full nodemap bytes to be written as-is to disk
+ fn inner_nodemap_data_all(&self, py: Python) -> PyResult<PyBytes> {
+ let nodemap = self.get_nodetree(py)?.borrow_mut().take().unwrap();
+ let (readonly, bytes) = nodemap.into_readonly_and_added_bytes();
+
+ // If there's anything readonly, we need to build the data again from
+ // scratch
+ let bytes = if readonly.len() > 0 {
+ let mut nt = NodeTree::load_bytes(Box::new(vec![]), 0);
+ self.fill_nodemap(py, &mut nt)?;
+
+ let (readonly, bytes) = nt.into_readonly_and_added_bytes();
+ assert_eq!(readonly.len(), 0);
+
+ bytes
+ } else {
+ bytes
+ };
+
+ let bytes = PyBytes::new(py, &bytes);
+ Ok(bytes)
+ }
+
+ /// Returns the last saved docket along with the size of any changed data
+ /// (in number of blocks), and said data as bytes.
+ fn inner_nodemap_data_incremental(
+ &self,
+ py: Python,
+ ) -> PyResult<PyObject> {
+ let docket = self.docket(py).borrow();
+ let docket = match docket.as_ref() {
+ Some(d) => d,
+ None => return Ok(py.None()),
+ };
+
+ let node_tree = self.get_nodetree(py)?.borrow_mut().take().unwrap();
+ let masked_blocks = node_tree.masked_readonly_blocks();
+ let (_, data) = node_tree.into_readonly_and_added_bytes();
+ let changed = masked_blocks * std::mem::size_of::<Block>();
+
+ Ok((docket, changed, PyBytes::new(py, &data))
+ .to_py_object(py)
+ .into_object())
+ }
+
+ /// Update the nodemap from the new (mmap'ed) data.
+ /// The docket is kept as a reference for later incremental calls.
+ fn inner_update_nodemap_data(
+ &self,
+ py: Python,
+ docket: PyObject,
+ nm_data: PyObject,
+ ) -> PyResult<PyObject> {
+ let buf = PyBuffer::get(py, &nm_data)?;
+ let len = buf.item_count();
+
+ // Build a slice from the mmap'ed buffer data
+ let cbuf = buf.buf_ptr();
+ let bytes = if std::mem::size_of::<u8>() == buf.item_size()
+ && buf.is_c_contiguous()
+ && u8::is_compatible_format(buf.format())
+ {
+ unsafe { std::slice::from_raw_parts(cbuf as *const u8, len) }
+ } else {
+ return Err(PyErr::new::<ValueError, _>(
+ py,
+ "Nodemap data buffer has an invalid memory representation"
+ .to_string(),
+ ));
+ };
+
+ // Keep a reference to the mmap'ed buffer, otherwise we get a dangling
+ // pointer.
+ self.mmap(py).borrow_mut().replace(buf);
+
+ let mut nt = NodeTree::load_bytes(Box::new(bytes), len);
+
+ let data_tip =
+ docket.getattr(py, "tip_rev")?.extract::<Revision>(py)?;
+ self.docket(py).borrow_mut().replace(docket.clone_ref(py));
+ let idx = self.cindex(py).borrow();
+ let current_tip = idx.len();
+
+ for r in (data_tip + 1)..current_tip as Revision {
+ let rev = r as Revision;
+ // in this case node() won't ever return None
+ nt.insert(&*idx, idx.node(rev).unwrap(), rev)
+ .map_err(|e| nodemap_error(py, e))?
+ }
+
+ *self.nt(py).borrow_mut() = Some(nt);
+
+ Ok(py.None())
+ }
+}
+
+fn revlog_error(py: Python) -> PyErr {
+ match py
+ .import("mercurial.error")
+ .and_then(|m| m.get(py, "RevlogError"))
+ {
+ Err(e) => e,
+ Ok(cls) => PyErr::from_instance(py, cls),
+ }
+}
+
+fn rev_not_in_index(py: Python, rev: Revision) -> PyErr {
+ PyErr::new::<ValueError, _>(
+ py,
+ format!(
+ "Inconsistency: Revision {} found in nodemap \
+ is not in revlog index",
+ rev
+ ),
+ )
+}
+
+/// Standard treatment of NodeMapError
+fn nodemap_error(py: Python, err: NodeMapError) -> PyErr {
+ match err {
+ NodeMapError::MultipleResults => revlog_error(py),
+ NodeMapError::RevisionNotInIndex(r) => rev_not_in_index(py, r),
+ NodeMapError::InvalidNodePrefix(s) => invalid_node_prefix(py, &s),
+ }
+}
+
+fn invalid_node_prefix(py: Python, ne: &NodeError) -> PyErr {
+ PyErr::new::<ValueError, _>(
+ py,
+ format!("Invalid node or prefix: {:?}", ne),
+ )
}
/// Create the module, with __package__ given from parent
--- a/rust/hg-cpython/src/utils.rs Mon Apr 13 16:30:13 2020 +0300
+++ b/rust/hg-cpython/src/utils.rs Thu Apr 16 22:51:09 2020 +0530
@@ -1,4 +1,7 @@
-use cpython::{PyDict, PyObject, PyResult, PyTuple, Python};
+use cpython::exc::ValueError;
+use cpython::{PyBytes, PyDict, PyErr, PyObject, PyResult, PyTuple, Python};
+use hg::revlog::Node;
+use std::convert::TryFrom;
#[allow(unused)]
pub fn print_python_trace(py: Python) -> PyResult<PyObject> {
@@ -11,3 +14,34 @@
kwargs.set_item(py, "file", sys.get(py, "stderr")?)?;
traceback.call(py, "print_stack", PyTuple::new(py, &[]), Some(&kwargs))
}
+
+// Necessary evil for the time being, could maybe be moved to
+// a TryFrom in Node itself
+const NODE_BYTES_LENGTH: usize = 20;
+type NodeData = [u8; NODE_BYTES_LENGTH];
+
+/// Copy incoming Python bytes given as `PyObject` into `Node`,
+/// doing the necessary checks
+pub fn node_from_py_object<'a>(
+ py: Python,
+ bytes: &'a PyObject,
+) -> PyResult<Node> {
+ let as_py_bytes: &'a PyBytes = bytes.extract(py)?;
+ node_from_py_bytes(py, as_py_bytes)
+}
+
+/// Clone incoming Python bytes given as `PyBytes` as a `Node`,
+/// doing the necessary checks.
+pub fn node_from_py_bytes<'a>(
+ py: Python,
+ bytes: &'a PyBytes,
+) -> PyResult<Node> {
+ <NodeData>::try_from(bytes.data(py))
+ .map_err(|_| {
+ PyErr::new::<ValueError, _>(
+ py,
+ format!("{}-byte hash required", NODE_BYTES_LENGTH),
+ )
+ })
+ .map(|n| n.into())
+}
--- a/rust/hg-direct-ffi/Cargo.toml Mon Apr 13 16:30:13 2020 +0300
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,12 +0,0 @@
-[package]
-name = "hgdirectffi"
-version = "0.1.0"
-authors = ["Georges Racinet <gracinet@anybox.fr>"]
-description = "Low level Python bindings for hg-core, going through existing C extensions"
-
-[dependencies]
-libc = "*"
-hg-core = { path = "../hg-core" }
-
-[lib]
-crate-type = ["staticlib"]
--- a/rust/hg-direct-ffi/rustfmt.toml Mon Apr 13 16:30:13 2020 +0300
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,3 +0,0 @@
-max_width = 79
-wrap_comments = true
-error_on_line_overflow = true
--- a/rust/hg-direct-ffi/src/ancestors.rs Mon Apr 13 16:30:13 2020 +0300
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,282 +0,0 @@
-// Copyright 2018 Georges Racinet <gracinet@anybox.fr>
-//
-// This software may be used and distributed according to the terms of the
-// GNU General Public License version 2 or any later version.
-
-//! Bindings for CPython extension code
-//!
-//! This exposes methods to build and use a `rustlazyancestors` iterator
-//! from C code, using an index and its parents function that are passed
-//! from the caller at instantiation.
-
-use hg::AncestorsIterator;
-use hg::{Graph, GraphError, Revision, NULL_REVISION};
-use libc::{c_int, c_long, c_void, ssize_t};
-use std::ptr::null_mut;
-use std::slice;
-
-type IndexPtr = *mut c_void;
-
-extern "C" {
- fn HgRevlogIndex_GetParents(
- op: IndexPtr,
- rev: c_int,
- parents: *mut [c_int; 2],
- ) -> c_int;
-}
-
-/// A Graph backed up by objects and functions from revlog.c
-///
-/// This implementation of the Graph trait, relies on (pointers to)
-/// - the C index object (`index` member)
-/// - the `index_get_parents()` function (`parents` member)
-pub struct Index {
- index: IndexPtr,
-}
-
-impl Index {
- pub fn new(index: IndexPtr) -> Self {
- Index { index: index }
- }
-}
-
-impl Graph for Index {
- /// wrap a call to the C extern parents function
- fn parents(&self, rev: Revision) -> Result<[Revision; 2], GraphError> {
- let mut res: [c_int; 2] = [0; 2];
- let code = unsafe {
- HgRevlogIndex_GetParents(
- self.index,
- rev,
- &mut res as *mut [c_int; 2],
- )
- };
- match code {
- 0 => Ok(res),
- _ => Err(GraphError::ParentOutOfRange(rev)),
- }
- }
-}
-
-/// Wrapping of AncestorsIterator<Index> constructor, for C callers.
-///
-/// Besides `initrevs`, `stoprev` and `inclusive`, that are converted
-/// we receive the index and the parents function as pointers
-#[no_mangle]
-pub extern "C" fn rustlazyancestors_init(
- index: IndexPtr,
- initrevslen: ssize_t,
- initrevs: *mut c_long,
- stoprev: c_long,
- inclusive: c_int,
-) -> *mut AncestorsIterator<Index> {
- assert!(initrevslen >= 0);
- unsafe {
- raw_init(
- Index::new(index),
- initrevslen as usize,
- initrevs,
- stoprev,
- inclusive,
- )
- }
-}
-
-/// Testable (for any Graph) version of rustlazyancestors_init
-#[inline]
-unsafe fn raw_init<G: Graph>(
- graph: G,
- initrevslen: usize,
- initrevs: *mut c_long,
- stoprev: c_long,
- inclusive: c_int,
-) -> *mut AncestorsIterator<G> {
- let inclb = match inclusive {
- 0 => false,
- 1 => true,
- _ => {
- return null_mut();
- }
- };
-
- let slice = slice::from_raw_parts(initrevs, initrevslen);
-
- Box::into_raw(Box::new(
- match AncestorsIterator::new(
- graph,
- slice.into_iter().map(|&r| r as Revision),
- stoprev as Revision,
- inclb,
- ) {
- Ok(it) => it,
- Err(_) => {
- return null_mut();
- }
- },
- ))
-}
-
-/// Deallocator to be called from C code
-#[no_mangle]
-pub extern "C" fn rustlazyancestors_drop(
- raw_iter: *mut AncestorsIterator<Index>,
-) {
- raw_drop(raw_iter);
-}
-
-/// Testable (for any Graph) version of rustlazayancestors_drop
-#[inline]
-fn raw_drop<G: Graph>(raw_iter: *mut AncestorsIterator<G>) {
- unsafe {
- Box::from_raw(raw_iter);
- }
-}
-
-/// Iteration main method to be called from C code
-///
-/// We convert the end of iteration into NULL_REVISION,
-/// it will be up to the C wrapper to convert that back into a Python end of
-/// iteration
-#[no_mangle]
-pub extern "C" fn rustlazyancestors_next(
- raw: *mut AncestorsIterator<Index>,
-) -> c_long {
- raw_next(raw)
-}
-
-/// Testable (for any Graph) version of rustlazayancestors_next
-#[inline]
-fn raw_next<G: Graph>(raw: *mut AncestorsIterator<G>) -> c_long {
- let as_ref = unsafe { &mut *raw };
- let rev = match as_ref.next() {
- Some(Ok(rev)) => rev,
- Some(Err(_)) | None => NULL_REVISION,
- };
- rev as c_long
-}
-
-#[no_mangle]
-pub extern "C" fn rustlazyancestors_contains(
- raw: *mut AncestorsIterator<Index>,
- target: c_long,
-) -> c_int {
- raw_contains(raw, target)
-}
-
-/// Testable (for any Graph) version of rustlazayancestors_next
-#[inline]
-fn raw_contains<G: Graph>(
- raw: *mut AncestorsIterator<G>,
- target: c_long,
-) -> c_int {
- let as_ref = unsafe { &mut *raw };
- match as_ref.contains(target as Revision) {
- Ok(r) => r as c_int,
- Err(_) => -1,
- }
-}
-
-#[cfg(test)]
-mod tests {
- use super::*;
- use std::thread;
-
- #[derive(Clone, Debug)]
- struct Stub;
-
- impl Graph for Stub {
- fn parents(&self, r: Revision) -> Result<[Revision; 2], GraphError> {
- match r {
- 25 => Err(GraphError::ParentOutOfRange(25)),
- _ => Ok([1, 2]),
- }
- }
- }
-
- /// Helper for test_init_next()
- fn stub_raw_init(
- initrevslen: usize,
- initrevs: usize,
- stoprev: c_long,
- inclusive: c_int,
- ) -> usize {
- unsafe {
- raw_init(
- Stub,
- initrevslen,
- initrevs as *mut c_long,
- stoprev,
- inclusive,
- ) as usize
- }
- }
-
- fn stub_raw_init_from_vec(
- mut initrevs: Vec<c_long>,
- stoprev: c_long,
- inclusive: c_int,
- ) -> *mut AncestorsIterator<Stub> {
- unsafe {
- raw_init(
- Stub,
- initrevs.len(),
- initrevs.as_mut_ptr(),
- stoprev,
- inclusive,
- )
- }
- }
-
- #[test]
- // Test what happens when we init an Iterator as with the exposed C ABI
- // and try to use it afterwards
- // We spawn new threads, in order to make memory consistency harder
- // but this forces us to convert the pointers into shareable usizes.
- fn test_init_next() {
- let mut initrevs: Vec<c_long> = vec![11, 13];
- let initrevs_len = initrevs.len();
- let initrevs_ptr = initrevs.as_mut_ptr() as usize;
- let handler = thread::spawn(move || {
- stub_raw_init(initrevs_len, initrevs_ptr, 0, 1)
- });
- let raw = handler.join().unwrap() as *mut AncestorsIterator<Stub>;
-
- assert_eq!(raw_next(raw), 13);
- assert_eq!(raw_next(raw), 11);
- assert_eq!(raw_next(raw), 2);
- assert_eq!(raw_next(raw), 1);
- assert_eq!(raw_next(raw), NULL_REVISION as c_long);
- raw_drop(raw);
- }
-
- #[test]
- fn test_init_wrong_bool() {
- assert_eq!(stub_raw_init_from_vec(vec![11, 13], 0, 2), null_mut());
- }
-
- #[test]
- fn test_empty() {
- let raw = stub_raw_init_from_vec(vec![], 0, 1);
- assert_eq!(raw_next(raw), NULL_REVISION as c_long);
- raw_drop(raw);
- }
-
- #[test]
- fn test_init_err_out_of_range() {
- assert!(stub_raw_init_from_vec(vec![25], 0, 0).is_null());
- }
-
- #[test]
- fn test_contains() {
- let raw = stub_raw_init_from_vec(vec![5, 6], 0, 1);
- assert_eq!(raw_contains(raw, 5), 1);
- assert_eq!(raw_contains(raw, 2), 1);
- }
-
- #[test]
- fn test_contains_exclusive() {
- let raw = stub_raw_init_from_vec(vec![5, 6], 0, 0);
- assert_eq!(raw_contains(raw, 5), 0);
- assert_eq!(raw_contains(raw, 2), 1);
- }
-}
--- a/rust/hg-direct-ffi/src/lib.rs Mon Apr 13 16:30:13 2020 +0300
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,19 +0,0 @@
-// Copyright 2018 Georges Racinet <gracinet@anybox.fr>
-//
-// This software may be used and distributed according to the terms of the
-// GNU General Public License version 2 or any later version.
-
-//! Bindings for CPython extension code
-//!
-//! This exposes methods to build and use a `rustlazyancestors` iterator
-//! from C code, using an index and its parents function that are passed
-//! from the caller at instantiation.
-
-extern crate hg;
-extern crate libc;
-
-mod ancestors;
-pub use ancestors::{
- rustlazyancestors_contains, rustlazyancestors_drop,
- rustlazyancestors_init, rustlazyancestors_next,
-};
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/rust/hgcli/.cargo/config Thu Apr 16 22:51:09 2020 +0530
@@ -0,0 +1,13 @@
+# By default Rust will not export dynamic symbols from built executables.
+# Python symbols need to be exported from executables in order for that
+# executable to load Python extension modules, which are shared libraries.
+# Otherwise, the extension module / shared library is unable to resolve
+# Python symbols. This file contains target-specific configuration
+# overrides to export dynamic symbols from executables.
+#
+# Ideally we would achieve this functionality via the build.rs build
+# script. But custom compiler flags via build scripts apparently only
+# support limited options.
+
+[target.x86_64-unknown-linux-gnu]
+rustflags = ["-C", "link-args=-Wl,-export-dynamic"]
--- a/rust/hgcli/Cargo.lock Mon Apr 13 16:30:13 2020 +0300
+++ b/rust/hgcli/Cargo.lock Thu Apr 16 22:51:09 2020 +0530
@@ -1,136 +1,359 @@
+# This file is automatically @generated by Cargo.
+# It is not intended for manual editing.
[[package]]
name = "aho-corasick"
-version = "0.5.3"
+version = "0.7.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
- "memchr 0.1.11 (registry+https://github.com/rust-lang/crates.io-index)",
+ "memchr 2.3.3 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
+name = "anyhow"
+version = "1.0.28"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
+[[package]]
+name = "autocfg"
+version = "1.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
+[[package]]
+name = "byteorder"
+version = "1.3.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
+[[package]]
+name = "cc"
+version = "1.0.50"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
+[[package]]
+name = "cfg-if"
+version = "0.1.10"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
+[[package]]
name = "cpython"
-version = "0.1.0"
-source = "git+https://github.com/indygreg/rust-cpython.git?rev=c90d65cf84abfffce7ef54476bbfed56017a2f52#c90d65cf84abfffce7ef54476bbfed56017a2f52"
+version = "0.4.1"
+source = "git+https://github.com/dgrunwald/rust-cpython?rev=387e87d9deb6b678508888239f9f87dc36973d3f#387e87d9deb6b678508888239f9f87dc36973d3f"
dependencies = [
- "libc 0.2.45 (registry+https://github.com/rust-lang/crates.io-index)",
- "num-traits 0.1.43 (registry+https://github.com/rust-lang/crates.io-index)",
- "python27-sys 0.1.2 (git+https://github.com/indygreg/rust-cpython.git?rev=c90d65cf84abfffce7ef54476bbfed56017a2f52)",
+ "libc 0.2.68 (registry+https://github.com/rust-lang/crates.io-index)",
+ "num-traits 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)",
+ "paste 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)",
+ "python3-sys 0.4.1 (git+https://github.com/dgrunwald/rust-cpython?rev=387e87d9deb6b678508888239f9f87dc36973d3f)",
+]
+
+[[package]]
+name = "fs_extra"
+version = "1.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
+[[package]]
+name = "getrandom"
+version = "0.1.14"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)",
+ "libc 0.2.68 (registry+https://github.com/rust-lang/crates.io-index)",
+ "wasi 0.9.0+wasi-snapshot-preview1 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "hgcli"
version = "0.1.0"
dependencies = [
- "cpython 0.1.0 (git+https://github.com/indygreg/rust-cpython.git?rev=c90d65cf84abfffce7ef54476bbfed56017a2f52)",
- "libc 0.2.45 (registry+https://github.com/rust-lang/crates.io-index)",
- "python27-sys 0.1.2 (git+https://github.com/indygreg/rust-cpython.git?rev=c90d65cf84abfffce7ef54476bbfed56017a2f52)",
+ "jemallocator-global 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)",
+ "pyembed 0.7.0-pre (git+https://github.com/indygreg/PyOxidizer.git?rev=c772a1379c3026314eda1c8ea244b86c0658951d)",
+]
+
+[[package]]
+name = "jemalloc-sys"
+version = "0.3.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "cc 1.0.50 (registry+https://github.com/rust-lang/crates.io-index)",
+ "fs_extra 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "libc 0.2.68 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
-name = "kernel32-sys"
-version = "0.2.2"
+name = "jemallocator"
+version = "0.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
- "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
- "winapi-build 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "jemalloc-sys 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)",
+ "libc 0.2.68 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
+name = "jemallocator-global"
+version = "0.3.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)",
+ "jemallocator 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "lazy_static"
+version = "1.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
+[[package]]
name = "libc"
-version = "0.2.45"
+version = "0.2.68"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "memchr"
-version = "0.1.11"
+version = "2.3.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
+[[package]]
+name = "memory-module-sys"
+version = "0.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
- "libc 0.2.45 (registry+https://github.com/rust-lang/crates.io-index)",
+ "cc 1.0.50 (registry+https://github.com/rust-lang/crates.io-index)",
+ "libc 0.2.68 (registry+https://github.com/rust-lang/crates.io-index)",
+ "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "num-traits"
-version = "0.1.43"
+version = "0.2.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
- "num-traits 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)",
+ "autocfg 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
-name = "num-traits"
+name = "paste"
+version = "0.1.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "paste-impl 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)",
+ "proc-macro-hack 0.5.15 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "paste-impl"
+version = "0.1.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "proc-macro-hack 0.5.15 (registry+https://github.com/rust-lang/crates.io-index)",
+ "proc-macro2 1.0.10 (registry+https://github.com/rust-lang/crates.io-index)",
+ "quote 1.0.3 (registry+https://github.com/rust-lang/crates.io-index)",
+ "syn 1.0.17 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "ppv-lite86"
version = "0.2.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
-name = "python27-sys"
-version = "0.1.2"
-source = "git+https://github.com/indygreg/rust-cpython.git?rev=c90d65cf84abfffce7ef54476bbfed56017a2f52#c90d65cf84abfffce7ef54476bbfed56017a2f52"
+name = "proc-macro-hack"
+version = "0.5.15"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
+[[package]]
+name = "proc-macro2"
+version = "1.0.10"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "unicode-xid 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "pyembed"
+version = "0.7.0-pre"
+source = "git+https://github.com/indygreg/PyOxidizer.git?rev=c772a1379c3026314eda1c8ea244b86c0658951d#c772a1379c3026314eda1c8ea244b86c0658951d"
+dependencies = [
+ "cpython 0.4.1 (git+https://github.com/dgrunwald/rust-cpython?rev=387e87d9deb6b678508888239f9f87dc36973d3f)",
+ "jemalloc-sys 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)",
+ "lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "libc 0.2.68 (registry+https://github.com/rust-lang/crates.io-index)",
+ "memory-module-sys 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "python-packed-resources 0.1.0-pre (git+https://github.com/indygreg/PyOxidizer.git?rev=c772a1379c3026314eda1c8ea244b86c0658951d)",
+ "python3-sys 0.4.1 (git+https://github.com/dgrunwald/rust-cpython?rev=387e87d9deb6b678508888239f9f87dc36973d3f)",
+ "uuid 0.8.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "python-packed-resources"
+version = "0.1.0-pre"
+source = "git+https://github.com/indygreg/PyOxidizer.git?rev=c772a1379c3026314eda1c8ea244b86c0658951d#c772a1379c3026314eda1c8ea244b86c0658951d"
+dependencies = [
+ "anyhow 1.0.28 (registry+https://github.com/rust-lang/crates.io-index)",
+ "byteorder 1.3.4 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "python3-sys"
+version = "0.4.1"
+source = "git+https://github.com/dgrunwald/rust-cpython?rev=387e87d9deb6b678508888239f9f87dc36973d3f#387e87d9deb6b678508888239f9f87dc36973d3f"
dependencies = [
- "libc 0.2.45 (registry+https://github.com/rust-lang/crates.io-index)",
- "regex 0.1.80 (registry+https://github.com/rust-lang/crates.io-index)",
+ "libc 0.2.68 (registry+https://github.com/rust-lang/crates.io-index)",
+ "regex 1.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "quote"
+version = "1.0.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "proc-macro2 1.0.10 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "rand"
+version = "0.7.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "getrandom 0.1.14 (registry+https://github.com/rust-lang/crates.io-index)",
+ "libc 0.2.68 (registry+https://github.com/rust-lang/crates.io-index)",
+ "rand_chacha 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
+ "rand_core 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "rand_hc 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "rand_chacha"
+version = "0.2.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "ppv-lite86 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)",
+ "rand_core 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "rand_core"
+version = "0.5.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "getrandom 0.1.14 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "rand_hc"
+version = "0.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "rand_core 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "regex"
-version = "0.1.80"
+version = "1.3.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
- "aho-corasick 0.5.3 (registry+https://github.com/rust-lang/crates.io-index)",
- "memchr 0.1.11 (registry+https://github.com/rust-lang/crates.io-index)",
- "regex-syntax 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)",
- "thread_local 0.2.7 (registry+https://github.com/rust-lang/crates.io-index)",
- "utf8-ranges 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)",
+ "aho-corasick 0.7.10 (registry+https://github.com/rust-lang/crates.io-index)",
+ "memchr 2.3.3 (registry+https://github.com/rust-lang/crates.io-index)",
+ "regex-syntax 0.6.17 (registry+https://github.com/rust-lang/crates.io-index)",
+ "thread_local 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "regex-syntax"
-version = "0.3.9"
+version = "0.6.17"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
-name = "thread-id"
-version = "2.0.0"
+name = "syn"
+version = "1.0.17"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
- "kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
- "libc 0.2.45 (registry+https://github.com/rust-lang/crates.io-index)",
+ "proc-macro2 1.0.10 (registry+https://github.com/rust-lang/crates.io-index)",
+ "quote 1.0.3 (registry+https://github.com/rust-lang/crates.io-index)",
+ "unicode-xid 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "thread_local"
-version = "0.2.7"
+version = "1.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
- "thread-id 2.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
-name = "utf8-ranges"
-version = "0.1.3"
+name = "unicode-xid"
+version = "0.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
+[[package]]
+name = "uuid"
+version = "0.8.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "rand 0.7.3 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "wasi"
+version = "0.9.0+wasi-snapshot-preview1"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "winapi"
-version = "0.2.8"
+version = "0.3.8"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "winapi-i686-pc-windows-gnu 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "winapi-x86_64-pc-windows-gnu 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "winapi-i686-pc-windows-gnu"
+version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
-name = "winapi-build"
-version = "0.1.1"
+name = "winapi-x86_64-pc-windows-gnu"
+version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
[metadata]
-"checksum aho-corasick 0.5.3 (registry+https://github.com/rust-lang/crates.io-index)" = "ca972c2ea5f742bfce5687b9aef75506a764f61d37f8f649047846a9686ddb66"
-"checksum cpython 0.1.0 (git+https://github.com/indygreg/rust-cpython.git?rev=c90d65cf84abfffce7ef54476bbfed56017a2f52)" = "<none>"
-"checksum kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "7507624b29483431c0ba2d82aece8ca6cdba9382bff4ddd0f7490560c056098d"
-"checksum libc 0.2.45 (registry+https://github.com/rust-lang/crates.io-index)" = "2d2857ec59fadc0773853c664d2d18e7198e83883e7060b63c924cb077bd5c74"
-"checksum memchr 0.1.11 (registry+https://github.com/rust-lang/crates.io-index)" = "d8b629fb514376c675b98c1421e80b151d3817ac42d7c667717d282761418d20"
-"checksum num-traits 0.1.43 (registry+https://github.com/rust-lang/crates.io-index)" = "92e5113e9fd4cc14ded8e499429f396a20f98c772a47cc8622a736e1ec843c31"
-"checksum num-traits 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)" = "0b3a5d7cc97d6d30d8b9bc8fa19bf45349ffe46241e8816f50f62f6d6aaabee1"
-"checksum python27-sys 0.1.2 (git+https://github.com/indygreg/rust-cpython.git?rev=c90d65cf84abfffce7ef54476bbfed56017a2f52)" = "<none>"
-"checksum regex 0.1.80 (registry+https://github.com/rust-lang/crates.io-index)" = "4fd4ace6a8cf7860714a2c2280d6c1f7e6a413486c13298bbc86fd3da019402f"
-"checksum regex-syntax 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)" = "f9ec002c35e86791825ed294b50008eea9ddfc8def4420124fbc6b08db834957"
-"checksum thread-id 2.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "a9539db560102d1cef46b8b78ce737ff0bb64e7e18d35b2a5688f7d097d0ff03"
-"checksum thread_local 0.2.7 (registry+https://github.com/rust-lang/crates.io-index)" = "8576dbbfcaef9641452d5cf0df9b0e7eeab7694956dd33bb61515fb8f18cfdd5"
-"checksum utf8-ranges 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "a1ca13c08c41c9c3e04224ed9ff80461d97e121589ff27c753a16cb10830ae0f"
-"checksum winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)" = "167dc9d6949a9b857f3451275e911c3f44255842c1f7a76f33c55103a909087a"
-"checksum winapi-build 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "2d315eee3b34aca4797b2da6b13ed88266e6d612562a0c46390af8299fc699bc"
+"checksum aho-corasick 0.7.10 (registry+https://github.com/rust-lang/crates.io-index)" = "8716408b8bc624ed7f65d223ddb9ac2d044c0547b6fa4b0d554f3a9540496ada"
+"checksum anyhow 1.0.28 (registry+https://github.com/rust-lang/crates.io-index)" = "d9a60d744a80c30fcb657dfe2c1b22bcb3e814c1a1e3674f32bf5820b570fbff"
+"checksum autocfg 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "f8aac770f1885fd7e387acedd76065302551364496e46b3dd00860b2f8359b9d"
+"checksum byteorder 1.3.4 (registry+https://github.com/rust-lang/crates.io-index)" = "08c48aae112d48ed9f069b33538ea9e3e90aa263cfa3d1c24309612b1f7472de"
+"checksum cc 1.0.50 (registry+https://github.com/rust-lang/crates.io-index)" = "95e28fa049fda1c330bcf9d723be7663a899c4679724b34c81e9f5a326aab8cd"
+"checksum cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)" = "4785bdd1c96b2a846b2bd7cc02e86b6b3dbf14e7e53446c4f54c92a361040822"
+"checksum cpython 0.4.1 (git+https://github.com/dgrunwald/rust-cpython?rev=387e87d9deb6b678508888239f9f87dc36973d3f)" = "<none>"
+"checksum fs_extra 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "5f2a4a2034423744d2cc7ca2068453168dcdb82c438419e639a26bd87839c674"
+"checksum getrandom 0.1.14 (registry+https://github.com/rust-lang/crates.io-index)" = "7abc8dd8451921606d809ba32e95b6111925cd2906060d2dcc29c070220503eb"
+"checksum jemalloc-sys 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)" = "0d3b9f3f5c9b31aa0f5ed3260385ac205db665baa41d49bb8338008ae94ede45"
+"checksum jemallocator 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)" = "43ae63fcfc45e99ab3d1b29a46782ad679e98436c3169d15a167a1108a724b69"
+"checksum jemallocator-global 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)" = "991b61de8365c8b5707cf6cabbff98cfd6eaca9b851948b883efea408c7f581e"
+"checksum lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646"
+"checksum libc 0.2.68 (registry+https://github.com/rust-lang/crates.io-index)" = "dea0c0405123bba743ee3f91f49b1c7cfb684eef0da0a50110f758ccf24cdff0"
+"checksum memchr 2.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "3728d817d99e5ac407411fa471ff9800a778d88a24685968b36824eaf4bee400"
+"checksum memory-module-sys 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "3bbdce2925c681860b08875119254fb5543dbf6337c56ff93afebeed9c686da3"
+"checksum num-traits 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)" = "c62be47e61d1842b9170f0fdeec8eba98e60e90e5446449a0545e5152acd7096"
+"checksum paste 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)" = "092d791bf7847f70bbd49085489fba25fc2c193571752bff9e36e74e72403932"
+"checksum paste-impl 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)" = "406c23fb4c45cc6f68a9bbabb8ec7bd6f8cfcbd17e9e8f72c2460282f8325729"
+"checksum ppv-lite86 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)" = "74490b50b9fbe561ac330df47c08f3f33073d2d00c150f719147d7c54522fa1b"
+"checksum proc-macro-hack 0.5.15 (registry+https://github.com/rust-lang/crates.io-index)" = "0d659fe7c6d27f25e9d80a1a094c223f5246f6a6596453e09d7229bf42750b63"
+"checksum proc-macro2 1.0.10 (registry+https://github.com/rust-lang/crates.io-index)" = "df246d292ff63439fea9bc8c0a270bed0e390d5ebd4db4ba15aba81111b5abe3"
+"checksum pyembed 0.7.0-pre (git+https://github.com/indygreg/PyOxidizer.git?rev=c772a1379c3026314eda1c8ea244b86c0658951d)" = "<none>"
+"checksum python-packed-resources 0.1.0-pre (git+https://github.com/indygreg/PyOxidizer.git?rev=c772a1379c3026314eda1c8ea244b86c0658951d)" = "<none>"
+"checksum python3-sys 0.4.1 (git+https://github.com/dgrunwald/rust-cpython?rev=387e87d9deb6b678508888239f9f87dc36973d3f)" = "<none>"
+"checksum quote 1.0.3 (registry+https://github.com/rust-lang/crates.io-index)" = "2bdc6c187c65bca4260c9011c9e3132efe4909da44726bad24cf7572ae338d7f"
+"checksum rand 0.7.3 (registry+https://github.com/rust-lang/crates.io-index)" = "6a6b1679d49b24bbfe0c803429aa1874472f50d9b363131f0e89fc356b544d03"
+"checksum rand_chacha 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "f4c8ed856279c9737206bf725bf36935d8666ead7aa69b52be55af369d193402"
+"checksum rand_core 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)" = "90bde5296fc891b0cef12a6d03ddccc162ce7b2aff54160af9338f8d40df6d19"
+"checksum rand_hc 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ca3129af7b92a17112d59ad498c6f81eaf463253766b90396d39ea7a39d6613c"
+"checksum regex 1.3.6 (registry+https://github.com/rust-lang/crates.io-index)" = "7f6946991529684867e47d86474e3a6d0c0ab9b82d5821e314b1ede31fa3a4b3"
+"checksum regex-syntax 0.6.17 (registry+https://github.com/rust-lang/crates.io-index)" = "7fe5bd57d1d7414c6b5ed48563a2c855d995ff777729dcd91c369ec7fea395ae"
+"checksum syn 1.0.17 (registry+https://github.com/rust-lang/crates.io-index)" = "0df0eb663f387145cab623dea85b09c2c5b4b0aef44e945d928e682fce71bb03"
+"checksum thread_local 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "d40c6d1b69745a6ec6fb1ca717914848da4b44ae29d9b3080cbee91d72a69b14"
+"checksum unicode-xid 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "826e7639553986605ec5979c7dd957c7895e93eabed50ab2ffa7f6128a75097c"
+"checksum uuid 0.8.1 (registry+https://github.com/rust-lang/crates.io-index)" = "9fde2f6a4bea1d6e007c4ad38c6839fa71cbb63b6dbf5b595aa38dc9b1093c11"
+"checksum wasi 0.9.0+wasi-snapshot-preview1 (registry+https://github.com/rust-lang/crates.io-index)" = "cccddf32554fecc6acb585f82a32a72e28b48f8c4c1883ddfeeeaa96f7d8e519"
+"checksum winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)" = "8093091eeb260906a183e6ae1abdba2ef5ef2257a21801128899c3fc699229c6"
+"checksum winapi-i686-pc-windows-gnu 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6"
+"checksum winapi-x86_64-pc-windows-gnu 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f"
--- a/rust/hgcli/Cargo.toml Mon Apr 13 16:30:13 2020 +0300
+++ b/rust/hgcli/Cargo.toml Thu Apr 16 22:51:09 2020 +0530
@@ -1,35 +1,24 @@
[package]
name = "hgcli"
version = "0.1.0"
+build = "build.rs"
authors = ["Gregory Szorc <gregory.szorc@gmail.com>"]
+edition = "2018"
license = "GPL-2.0"
-
-build = "build.rs"
+readme = "README.md"
[[bin]]
name = "hg"
path = "src/main.rs"
-[features]
-# localdev: detect Python in PATH and use files from source checkout.
-default = ["localdev"]
-localdev = []
-
[dependencies]
-libc = "0.2.34"
+jemallocator-global = { version = "0.3", optional = true }
+pyembed = { git = "https://github.com/indygreg/PyOxidizer.git", rev = "c772a1379c3026314eda1c8ea244b86c0658951d", default-features=false }
-# We currently use a custom build of cpython and python27-sys with the
-# following changes:
-# * GILGuard call of prepare_freethreaded_python() is removed.
-# TODO switch to official release when our changes are incorporated.
-[dependencies.cpython]
-version = "0.1"
-default-features = false
-features = ["python27-sys"]
-git = "https://github.com/indygreg/rust-cpython.git"
-rev = "c90d65cf84abfffce7ef54476bbfed56017a2f52"
-
-[dependencies.python27-sys]
-version = "0.1.2"
-git = "https://github.com/indygreg/rust-cpython.git"
-rev = "c90d65cf84abfffce7ef54476bbfed56017a2f52"
+[features]
+default = ["build-mode-pyoxidizer-exe"]
+jemalloc = ["jemallocator-global", "pyembed/jemalloc"]
+build-mode-pyoxidizer-exe = ["pyembed/build-mode-pyoxidizer-exe"]
+build-mode-prebuilt-artifacts = ["pyembed/build-mode-prebuilt-artifacts"]
+cpython-link-unresolved-static = ["pyembed/cpython-link-unresolved-static"]
+cpython-link-default = ["pyembed/cpython-link-default"]
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/rust/hgcli/README.md Thu Apr 16 22:51:09 2020 +0530
@@ -0,0 +1,50 @@
+# Oxidized Mercurial
+
+This project provides a Rust implementation of the Mercurial (`hg`)
+version control tool.
+
+Under the hood, the project uses
+[PyOxidizer](https://github.com/indygreg/PyOxidizer) to embed a Python
+interpreter in a binary built with Rust. At run-time, the Rust `fn main()`
+is called and Rust code handles initial process startup. An in-process
+Python interpreter is started (if needed) to provide additional
+functionality.
+
+# Building
+
+This project currently requires an unreleased version of PyOxidizer
+(0.7.0-pre). For best results, build the exact PyOxidizer commit
+as defined in the `pyoxidizer.bzl` file:
+
+ $ git clone https://github.com/indygreg/PyOxidizer.git
+ $ cd PyOxidizer
+ $ git checkout <Git commit from pyoxidizer.bzl>
+ $ cargo build --release
+
+Then build this Rust project using the built `pyoxidizer` executable:
+
+ $ /path/to/pyoxidizer/target/release/pyoxidizer build
+
+If all goes according to plan, there should be an assembled application
+under `build/<arch>/debug/app/` with an `hg` executable:
+
+ $ build/x86_64-unknown-linux-gnu/debug/app/hg version
+ Mercurial Distributed SCM (version 5.3.1+433-f99cd77d53dc+20200331)
+ (see https://mercurial-scm.org for more information)
+
+ Copyright (C) 2005-2020 Matt Mackall and others
+ This is free software; see the source for copying conditions. There is NO
+ warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
+
+# Running Tests
+
+To run tests with a built `hg` executable, you can use the `--with-hg`
+argument to `run-tests.py`. But there's a wrinkle: many tests run custom
+Python scripts that need to `import` modules provided by Mercurial. Since
+these modules are embedded in the produced `hg` executable, a regular
+Python interpreter can't access them! To work around this, set `PYTHONPATH`
+to the Mercurial source directory. e.g.:
+
+ $ cd /path/to/hg/src/tests
+ $ PYTHONPATH=`pwd`/.. python3.7 run-tests.py \
+ --with-hg `pwd`/../rust/hgcli/build/x86_64-unknown-linux-gnu/debug/app/hg
--- a/rust/hgcli/README.rst Mon Apr 13 16:30:13 2020 +0300
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,58 +0,0 @@
-Features
---------
-
-The following Cargo features are available:
-
-localdev (default)
- Produce files that work with an in-source-tree build.
-
- In this mode, the build finds and uses a ``python2.7`` binary from
- ``PATH``. The ``hg`` binary assumes it runs from ``rust/target/<target>hg``
- and it finds Mercurial files at ``dirname($0)/../../../``.
-
-Build Mechanism
----------------
-
-The produced ``hg`` binary is *bound* to a CPython installation. The
-binary links against and loads a CPython library that is discovered
-at build time (by a ``build.rs`` Cargo build script). The Python
-standard library defined by this CPython installation is also used.
-
-Finding the appropriate CPython installation to use is done by
-the ``python27-sys`` crate's ``build.rs``. Its search order is::
-
-1. ``PYTHON_SYS_EXECUTABLE`` environment variable.
-2. ``python`` executable on ``PATH``
-3. ``python2`` executable on ``PATH``
-4. ``python2.7`` executable on ``PATH``
-
-Additional verification of the found Python will be performed by our
-``build.rs`` to ensure it meets Mercurial's requirements.
-
-Details about the build-time configured Python are built into the
-produced ``hg`` binary. This means that a built ``hg`` binary is only
-suitable for a specific, well-defined role. These roles are controlled
-by Cargo features (see above).
-
-Running
-=======
-
-The ``hgcli`` crate produces an ``hg`` binary. You can run this binary
-via ``cargo run``::
-
- $ cargo run --manifest-path hgcli/Cargo.toml
-
-Or directly::
-
- $ target/debug/hg
- $ target/release/hg
-
-You can also run the test harness with this binary::
-
- $ ./run-tests.py --with-hg ../rust/target/debug/hg
-
-.. note::
-
- Integration with the test harness is still preliminary. Remember to
- ``cargo build`` after changes because the test harness doesn't yet
- automatically build Rust code.
--- a/rust/hgcli/build.rs Mon Apr 13 16:30:13 2020 +0300
+++ b/rust/hgcli/build.rs Thu Apr 16 22:51:09 2020 +0530
@@ -1,126 +1,16 @@
-// build.rs -- Configure build environment for `hgcli` Rust package.
-//
-// Copyright 2017 Gregory Szorc <gregory.szorc@gmail.com>
-//
-// This software may be used and distributed according to the terms of the
-// GNU General Public License version 2 or any later version.
-
-use std::collections::HashMap;
-use std::env;
-use std::path::Path;
-use std::process::Command;
+// This Source Code Form is subject to the terms of the Mozilla Public
+// License, v. 2.0. If a copy of the MPL was not distributed with this
+// file, You can obtain one at https://mozilla.org/MPL/2.0/.
-struct PythonConfig {
- python: String,
- config: HashMap<String, String>,
-}
-
-fn get_python_config() -> PythonConfig {
- // The python27-sys crate exports a Cargo variable defining the full
- // path to the interpreter being used.
- let python = env::var("DEP_PYTHON27_PYTHON_INTERPRETER")
- .expect("Missing DEP_PYTHON27_PYTHON_INTERPRETER; bad python27-sys crate?");
-
- if !Path::new(&python).exists() {
- panic!(
- "Python interpreter {} does not exist; this should never happen",
- python
- );
- }
+/*! Build script to integrate PyOxidizer. */
- // This is a bit hacky but it gets the job done.
- let separator = "SEPARATOR STRING";
-
- let script = "import sysconfig; \
- c = sysconfig.get_config_vars(); \
- print('SEPARATOR STRING'.join('%s=%s' % i for i in c.items()))";
-
- let mut command = Command::new(&python);
- command.arg("-c").arg(script);
-
- let out = command.output().unwrap();
-
- if !out.status.success() {
- panic!(
- "python script failed: {}",
- String::from_utf8_lossy(&out.stderr)
+fn main() {
+ if let Ok(config_rs) = std::env::var("DEP_PYTHONXY_DEFAULT_PYTHON_CONFIG_RS") {
+ println!(
+ "cargo:rustc-env=PYOXIDIZER_DEFAULT_PYTHON_CONFIG_RS={}",
+ config_rs
);
- }
-
- let stdout = String::from_utf8_lossy(&out.stdout);
- let mut m = HashMap::new();
-
- for entry in stdout.split(separator) {
- let mut parts = entry.splitn(2, "=");
- let key = parts.next().unwrap();
- let value = parts.next().unwrap();
- m.insert(String::from(key), String::from(value));
- }
-
- PythonConfig {
- python: python,
- config: m,
+ } else {
+ panic!("unable to find build artifacts generated by pyembed crate");
}
}
-
-#[cfg(not(target_os = "windows"))]
-fn have_shared(config: &PythonConfig) -> bool {
- match config.config.get("Py_ENABLE_SHARED") {
- Some(value) => value == "1",
- None => false,
- }
-}
-
-#[cfg(target_os = "windows")]
-fn have_shared(config: &PythonConfig) -> bool {
- use std::path::PathBuf;
-
- // python27.dll should exist next to python2.7.exe.
- let mut dll = PathBuf::from(&config.python);
- dll.pop();
- dll.push("python27.dll");
-
- return dll.exists();
-}
-
-const REQUIRED_CONFIG_FLAGS: [&str; 2] = ["Py_USING_UNICODE", "WITH_THREAD"];
-
-fn main() {
- let config = get_python_config();
-
- println!("Using Python: {}", config.python);
- println!("cargo:rustc-env=PYTHON_INTERPRETER={}", config.python);
-
- let prefix = config.config.get("prefix").unwrap();
-
- println!("Prefix: {}", prefix);
-
- // TODO Windows builds don't expose these config flags. Figure out another
- // way.
- #[cfg(not(target_os = "windows"))]
- for key in REQUIRED_CONFIG_FLAGS.iter() {
- let result = match config.config.get(*key) {
- Some(value) => value == "1",
- None => false,
- };
-
- if !result {
- panic!("Detected Python requires feature {}", key);
- }
- }
-
- // We need a Python shared library.
- if !have_shared(&config) {
- panic!("Detected Python lacks a shared library, which is required");
- }
-
- let ucs4 = match config.config.get("Py_UNICODE_SIZE") {
- Some(value) => value == "4",
- None => false,
- };
-
- if !ucs4 {
- #[cfg(not(target_os = "windows"))]
- panic!("Detected Python doesn't support UCS-4 code points");
- }
-}
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/rust/hgcli/pyoxidizer.bzl Thu Apr 16 22:51:09 2020 +0530
@@ -0,0 +1,59 @@
+ROOT = CWD + "/../.."
+
+def make_exe():
+ dist = default_python_distribution()
+
+ code = "import hgdemandimport; hgdemandimport.enable(); from mercurial import dispatch; dispatch.run()"
+
+ config = PythonInterpreterConfig(
+ raw_allocator = "system",
+ run_eval = code,
+ # We want to let the user load extensions from the file system
+ filesystem_importer = True,
+ # We need this to make resourceutil happy, since it looks for sys.frozen.
+ sys_frozen = True,
+ legacy_windows_stdio = True,
+ )
+
+ exe = dist.to_python_executable(
+ name = "hg",
+ resources_policy = "prefer-in-memory-fallback-filesystem-relative:lib",
+ config = config,
+ # Extension may depend on any Python functionality. Include all
+ # extensions.
+ extension_module_filter = "all",
+ )
+
+ exe.add_python_resources(dist.pip_install([ROOT]))
+
+ return exe
+
+def make_install(exe):
+ m = FileManifest()
+
+ # `hg` goes in root directory.
+ m.add_python_resource(".", exe)
+
+ templates = glob(
+ include = [ROOT + "/mercurial/templates/**/*"],
+ strip_prefix = ROOT + "/mercurial/",
+ )
+ m.add_manifest(templates)
+
+ return m
+
+def make_embedded_resources(exe):
+ return exe.to_embedded_resources()
+
+register_target("exe", make_exe)
+register_target("app", make_install, depends = ["exe"], default = True)
+register_target("embedded", make_embedded_resources, depends = ["exe"], default_build_script = True)
+resolve_targets()
+
+# END OF COMMON USER-ADJUSTED SETTINGS.
+#
+# Everything below this is typically managed by PyOxidizer and doesn't need
+# to be updated by people.
+
+PYOXIDIZER_VERSION = "0.7.0-pre"
+PYOXIDIZER_COMMIT = "c772a1379c3026314eda1c8ea244b86c0658951d"
--- a/rust/hgcli/src/main.rs Mon Apr 13 16:30:13 2020 +0300
+++ b/rust/hgcli/src/main.rs Thu Apr 16 22:51:09 2020 +0530
@@ -1,219 +1,38 @@
-// main.rs -- Main routines for `hg` program
-//
-// Copyright 2017 Gregory Szorc <gregory.szorc@gmail.com>
-//
-// This software may be used and distributed according to the terms of the
-// GNU General Public License version 2 or any later version.
-
-extern crate cpython;
-extern crate libc;
-extern crate python27_sys;
-
-use cpython::{NoArgs, ObjectProtocol, PyModule, PyResult, Python};
-use libc::{c_char, c_int};
-
-use std::env;
-use std::ffi::{CString, OsStr};
-#[cfg(target_family = "unix")]
-use std::os::unix::ffi::{OsStrExt, OsStringExt};
-use std::path::PathBuf;
-
-#[derive(Debug)]
-struct Environment {
- _exe: PathBuf,
- python_exe: PathBuf,
- python_home: PathBuf,
- mercurial_modules: PathBuf,
-}
-
-/// Run Mercurial locally from a source distribution or checkout.
-///
-/// hg is <srcdir>/rust/target/<target>/hg
-/// Python interpreter is detected by build script.
-/// Python home is relative to Python interpreter.
-/// Mercurial files are relative to hg binary, which is relative to source root.
-#[cfg(feature = "localdev")]
-fn get_environment() -> Environment {
- let exe = env::current_exe().unwrap();
-
- let mut mercurial_modules = exe.clone();
- mercurial_modules.pop(); // /rust/target/<target>
- mercurial_modules.pop(); // /rust/target
- mercurial_modules.pop(); // /rust
- mercurial_modules.pop(); // /
-
- let python_exe: &'static str = env!("PYTHON_INTERPRETER");
- let python_exe = PathBuf::from(python_exe);
-
- let mut python_home = python_exe.clone();
- python_home.pop();
-
- // On Windows, python2.7.exe exists at the root directory of the Python
- // install. Everywhere else, the Python install root is one level up.
- if !python_exe.ends_with("python2.7.exe") {
- python_home.pop();
- }
-
- Environment {
- _exe: exe.clone(),
- python_exe: python_exe,
- python_home: python_home,
- mercurial_modules: mercurial_modules.to_path_buf(),
- }
-}
-
-// On UNIX, platform string is just bytes and should not contain NUL.
-#[cfg(target_family = "unix")]
-fn cstring_from_os<T: AsRef<OsStr>>(s: T) -> CString {
- CString::new(s.as_ref().as_bytes()).unwrap()
-}
-
-// TODO convert to ANSI characters?
-#[cfg(target_family = "windows")]
-fn cstring_from_os<T: AsRef<OsStr>>(s: T) -> CString {
- CString::new(s.as_ref().to_str().unwrap()).unwrap()
-}
-
-// On UNIX, argv starts as an array of char*. So it is easy to convert
-// to C strings.
-#[cfg(target_family = "unix")]
-fn args_to_cstrings() -> Vec<CString> {
- env::args_os()
- .map(|a| CString::new(a.into_vec()).unwrap())
- .collect()
-}
-
-// TODO Windows support is incomplete. We should either use env::args_os()
-// (or call into GetCommandLineW() + CommandLinetoArgvW()), convert these to
-// PyUnicode instances, and pass these into Python/Mercurial outside the
-// standard PySys_SetArgvEx() mechanism. This will allow us to preserve the
-// raw bytes (since PySys_SetArgvEx() is based on char* and can drop wchar
-// data.
-//
-// For now, we use env::args(). This will choke on invalid UTF-8 arguments.
-// But it is better than nothing.
-#[cfg(target_family = "windows")]
-fn args_to_cstrings() -> Vec<CString> {
- env::args().map(|a| CString::new(a).unwrap()).collect()
-}
+use pyembed::MainPythonInterpreter;
-fn set_python_home(env: &Environment) {
- let raw = cstring_from_os(&env.python_home).into_raw();
- unsafe {
- python27_sys::Py_SetPythonHome(raw);
- }
-}
-
-fn update_modules_path(env: &Environment, py: Python, sys_mod: &PyModule) {
- let sys_path = sys_mod.get(py, "path").unwrap();
- sys_path
- .call_method(py, "insert", (0, env.mercurial_modules.to_str()), None)
- .expect("failed to update sys.path to location of Mercurial modules");
-}
-
-fn run() -> Result<(), i32> {
- let env = get_environment();
-
- //println!("{:?}", env);
-
- // Tell Python where it is installed.
- set_python_home(&env);
-
- // Set program name. The backing memory needs to live for the duration of the
- // interpreter.
- //
- // TODO consider storing this in a static or associating with lifetime of
- // the Python interpreter.
- //
- // Yes, we use the path to the Python interpreter not argv[0] here. The
- // reason is because Python uses the given path to find the location of
- // Python files. Apparently we could define our own ``Py_GetPath()``
- // implementation. But this may require statically linking Python, which is
- // not desirable.
- let program_name = cstring_from_os(&env.python_exe).as_ptr();
- unsafe {
- python27_sys::Py_SetProgramName(program_name as *mut i8);
- }
-
- unsafe {
- python27_sys::Py_Initialize();
- }
-
- // https://docs.python.org/2/c-api/init.html#c.PySys_SetArgvEx has important
- // usage information about PySys_SetArgvEx:
- //
- // * It says the first argument should be the script that is being executed.
- // If not a script, it can be empty. We are definitely not a script.
- // However, parts of Mercurial do look at sys.argv[0]. So we need to set
- // something here.
- //
- // * When embedding Python, we should use ``PySys_SetArgvEx()`` and set
- // ``updatepath=0`` for security reasons. Essentially, Python's default
- // logic will treat an empty argv[0] in a manner that could result in
- // sys.path picking up directories it shouldn't and this could lead to
- // loading untrusted modules.
-
- // env::args() will panic if it sees a non-UTF-8 byte sequence. And
- // Mercurial supports arbitrary encodings of input data. So we need to
- // use OS-specific mechanisms to get the raw bytes without UTF-8
- // interference.
- let args = args_to_cstrings();
- let argv: Vec<*const c_char> = args.iter().map(|a| a.as_ptr()).collect();
-
- unsafe {
- python27_sys::PySys_SetArgvEx(args.len() as c_int, argv.as_ptr() as *mut *mut i8, 0);
- }
-
- let result;
- {
- // These need to be dropped before we call Py_Finalize(). Hence the
- // block.
- let gil = Python::acquire_gil();
- let py = gil.python();
-
- // Mercurial code could call sys.exit(), which will call exit()
- // itself. So this may not return.
- // TODO this may cause issues on Windows due to the CRT mismatch.
- // Investigate if we can intercept sys.exit() or SystemExit() to
- // ensure we handle process exit.
- result = match run_py(&env, py) {
- // Print unhandled exceptions and exit code 255, as this is what
- // `python` does.
- Err(err) => {
- err.print(py);
- Err(255)
- }
- Ok(()) => Ok(()),
- };
- }
-
- unsafe {
- python27_sys::Py_Finalize();
- }
-
- result
-}
-
-fn run_py(env: &Environment, py: Python) -> PyResult<()> {
- let sys_mod = py.import("sys").unwrap();
-
- update_modules_path(&env, py, &sys_mod);
-
- // TODO consider a better error message on failure to import.
- let demand_mod = py.import("hgdemandimport")?;
- demand_mod.call(py, "enable", NoArgs, None)?;
-
- let dispatch_mod = py.import("mercurial.dispatch")?;
- dispatch_mod.call(py, "run", NoArgs, None)?;
-
- Ok(())
-}
+// Include an auto-generated file containing the default
+// `pyembed::PythonConfig` derived by the PyOxidizer configuration file.
+//
+// If you do not want to use PyOxidizer to generate this file, simply
+// remove this line and instantiate your own instance of
+// `pyembed::PythonConfig`.
+include!(env!("PYOXIDIZER_DEFAULT_PYTHON_CONFIG_RS"));
fn main() {
- let exit_code = match run() {
- Err(err) => err,
- Ok(()) => 0,
+ // The following code is in a block so the MainPythonInterpreter is destroyed in an
+ // orderly manner, before process exit.
+ let code = {
+ // Load the default Python configuration as derived by the PyOxidizer config
+ // file used at build time.
+ let config = default_python_config();
+
+ // Construct a new Python interpreter using that config, handling any errors
+ // from construction.
+ match MainPythonInterpreter::new(config) {
+ Ok(mut interp) => {
+ // And run it using the default run configuration as specified by the
+ // configuration. If an uncaught Python exception is raised, handle it.
+ // This includes the special SystemExit, which is a request to terminate the
+ // process.
+ interp.run_as_main()
+ }
+ Err(msg) => {
+ eprintln!("{}", msg);
+ 1
+ }
+ }
};
- std::process::exit(exit_code);
+ // And exit the process according to code execution results.
+ std::process::exit(code);
}
--- a/setup.py Mon Apr 13 16:30:13 2020 +0300
+++ b/setup.py Thu Apr 16 22:51:09 2020 +0530
@@ -3,7 +3,6 @@
#
# 'python setup.py install', or
# 'python setup.py --help' for more options
-
import os
# Mercurial will never work on Python 3 before 3.5 due to a lack
@@ -137,12 +136,6 @@
ispypy = "PyPy" in sys.version
-hgrustext = os.environ.get('HGWITHRUSTEXT')
-# TODO record it for proper rebuild upon changes
-# (see mercurial/__modulepolicy__.py)
-if hgrustext != 'cpython' and hgrustext is not None:
- hgrustext = 'direct-ffi'
-
import ctypes
import errno
import stat, subprocess, time
@@ -323,7 +316,7 @@
# gives precedence to hg.exe in the current directory, so fall back to the
# python invocation of local hg, where pythonXY.dll can always be found.
check_cmd = ['log', '-r.', '-Ttest']
- if os.name != 'nt':
+ if os.name != 'nt' or not os.path.exists("hg.exe"):
try:
retcode, out, err = runcmd(hgcmd + check_cmd, hgenv)
except EnvironmentError:
@@ -477,14 +470,49 @@
class hgdist(Distribution):
pure = False
- rust = hgrustext is not None
+ rust = False
+ no_rust = False
cffi = ispypy
global_options = Distribution.global_options + [
('pure', None, "use pure (slow) Python code instead of C extensions"),
('rust', None, "use Rust extensions additionally to C extensions"),
+ (
+ 'no-rust',
+ None,
+ "do not use Rust extensions additionally to C extensions",
+ ),
]
+ negative_opt = Distribution.negative_opt.copy()
+ boolean_options = ['pure', 'rust', 'no-rust']
+ negative_opt['no-rust'] = 'rust'
+
+ def _set_command_options(self, command_obj, option_dict=None):
+ # Not all distutils versions in the wild have boolean_options.
+ # This should be cleaned up when we're Python 3 only.
+ command_obj.boolean_options = (
+ getattr(command_obj, 'boolean_options', []) + self.boolean_options
+ )
+ return Distribution._set_command_options(
+ self, command_obj, option_dict=option_dict
+ )
+
+ def parse_command_line(self):
+ ret = Distribution.parse_command_line(self)
+ if not (self.rust or self.no_rust):
+ hgrustext = os.environ.get('HGWITHRUSTEXT')
+ # TODO record it for proper rebuild upon changes
+ # (see mercurial/__modulepolicy__.py)
+ if hgrustext != 'cpython' and hgrustext is not None:
+ if hgrustext:
+ msg = 'unknown HGWITHRUSTEXT value: %s' % hgrustext
+ printf(msg, file=sys.stderr)
+ hgrustext = None
+ self.rust = hgrustext is not None
+ self.no_rust = not self.rust
+ return ret
+
def has_ext_modules(self):
# self.ext_modules is emptied in hgbuildpy.finalize_options which is
# too late for some cases
@@ -543,7 +571,7 @@
# Build Rust standalon extensions if it'll be used
# and its build is not explictely disabled (for external build
# as Linux distributions would do)
- if self.distribution.rust and self.rust and hgrustext != 'direct-ffi':
+ if self.distribution.rust and self.rust:
for rustext in ruststandalones:
rustext.build('' if self.inplace else self.build_lib)
@@ -935,11 +963,11 @@
normalizecrlf('doc/%s.html' % root)
# This logic is duplicated in doc/Makefile.
- sources = set(
+ sources = {
f
for f in os.listdir('mercurial/helptext')
if re.search(r'[0-9]\.txt$', f)
- )
+ }
# common.txt is a one-off.
gentxt('common')
@@ -979,7 +1007,7 @@
# Screen out egg related commands to prevent egg generation. But allow
# mercurial.egg-info generation, since that is part of modern
# packaging.
- excl = set(['bdist_egg'])
+ excl = {'bdist_egg'}
return filter(lambda x: x not in excl, install.get_sub_commands(self))
@@ -1211,7 +1239,9 @@
'hgext.fsmonitor',
'hgext.fastannotate',
'hgext.fsmonitor.pywatchman',
+ 'hgext.git',
'hgext.highlight',
+ 'hgext.hooklib',
'hgext.infinitepush',
'hgext.largefiles',
'hgext.lfs',
@@ -1242,6 +1272,13 @@
]
common_include_dirs = ['mercurial']
+common_cflags = []
+
+# MSVC 2008 still needs declarations at the top of the scope, but Python 3.9
+# makes declarations not at the top of a scope in the headers.
+if os.name != 'nt' and sys.version_info[1] < 9:
+ common_cflags = ['-Werror=declaration-after-statement']
+
osutil_cflags = []
osutil_ldflags = []
@@ -1356,10 +1393,19 @@
env['HOME'] = pwd.getpwuid(os.getuid()).pw_dir
cargocmd = ['cargo', 'rustc', '-vv', '--release']
+
+ feature_flags = []
+
if sys.version_info[0] == 3 and self.py3_features is not None:
- cargocmd.extend(
- ('--features', self.py3_features, '--no-default-features')
- )
+ feature_flags.append(self.py3_features)
+ cargocmd.append('--no-default-features')
+
+ rust_features = env.get("HG_RUST_FEATURES")
+ if rust_features:
+ feature_flags.append(rust_features)
+
+ cargocmd.extend(('--features', " ".join(feature_flags)))
+
cargocmd.append('--')
if sys.platform == 'darwin':
cargocmd.extend(
@@ -1384,29 +1430,6 @@
)
-class RustEnhancedExtension(RustExtension):
- """A C Extension, conditionally enhanced with Rust code.
-
- If the HGWITHRUSTEXT environment variable is set to something else
- than 'cpython', the Rust sources get compiled and linked within
- the C target shared library object.
- """
-
- def __init__(self, mpath, sources, rustlibname, subcrate, **kw):
- RustExtension.__init__(
- self, mpath, sources, rustlibname, subcrate, **kw
- )
- if hgrustext != 'direct-ffi':
- return
- self.extra_compile_args.append('-DWITH_RUST')
- self.libraries.append(rustlibname)
- self.library_dirs.append(self.rusttargetdir)
-
- def rustbuild(self):
- if hgrustext == 'direct-ffi':
- RustExtension.rustbuild(self)
-
-
class RustStandaloneExtension(RustExtension):
def __init__(self, pydottedname, rustcrate, dylibname, **kw):
RustExtension.__init__(
@@ -1432,21 +1455,24 @@
'mercurial.cext.base85',
['mercurial/cext/base85.c'],
include_dirs=common_include_dirs,
+ extra_compile_args=common_cflags,
depends=common_depends,
),
Extension(
'mercurial.cext.bdiff',
['mercurial/bdiff.c', 'mercurial/cext/bdiff.c'] + xdiff_srcs,
include_dirs=common_include_dirs,
+ extra_compile_args=common_cflags,
depends=common_depends + ['mercurial/bdiff.h'] + xdiff_headers,
),
Extension(
'mercurial.cext.mpatch',
['mercurial/mpatch.c', 'mercurial/cext/mpatch.c'],
include_dirs=common_include_dirs,
+ extra_compile_args=common_cflags,
depends=common_depends,
),
- RustEnhancedExtension(
+ Extension(
'mercurial.cext.parsers',
[
'mercurial/cext/charencode.c',
@@ -1456,22 +1482,16 @@
'mercurial/cext/pathencode.c',
'mercurial/cext/revlog.c',
],
- 'hgdirectffi',
- 'hg-direct-ffi',
include_dirs=common_include_dirs,
+ extra_compile_args=common_cflags,
depends=common_depends
- + [
- 'mercurial/cext/charencode.h',
- 'mercurial/cext/revlog.h',
- 'rust/hg-core/src/ancestors.rs',
- 'rust/hg-core/src/lib.rs',
- ],
+ + ['mercurial/cext/charencode.h', 'mercurial/cext/revlog.h',],
),
Extension(
'mercurial.cext.osutil',
['mercurial/cext/osutil.c'],
include_dirs=common_include_dirs,
- extra_compile_args=osutil_cflags,
+ extra_compile_args=common_cflags + osutil_cflags,
extra_link_args=osutil_ldflags,
depends=common_depends,
),
@@ -1480,6 +1500,7 @@
[
'mercurial/thirdparty/zope/interface/_zope_interface_coptimizations.c',
],
+ extra_compile_args=common_cflags,
),
Extension(
'mercurial.thirdparty.sha1dc',
@@ -1488,9 +1509,12 @@
'mercurial/thirdparty/sha1dc/lib/sha1.c',
'mercurial/thirdparty/sha1dc/lib/ubc_check.c',
],
+ extra_compile_args=common_cflags,
),
Extension(
- 'hgext.fsmonitor.pywatchman.bser', ['hgext/fsmonitor/pywatchman/bser.c']
+ 'hgext.fsmonitor.pywatchman.bser',
+ ['hgext/fsmonitor/pywatchman/bser.c'],
+ extra_compile_args=common_cflags,
),
RustStandaloneExtension(
'mercurial.rustext', 'hg-cpython', 'librusthg', py3_features='python3'
@@ -1501,11 +1525,11 @@
sys.path.insert(0, 'contrib/python-zstandard')
import setup_zstd
-extmodules.append(
- setup_zstd.get_c_extension(
- name='mercurial.zstd', root=os.path.abspath(os.path.dirname(__file__))
- )
+zstd = setup_zstd.get_c_extension(
+ name='mercurial.zstd', root=os.path.abspath(os.path.dirname(__file__))
)
+zstd.extra_compile_args += common_cflags
+extmodules.append(zstd)
try:
from distutils import cygwinccompiler
--- a/tests/common-pattern.py Mon Apr 13 16:30:13 2020 +0300
+++ b/tests/common-pattern.py Thu Apr 16 22:51:09 2020 +0530
@@ -12,6 +12,7 @@
br'bundlecaps=HG20%2Cbundle2%3DHG20%250A'
br'bookmarks%250A'
br'changegroup%253D01%252C02%250A'
+ br'checkheads%253Drelated%250A'
br'digests%253Dmd5%252Csha1%252Csha512%250A'
br'error%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250A'
br'hgtagsfnodes%250A'
@@ -28,6 +29,7 @@
br'bundlecaps=HG20%2Cbundle2%3DHG20%250A'
br'bookmarks%250A'
br'changegroup%253D01%252C02%250A'
+ br'checkheads%253Drelated%250A'
br'digests%253Dmd5%252Csha1%252Csha512%250A'
br'error%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250A'
br'hgtagsfnodes%250A'
@@ -43,6 +45,7 @@
br'bundle2=HG20%0A'
br'bookmarks%0A'
br'changegroup%3D01%2C02%0A'
+ br'checkheads%3Drelated%0A'
br'digests%3Dmd5%2Csha1%2Csha512%0A'
br'error%3Dabort%2Cunsupportedcontent%2Cpushraced%2Cpushkey%0A'
br'hgtagsfnodes%0A'
@@ -60,6 +63,7 @@
br'bundle2=HG20%0A'
br'bookmarks%0A'
br'changegroup%3D01%2C02%0A'
+ br'checkheads%3Drelated%0A'
br'digests%3Dmd5%2Csha1%2Csha512%0A'
br'error%3Dabort%2Cunsupportedcontent%2Cpushraced%2Cpushkey%0A'
br'hgtagsfnodes%0A'
--- a/tests/drawdag.py Mon Apr 13 16:30:13 2020 +0300
+++ b/tests/drawdag.py Thu Apr 16 22:51:09 2020 +0530
@@ -438,13 +438,13 @@
if cmd in (b'replace', b'rebase', b'amend'):
nodes = [getctx(m) for m in arg.split(b'->')]
for i in range(len(nodes) - 1):
- rels.append((nodes[i], (nodes[i + 1],)))
+ rels.append(((nodes[i],), (nodes[i + 1],)))
elif cmd in (b'split',):
pre, succs = arg.split(b'->')
succs = succs.split(b',')
- rels.append((getctx(pre), (getctx(s) for s in succs)))
+ rels.append(((getctx(pre),), (getctx(s) for s in succs)))
elif cmd in (b'prune',):
for n in arg.split(b','):
- rels.append((getctx(n), ()))
+ rels.append(((getctx(n),), ()))
if rels:
obsolete.createmarkers(repo, rels, date=(0, 0), operation=cmd)
--- a/tests/filtertraceback.py Mon Apr 13 16:30:13 2020 +0300
+++ b/tests/filtertraceback.py Thu Apr 16 22:51:09 2020 +0530
@@ -4,8 +4,19 @@
from __future__ import absolute_import, print_function
+import io
import sys
+if sys.version_info[0] >= 3:
+ # Prevent \r from being inserted on Windows.
+ sys.stdout = io.TextIOWrapper(
+ sys.stdout.buffer,
+ sys.stdout.encoding,
+ sys.stdout.errors,
+ newline="\n",
+ line_buffering=sys.stdout.line_buffering,
+ )
+
state = 'none'
for line in sys.stdin:
--- a/tests/fsmonitor-run-tests.py Mon Apr 13 16:30:13 2020 +0300
+++ b/tests/fsmonitor-run-tests.py Thu Apr 16 22:51:09 2020 +0530
@@ -30,7 +30,7 @@
PYTHON3 = True
xrange = range # we use xrange in one place, and we'd rather not use range
- def _bytespath(p):
+ def _sys2bytes(p):
return p.encode('utf-8')
@@ -47,7 +47,7 @@
# bytestrings by default, so we don't have to do any extra
# fiddling there. We define the wrapper functions anyway just to
# help keep code consistent between platforms.
- def _bytespath(p):
+ def _sys2bytes(p):
return p
@@ -107,7 +107,7 @@
]
envb = osenvironb.copy()
- envb[b'WATCHMAN_CONFIG_FILE'] = _bytespath(cfgfile)
+ envb[b'WATCHMAN_CONFIG_FILE'] = _sys2bytes(cfgfile)
with open(clilogfile, 'wb') as f:
proc = subprocess.Popen(
argv, env=envb, stdin=None, stdout=f, stderr=f
@@ -129,7 +129,7 @@
args, runtestsargv = parser.parse_known_args()
with watchman(args) as sockfile:
- osenvironb[b'WATCHMAN_SOCK'] = _bytespath(sockfile)
+ osenvironb[b'WATCHMAN_SOCK'] = _sys2bytes(sockfile)
# Indicate to hghave that we're running with fsmonitor enabled.
osenvironb[b'HGFSMONITOR_TESTS'] = b'1'
--- a/tests/hghave.py Mon Apr 13 16:30:13 2020 +0300
+++ b/tests/hghave.py Thu Apr 16 22:51:09 2020 +0530
@@ -29,12 +29,12 @@
if sys.version_info[0] >= 3:
- def _bytespath(p):
+ def _sys2bytes(p):
if p is None:
return p
return p.encode('utf-8')
- def _strpath(p):
+ def _bytes2sys(p):
if p is None:
return p
return p.decode('utf-8')
@@ -42,10 +42,10 @@
else:
- def _bytespath(p):
+ def _sys2bytes(p):
return p
- _strpath = _bytespath
+ _bytes2sys = _sys2bytes
def check(name, desc):
@@ -307,13 +307,23 @@
return False
-def gethgversion():
+def _gethgversion():
m = matchoutput('hg --version --quiet 2>&1', br'(\d+)\.(\d+)')
if not m:
return (0, 0)
return (int(m.group(1)), int(m.group(2)))
+_hgversion = None
+
+
+def gethgversion():
+ global _hgversion
+ if _hgversion is None:
+ _hgversion = _gethgversion()
+ return _hgversion
+
+
@checkvers(
"hg", "Mercurial >= %s", list([(1.0 * x) / 10 for x in range(9, 99)])
)
@@ -322,6 +332,17 @@
return gethgversion() >= (int(major), int(minor))
+@check("rust", "Using the Rust extensions")
+def has_rust():
+ """Check is the mercurial currently running is using some rust code"""
+ cmd = 'hg debuginstall --quiet 2>&1'
+ match = br'checking module policy \(([^)]+)\)'
+ policy = matchoutput(cmd, match)
+ if not policy:
+ return False
+ return b'rust' in policy.group(1)
+
+
@check("hg08", "Mercurial >= 0.8")
def has_hg08():
if checks["hg09"][0]():
@@ -360,6 +381,17 @@
return (int(m.group(1)), int(m.group(2)))
+@check("pygit2", "pygit2 Python library")
+def has_git():
+ try:
+ import pygit2
+
+ pygit2.Oid # silence unused import
+ return True
+ except ImportError:
+ return False
+
+
# https://github.com/git-lfs/lfs-test-server
@check("lfs-test-server", "git-lfs test server")
def has_lfsserver():
@@ -451,7 +483,7 @@
os.close(fh)
name = tempfile.mktemp(dir='.', prefix=tempprefix)
try:
- util.oslink(_bytespath(fn), _bytespath(name))
+ util.oslink(_sys2bytes(fn), _sys2bytes(name))
os.unlink(name)
return True
except OSError:
@@ -542,11 +574,14 @@
@check("pyflakes", "Pyflakes python linter")
def has_pyflakes():
- return matchoutput(
- "sh -c \"echo 'import re' 2>&1 | pyflakes\"",
- br"<stdin>:1: 're' imported but unused",
- True,
- )
+ try:
+ import pyflakes
+
+ pyflakes.__version__
+ except ImportError:
+ return False
+ else:
+ return True
@check("pylint", "Pylint python linter")
@@ -685,7 +720,7 @@
curses.COLOR_BLUE
return matchoutput('test -x "`which tic`"', br'')
- except ImportError:
+ except (ImportError, AttributeError):
return False
@@ -1022,7 +1057,7 @@
version_regex = b'black, version ([0-9a-b.]+)'
version = matchoutput(blackcmd, version_regex)
sv = distutils.version.StrictVersion
- return version and sv(_strpath(version.group(1))) >= sv('19.10b0')
+ return version and sv(_bytes2sys(version.group(1))) >= sv('19.10b0')
@check('pytype', 'the pytype type checker')
@@ -1030,7 +1065,7 @@
pytypecmd = 'pytype --version'
version = matchoutput(pytypecmd, b'[0-9a-b.]+')
sv = distutils.version.StrictVersion
- return version and sv(_strpath(version.group(0))) >= sv('2019.10.17')
+ return version and sv(_bytes2sys(version.group(0))) >= sv('2019.10.17')
@check("rustfmt", "rustfmt tool")
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/phabricator/phabimport-multi-drev.json Thu Apr 16 22:51:09 2020 +0530
@@ -0,0 +1,277 @@
+{
+ "version": 1,
+ "interactions": [
+ {
+ "response": {
+ "status": {
+ "message": "OK",
+ "code": 200
+ },
+ "body": {
+ "string": "{\"result\":[{\"id\":\"7918\",\"phid\":\"PHID-DREV-sfsckrwrwc77rdl3k5rz\",\"title\":\"create draft change for phabricator testing\",\"uri\":\"https:\\/\\/phab.mercurial-scm.org\\/D7918\",\"dateCreated\":\"1579221164\",\"dateModified\":\"1579222305\",\"authorPHID\":\"PHID-USER-tzhaient733lwrlbcag5\",\"status\":\"4\",\"statusName\":\"Abandoned\",\"properties\":{\"draft.broadcast\":true,\"lines.added\":1,\"lines.removed\":2},\"branch\":\"default\",\"summary\":\"\",\"testPlan\":\"\",\"lineCount\":\"3\",\"activeDiffPHID\":\"PHID-DIFF-pqdlhei24n47fzeofjph\",\"diffs\":[\"19394\"],\"commits\":[],\"reviewers\":{\"PHID-PROJ-3dvcxzznrjru2xmmses3\":\"PHID-PROJ-3dvcxzznrjru2xmmses3\"},\"ccs\":[\"PHID-USER-q42dn7cc3donqriafhjx\"],\"hashes\":[[\"hgcm\",\"\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\"]],\"auxiliary\":{\"phabricator:projects\":[],\"phabricator:depends-on\":[\"PHID-DREV-yhl3yvijs4jploa5iqm4\"]},\"repositoryPHID\":\"PHID-REPO-bvunnehri4u2isyr7bc3\",\"sourcePath\":\"\\/\"},{\"id\":\"7917\",\"phid\":\"PHID-DREV-yhl3yvijs4jploa5iqm4\",\"title\":\"create public change for phabricator 
testing\",\"uri\":\"https:\\/\\/phab.mercurial-scm.org\\/D7917\",\"dateCreated\":\"1579221160\",\"dateModified\":\"1579222286\",\"authorPHID\":\"PHID-USER-tzhaient733lwrlbcag5\",\"status\":\"4\",\"statusName\":\"Abandoned\",\"properties\":{\"draft.broadcast\":true,\"lines.added\":1,\"lines.removed\":1},\"branch\":\"default\",\"summary\":\"\",\"testPlan\":\"\",\"lineCount\":\"2\",\"activeDiffPHID\":\"PHID-DIFF-e64weyerxtutv2jvj2dt\",\"diffs\":[\"19393\"],\"commits\":[],\"reviewers\":{\"PHID-PROJ-3dvcxzznrjru2xmmses3\":\"PHID-PROJ-3dvcxzznrjru2xmmses3\"},\"ccs\":[\"PHID-USER-q42dn7cc3donqriafhjx\"],\"hashes\":[[\"hgcm\",\"\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\"]],\"auxiliary\":{\"phabricator:projects\":[],\"phabricator:depends-on\":[]},\"repositoryPHID\":\"PHID-REPO-bvunnehri4u2isyr7bc3\",\"sourcePath\":\"\\/\"}],\"error_code\":null,\"error_info\":null}"
+ },
+ "headers": {
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
+ ],
+ "transfer-encoding": [
+ "chunked"
+ ],
+ "strict-transport-security": [
+ "max-age=0; includeSubdomains; preload"
+ ],
+ "x-xss-protection": [
+ "1; mode=block"
+ ],
+ "date": [
+ "Wed, 04 Mar 2020 22:52:46 GMT"
+ ],
+ "referrer-policy": [
+ "no-referrer"
+ ],
+ "content-type": [
+ "application/json"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "x-content-type-options": [
+ "nosniff"
+ ],
+ "cache-control": [
+ "no-store"
+ ],
+ "server": [
+ "Apache/2.4.10 (Debian)"
+ ]
+ }
+ },
+ "request": {
+ "method": "POST",
+ "uri": "https://phab.mercurial-scm.org//api/differential.query",
+ "body": "output=json&params=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22ids%22%3A+%5B7917%2C+7918%5D%7D&__conduit__=1",
+ "headers": {
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.3+33-bb58931d0c4f)"
+ ],
+ "host": [
+ "phab.mercurial-scm.org"
+ ],
+ "content-type": [
+ "application/x-www-form-urlencoded"
+ ],
+ "accept": [
+ "application/mercurial-0.1"
+ ],
+ "content-length": [
+ "154"
+ ]
+ }
+ }
+ },
+ {
+ "response": {
+ "status": {
+ "message": "OK",
+ "code": 200
+ },
+ "body": {
+ "string": "{\"result\":{\"19394\":{\"id\":\"19394\",\"revisionID\":\"7918\",\"dateCreated\":\"1579221161\",\"dateModified\":\"1579221164\",\"sourceControlBaseRevision\":\"7b4185ab5d16acf98e41d566be38c5dbea10878d\",\"sourceControlPath\":\"\\/\",\"sourceControlSystem\":\"hg\",\"branch\":\"default\",\"bookmark\":null,\"creationMethod\":\"phabsend\",\"description\":null,\"unitStatus\":\"0\",\"lintStatus\":\"0\",\"changes\":[{\"id\":\"52927\",\"metadata\":{\"line:first\":1,\"hash.effect\":\"sjHKTvwwqRoW\"},\"oldPath\":\"alpha\",\"currentPath\":\"alpha\",\"awayPaths\":[],\"oldProperties\":[],\"newProperties\":[],\"type\":\"2\",\"fileType\":\"1\",\"commitHash\":null,\"addLines\":\"1\",\"delLines\":\"2\",\"hunks\":[{\"oldOffset\":\"1\",\"newOffset\":\"1\",\"oldLength\":\"2\",\"newLength\":\"1\",\"addLines\":null,\"delLines\":null,\"isMissingOldNewline\":null,\"isMissingNewNewline\":null,\"corpus\":\"-alpha\\n-more\\n+draft change\\n\"}]}],\"properties\":{\"hg:meta\":{\"branch\":\"default\",\"date\":\"0 
0\",\"node\":\"3244dc4a33342b4d91ad534ae091685244ac5ed4\",\"parent\":\"7b4185ab5d16acf98e41d566be38c5dbea10878d\",\"user\":\"test\"},\"local:commits\":{\"3244dc4a33342b4d91ad534ae091685244ac5ed4\":{\"author\":\"test\",\"authorEmail\":\"test\",\"branch\":\"default\",\"commit\":\"3244dc4a33342b4d91ad534ae091685244ac5ed4\",\"parents\":[\"7b4185ab5d16acf98e41d566be38c5dbea10878d\"],\"time\":0}}},\"authorName\":\"test\",\"authorEmail\":\"test\"},\"19393\":{\"id\":\"19393\",\"revisionID\":\"7917\",\"dateCreated\":\"1579221158\",\"dateModified\":\"1579221160\",\"sourceControlBaseRevision\":\"a692622e693757674f85ff481c7ff77057a7f82a\",\"sourceControlPath\":\"\\/\",\"sourceControlSystem\":\"hg\",\"branch\":\"default\",\"bookmark\":null,\"creationMethod\":\"phabsend\",\"description\":null,\"unitStatus\":\"0\",\"lintStatus\":\"0\",\"changes\":[{\"id\":\"52926\",\"metadata\":{\"line:first\":1,\"hash.effect\":\"uKa4JPWhh2di\"},\"oldPath\":\"beta\",\"currentPath\":\"beta\",\"awayPaths\":[],\"oldProperties\":[],\"newProperties\":[],\"type\":\"2\",\"fileType\":\"1\",\"commitHash\":null,\"addLines\":\"1\",\"delLines\":\"1\",\"hunks\":[{\"oldOffset\":\"1\",\"newOffset\":\"1\",\"oldLength\":\"1\",\"newLength\":\"1\",\"addLines\":null,\"delLines\":null,\"isMissingOldNewline\":null,\"isMissingNewNewline\":null,\"corpus\":\"-beta\\n+public change\\n\"}]}],\"properties\":{\"hg:meta\":{\"branch\":\"default\",\"date\":\"0 0\",\"node\":\"7b4185ab5d16acf98e41d566be38c5dbea10878d\",\"parent\":\"a692622e693757674f85ff481c7ff77057a7f82a\",\"user\":\"test\"},\"local:commits\":{\"7b4185ab5d16acf98e41d566be38c5dbea10878d\":{\"author\":\"test\",\"authorEmail\":\"test\",\"branch\":\"default\",\"commit\":\"7b4185ab5d16acf98e41d566be38c5dbea10878d\",\"parents\":[\"a692622e693757674f85ff481c7ff77057a7f82a\"],\"time\":0}}},\"authorName\":\"test\",\"authorEmail\":\"test\"}},\"error_code\":null,\"error_info\":null}"
+ },
+ "headers": {
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
+ ],
+ "transfer-encoding": [
+ "chunked"
+ ],
+ "strict-transport-security": [
+ "max-age=0; includeSubdomains; preload"
+ ],
+ "x-xss-protection": [
+ "1; mode=block"
+ ],
+ "date": [
+ "Wed, 04 Mar 2020 22:52:46 GMT"
+ ],
+ "referrer-policy": [
+ "no-referrer"
+ ],
+ "content-type": [
+ "application/json"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "x-content-type-options": [
+ "nosniff"
+ ],
+ "cache-control": [
+ "no-store"
+ ],
+ "server": [
+ "Apache/2.4.10 (Debian)"
+ ]
+ }
+ },
+ "request": {
+ "method": "POST",
+ "uri": "https://phab.mercurial-scm.org//api/differential.querydiffs",
+ "body": "output=json&params=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22ids%22%3A+%5B19393%2C+19394%5D%7D&__conduit__=1",
+ "headers": {
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.3+33-bb58931d0c4f)"
+ ],
+ "host": [
+ "phab.mercurial-scm.org"
+ ],
+ "content-type": [
+ "application/x-www-form-urlencoded"
+ ],
+ "accept": [
+ "application/mercurial-0.1"
+ ],
+ "content-length": [
+ "156"
+ ]
+ }
+ }
+ },
+ {
+ "response": {
+ "status": {
+ "message": "OK",
+ "code": 200
+ },
+ "body": {
+ "string": "{\"result\":\"diff --git a\\/beta b\\/beta\\n--- a\\/beta\\n+++ b\\/beta\\n@@ -1 +1 @@\\n-beta\\n+public change\\n\\n\",\"error_code\":null,\"error_info\":null}"
+ },
+ "headers": {
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
+ ],
+ "transfer-encoding": [
+ "chunked"
+ ],
+ "strict-transport-security": [
+ "max-age=0; includeSubdomains; preload"
+ ],
+ "x-xss-protection": [
+ "1; mode=block"
+ ],
+ "date": [
+ "Wed, 04 Mar 2020 22:52:47 GMT"
+ ],
+ "referrer-policy": [
+ "no-referrer"
+ ],
+ "content-type": [
+ "application/json"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "x-content-type-options": [
+ "nosniff"
+ ],
+ "cache-control": [
+ "no-store"
+ ],
+ "server": [
+ "Apache/2.4.10 (Debian)"
+ ]
+ }
+ },
+ "request": {
+ "method": "POST",
+ "uri": "https://phab.mercurial-scm.org//api/differential.getrawdiff",
+ "body": "output=json&params=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22diffID%22%3A+19393%7D&__conduit__=1",
+ "headers": {
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.3+33-bb58931d0c4f)"
+ ],
+ "host": [
+ "phab.mercurial-scm.org"
+ ],
+ "content-type": [
+ "application/x-www-form-urlencoded"
+ ],
+ "accept": [
+ "application/mercurial-0.1"
+ ],
+ "content-length": [
+ "144"
+ ]
+ }
+ }
+ },
+ {
+ "response": {
+ "status": {
+ "message": "OK",
+ "code": 200
+ },
+ "body": {
+ "string": "{\"result\":\"diff --git a\\/alpha b\\/alpha\\n--- a\\/alpha\\n+++ b\\/alpha\\n@@ -1,2 +1 @@\\n-alpha\\n-more\\n+draft change\\n\\n\",\"error_code\":null,\"error_info\":null}"
+ },
+ "headers": {
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
+ ],
+ "transfer-encoding": [
+ "chunked"
+ ],
+ "strict-transport-security": [
+ "max-age=0; includeSubdomains; preload"
+ ],
+ "x-xss-protection": [
+ "1; mode=block"
+ ],
+ "date": [
+ "Wed, 04 Mar 2020 22:52:47 GMT"
+ ],
+ "referrer-policy": [
+ "no-referrer"
+ ],
+ "content-type": [
+ "application/json"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "x-content-type-options": [
+ "nosniff"
+ ],
+ "cache-control": [
+ "no-store"
+ ],
+ "server": [
+ "Apache/2.4.10 (Debian)"
+ ]
+ }
+ },
+ "request": {
+ "method": "POST",
+ "uri": "https://phab.mercurial-scm.org//api/differential.getrawdiff",
+ "body": "output=json&params=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22diffID%22%3A+19394%7D&__conduit__=1",
+ "headers": {
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.3+33-bb58931d0c4f)"
+ ],
+ "host": [
+ "phab.mercurial-scm.org"
+ ],
+ "content-type": [
+ "application/x-www-form-urlencoded"
+ ],
+ "accept": [
+ "application/mercurial-0.1"
+ ],
+ "content-length": [
+ "144"
+ ]
+ }
+ }
+ }
+ ]
+}
\ No newline at end of file
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/phabricator/phabimport-stack.json Thu Apr 16 22:51:09 2020 +0530
@@ -0,0 +1,277 @@
+{
+ "version": 1,
+ "interactions": [
+ {
+ "request": {
+ "body": "__conduit__=1&output=json¶ms=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22ids%22%3A+%5B7906%2C+7907%2C+7908%2C+7909%2C+7910%2C+7911%2C+7912%2C+7913%2C+7914%2C+7915%2C+7916%2C+7917%2C+7918%5D%7D",
+ "uri": "https://phab.mercurial-scm.org//api/differential.query",
+ "headers": {
+ "content-length": [
+ "242"
+ ],
+ "host": [
+ "phab.mercurial-scm.org"
+ ],
+ "accept": [
+ "application/mercurial-0.1"
+ ],
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.3+205-75107f1aa427+20200215)"
+ ],
+ "content-type": [
+ "application/x-www-form-urlencoded"
+ ]
+ },
+ "method": "POST"
+ },
+ "response": {
+ "status": {
+ "message": "OK",
+ "code": 200
+ },
+ "headers": {
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
+ ],
+ "transfer-encoding": [
+ "chunked"
+ ],
+ "x-content-type-options": [
+ "nosniff"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "cache-control": [
+ "no-store"
+ ],
+ "content-type": [
+ "application/json"
+ ],
+ "server": [
+ "Apache/2.4.10 (Debian)"
+ ],
+ "strict-transport-security": [
+ "max-age=0; includeSubdomains; preload"
+ ],
+ "date": [
+ "Sun, 16 Feb 2020 20:45:32 GMT"
+ ],
+ "referrer-policy": [
+ "no-referrer"
+ ],
+ "x-xss-protection": [
+ "1; mode=block"
+ ]
+ },
+ "body": {
+ "string": "{\"result\":[{\"id\":\"7914\",\"phid\":\"PHID-DREV-u3iz5rww54i5jrsksnr3\",\"title\":\"rust-matchers: implement `visit_children_set` for `FileMatcher`\",\"uri\":\"https:\\/\\/phab.mercurial-scm.org\\/D7914\",\"dateCreated\":\"1579212591\",\"dateModified\":\"1581399130\",\"authorPHID\":\"PHID-USER-7hh4j4mpuwlnzvkapvse\",\"status\":\"3\",\"statusName\":\"Closed\",\"properties\":{\"draft.broadcast\":true,\"lines.added\":147,\"lines.removed\":5,\"wasAcceptedBeforeClose\":false},\"branch\":null,\"summary\":\"As per the removed inline comment, this will become useful in a future patch\\nin this series as the `IncludeMatcher` is introduced.\",\"testPlan\":\"\",\"lineCount\":\"152\",\"activeDiffPHID\":\"PHID-DIFF-n6cmaq4iwcetzbkkjvje\",\"diffs\":[\"20146\",\"19388\",\"19387\"],\"commits\":[\"PHID-CMIT-zdugtywectjyslokpg45\"],\"reviewers\":{\"PHID-PROJ-3dvcxzznrjru2xmmses3\":\"PHID-PROJ-3dvcxzznrjru2xmmses3\"},\"ccs\":[\"PHID-USER-rskbts6c2kyknc66vlzt\",\"PHID-USER-cgcdlc6c3gpxapbmkwa2\",\"PHID-USER-nqkdtlvq7nwcejrriivx\",\"PHID-USER-q42dn7cc3donqriafhjx\"],\"hashes\":[[\"hgcm\",\"\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\"]],\"auxiliary\":{\"phabricator:projects\":[],\"phabricator:depends-on\":[\"PHID-DREV-lii2vixihcpnnjss3bzp\"]},\"repositoryPHID\":\"PHID-REPO-bvunnehri4u2isyr7bc3\"},{\"id\":\"7907\",\"phid\":\"PHID-DREV-jjmiq6h4ychdtvqh3aqu\",\"title\":\"rebase: always be graft-like, not merge-like, also for 
merges\",\"uri\":\"https:\\/\\/phab.mercurial-scm.org\\/D7907\",\"dateCreated\":\"1579162215\",\"dateModified\":\"1581387772\",\"authorPHID\":\"PHID-USER-rskbts6c2kyknc66vlzt\",\"status\":\"3\",\"statusName\":\"Closed\",\"properties\":{\"draft.broadcast\":true,\"lines.added\":37,\"lines.removed\":96,\"wasAcceptedBeforeClose\":false},\"branch\":null,\"summary\":\"Rebase works by updating to a commit and then grafting changes on\\ntop. However, before this patch, it would actually merge in changes\\ninstead of grafting them in in some cases. That is, it would use the\\ncommon ancestor as base instead of using one of the parents. That\\nseems wrong to me, so I'm changing it so `defineparents()` always\\nreturns a value for `base`.\\n\\nThis fixes the bad behavior in test-rebase-newancestor.t, which was\\nintroduced in 65f215ea3e8e (tests: add test for rebasing merges with\\nancestors of the rebase destination, 2014-11-30).\\n\\nThe difference in test-rebase-dest.t is because the files in the tip\\nrevision were A, D, E, F before this patch and A, D, F, G after it. 
I\\nthink both files should ideally be there.\",\"testPlan\":\"\",\"lineCount\":\"133\",\"activeDiffPHID\":\"PHID-DIFF-xo54almrs3aipnwsjrju\",\"diffs\":[\"20131\",\"20093\",\"19858\",\"19699\",\"19377\",\"19343\"],\"commits\":[\"PHID-CMIT-bflrckeubx66y5jb3h2w\"],\"reviewers\":{\"PHID-PROJ-3dvcxzznrjru2xmmses3\":\"PHID-PROJ-3dvcxzznrjru2xmmses3\"},\"ccs\":[\"PHID-USER-q42dn7cc3donqriafhjx\"],\"hashes\":[[\"hgcm\",\"\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\"]],\"auxiliary\":{\"phabricator:projects\":[],\"phabricator:depends-on\":[\"PHID-DREV-wyjh3r4pzmjaex6k5qtv\"]},\"repositoryPHID\":\"PHID-REPO-bvunnehri4u2isyr7bc3\"},{\"id\":\"7910\",\"phid\":\"PHID-DREV-lii2vixihcpnnjss3bzp\",\"title\":\"rust-re2: add wrapper for calling Re2 from Rust\",\"uri\":\"https:\\/\\/phab.mercurial-scm.org\\/D7910\",\"dateCreated\":\"1579182899\",\"dateModified\":\"1581379671\",\"authorPHID\":\"PHID-USER-7hh4j4mpuwlnzvkapvse\",\"status\":\"3\",\"statusName\":\"Closed\",\"properties\":{\"draft.broadcast\":true,\"lines.added\":195,\"lines.removed\":5,\"wasAcceptedBeforeClose\":false},\"branch\":null,\"summary\":\"This assumes that Re2 is installed following Google's guide. I am not sure\\nhow we want to integrate it in the project, but I think a follow-up patch would\\nbe more appropriate for such work.\\nAs it stands, *not* having Re2 installed results in a compilation error, which\\nis a problem as it breaks install compatibility. 
Hence, this is gated behind\\na non-default `with-re2` compilation feature.\",\"testPlan\":\"\",\"lineCount\":\"200\",\"activeDiffPHID\":\"PHID-DIFF-hvxi3tvelg75fjugmca5\",\"diffs\":[\"20080\",\"20040\",\"19938\",\"19546\",\"19399\",\"19386\",\"19360\"],\"commits\":[\"PHID-CMIT-5tq5dqzc7uvuanxqr7ze\",\"PHID-CMIT-visqfpftvyutaadm73vj\"],\"reviewers\":{\"PHID-PROJ-3dvcxzznrjru2xmmses3\":\"PHID-PROJ-3dvcxzznrjru2xmmses3\",\"PHID-USER-nqkdtlvq7nwcejrriivx\":\"PHID-USER-nqkdtlvq7nwcejrriivx\"},\"ccs\":[\"PHID-USER-cgcdlc6c3gpxapbmkwa2\",\"PHID-USER-nqkdtlvq7nwcejrriivx\",\"PHID-USER-2lpsl6btnf4lltwv7drt\",\"PHID-USER-q42dn7cc3donqriafhjx\"],\"hashes\":[[\"hgcm\",\"\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\"]],\"auxiliary\":{\"phabricator:projects\":[],\"phabricator:depends-on\":[\"PHID-DREV-xkbqk6xlntkrgqn4x62c\"]},\"repositoryPHID\":\"PHID-REPO-bvunnehri4u2isyr7bc3\"},{\"id\":\"7909\",\"phid\":\"PHID-DREV-xkbqk6xlntkrgqn4x62c\",\"title\":\"rust-filepatterns: add support for `include` and `subinclude` patterns\",\"uri\":\"https:\\/\\/phab.mercurial-scm.org\\/D7909\",\"dateCreated\":\"1579174385\",\"dateModified\":\"1581379668\",\"authorPHID\":\"PHID-USER-7hh4j4mpuwlnzvkapvse\",\"status\":\"3\",\"statusName\":\"Closed\",\"properties\":{\"draft.broadcast\":true,\"lines.added\":129,\"lines.removed\":1,\"wasAcceptedBeforeClose\":false},\"branch\":null,\"summary\":\"This prepares a future patch for `IncludeMatcher` on the road to bare\\n`hg status` 
support.\",\"testPlan\":\"\",\"lineCount\":\"130\",\"activeDiffPHID\":\"PHID-DIFF-rjff6a36zcgyoctyaacc\",\"diffs\":[\"20079\",\"20039\",\"19385\",\"19357\"],\"commits\":[\"PHID-CMIT-6egqfyiavkmaq3u6cy7f\",\"PHID-CMIT-5xl5pj2nijmojoenjv47\"],\"reviewers\":{\"PHID-PROJ-3dvcxzznrjru2xmmses3\":\"PHID-PROJ-3dvcxzznrjru2xmmses3\",\"PHID-USER-nqkdtlvq7nwcejrriivx\":\"PHID-USER-nqkdtlvq7nwcejrriivx\"},\"ccs\":[\"PHID-USER-cgcdlc6c3gpxapbmkwa2\",\"PHID-USER-nqkdtlvq7nwcejrriivx\",\"PHID-USER-q42dn7cc3donqriafhjx\"],\"hashes\":[[\"hgcm\",\"\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\"]],\"auxiliary\":{\"phabricator:projects\":[],\"phabricator:depends-on\":[\"PHID-DREV-k74ndkbhbsjoh6vdf6ch\"]},\"repositoryPHID\":\"PHID-REPO-bvunnehri4u2isyr7bc3\"},{\"id\":\"7908\",\"phid\":\"PHID-DREV-k74ndkbhbsjoh6vdf6ch\",\"title\":\"rust-filepatterns: improve API and robustness for pattern files parsing\",\"uri\":\"https:\\/\\/phab.mercurial-scm.org\\/D7908\",\"dateCreated\":\"1579170142\",\"dateModified\":\"1581379666\",\"authorPHID\":\"PHID-USER-7hh4j4mpuwlnzvkapvse\",\"status\":\"3\",\"statusName\":\"Closed\",\"properties\":{\"draft.broadcast\":true,\"lines.added\":245,\"lines.removed\":65,\"wasAcceptedBeforeClose\":false},\"branch\":null,\"summary\":\"Within the next few patches we will be using this new 
API.\",\"testPlan\":\"\",\"lineCount\":\"310\",\"activeDiffPHID\":\"PHID-DIFF-e7c77er3c45mjtkuzmr4\",\"diffs\":[\"20078\",\"20038\",\"19384\",\"19356\",\"19355\"],\"commits\":[\"PHID-CMIT-adevfr2rleerktrzh2zw\",\"PHID-CMIT-2vgwhgqwxfn2x26thcgr\"],\"reviewers\":{\"PHID-PROJ-3dvcxzznrjru2xmmses3\":\"PHID-PROJ-3dvcxzznrjru2xmmses3\",\"PHID-USER-nqkdtlvq7nwcejrriivx\":\"PHID-USER-nqkdtlvq7nwcejrriivx\"},\"ccs\":[\"PHID-USER-cgcdlc6c3gpxapbmkwa2\",\"PHID-USER-nqkdtlvq7nwcejrriivx\",\"PHID-USER-q42dn7cc3donqriafhjx\"],\"hashes\":[[\"hgcm\",\"\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\"]],\"auxiliary\":{\"phabricator:projects\":[],\"phabricator:depends-on\":[\"PHID-DREV-du2y5nvrvr43bahbwaia\"]},\"repositoryPHID\":\"PHID-REPO-bvunnehri4u2isyr7bc3\"},{\"id\":\"7906\",\"phid\":\"PHID-DREV-wyjh3r4pzmjaex6k5qtv\",\"title\":\"rebase: define base in only place in defineparents()\",\"uri\":\"https:\\/\\/phab.mercurial-scm.org\\/D7906\",\"dateCreated\":\"1579162214\",\"dateModified\":\"1580483936\",\"authorPHID\":\"PHID-USER-rskbts6c2kyknc66vlzt\",\"status\":\"3\",\"statusName\":\"Closed\",\"properties\":{\"draft.broadcast\":true,\"lines.added\":10,\"lines.removed\":10,\"wasAcceptedBeforeClose\":true},\"branch\":null,\"summary\":\"Just a little refactoring to prepare for the next 
patch.\",\"testPlan\":\"\",\"lineCount\":\"20\",\"activeDiffPHID\":\"PHID-DIFF-7ihtsunr2rq5htngocse\",\"diffs\":[\"19720\",\"19698\",\"19342\"],\"commits\":[\"PHID-CMIT-jgxpobg6eadntkxz5tpa\",\"PHID-CMIT-jpk5c6pkor7pm63ztmh5\"],\"reviewers\":{\"PHID-PROJ-3dvcxzznrjru2xmmses3\":\"PHID-PROJ-3dvcxzznrjru2xmmses3\",\"PHID-USER-34jnztnonbr4lhwuybwl\":\"PHID-USER-34jnztnonbr4lhwuybwl\"},\"ccs\":[\"PHID-USER-34jnztnonbr4lhwuybwl\",\"PHID-USER-q42dn7cc3donqriafhjx\"],\"hashes\":[[\"hgcm\",\"\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\"]],\"auxiliary\":{\"phabricator:projects\":[],\"phabricator:depends-on\":[\"PHID-DREV-cknqk5y5i26nfwplj6a2\"]},\"repositoryPHID\":\"PHID-REPO-bvunnehri4u2isyr7bc3\"},{\"id\":\"7913\",\"phid\":\"PHID-DREV-s4borg2nl7ay2mskktwq\",\"title\":\"cext: fix compiler warning about sign changing\",\"uri\":\"https:\\/\\/phab.mercurial-scm.org\\/D7913\",\"dateCreated\":\"1579207172\",\"dateModified\":\"1579709023\",\"authorPHID\":\"PHID-USER-5iutahkpkhvnxfimqjbk\",\"status\":\"3\",\"statusName\":\"Closed\",\"properties\":{\"draft.broadcast\":true,\"lines.added\":6,\"lines.removed\":6,\"wasAcceptedBeforeClose\":false},\"branch\":null,\"summary\":\"line.len is a Py_ssize_t, and we're casing to size_t (unsigned). On my compiler,\\nthis causes a warning to be emitted:\\n\\n```\\nmercurial\\/cext\\/manifest.c: In function 'pathlen':\\nmercurial\\/cext\\/manifest.c:48:44: warning: operand of ?: changes signedness from 'Py_ssize_t' {aka 'long int'} to 'long unsigned int' due to unsignedness of other operand [-Wsign-compare]\\n return (end) ? 
(size_t)(end - l-\\u003estart) : l-\\u003elen;\\n ^~~~~~\\n```\",\"testPlan\":\"\",\"lineCount\":\"12\",\"activeDiffPHID\":\"PHID-DIFF-otv6bgmiu242tgi62saw\",\"diffs\":[\"19406\",\"19380\"],\"commits\":[\"PHID-CMIT-z46nrlwhoumbuxp7f2hy\"],\"reviewers\":{\"PHID-PROJ-3dvcxzznrjru2xmmses3\":\"PHID-PROJ-3dvcxzznrjru2xmmses3\",\"PHID-USER-tzhaient733lwrlbcag5\":\"PHID-USER-tzhaient733lwrlbcag5\"},\"ccs\":[\"PHID-USER-qwhdxkyioew7vwvxqc2g\",\"PHID-USER-tzhaient733lwrlbcag5\",\"PHID-USER-q42dn7cc3donqriafhjx\"],\"hashes\":[[\"hgcm\",\"\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\"]],\"auxiliary\":{\"phabricator:projects\":[],\"phabricator:depends-on\":[]},\"repositoryPHID\":\"PHID-REPO-bvunnehri4u2isyr7bc3\"},{\"id\":\"7911\",\"phid\":\"PHID-DREV-rjja25ytm3wz7p262cxd\",\"title\":\"examples: refer to nightly rustfmt in Windows-compatible way\",\"uri\":\"https:\\/\\/phab.mercurial-scm.org\\/D7911\",\"dateCreated\":\"1579192910\",\"dateModified\":\"1579274016\",\"authorPHID\":\"PHID-USER-rskbts6c2kyknc66vlzt\",\"status\":\"3\",\"statusName\":\"Closed\",\"properties\":{\"draft.broadcast\":true,\"lines.added\":1,\"lines.removed\":1,\"wasAcceptedBeforeClose\":true},\"branch\":null,\"summary\":\"Thanks to Jun Wu for the tip. 
I found that the new form also gave\\nbetter error messages when the nightly rustfmt wasn't installed (it\\ntold me which command to run instead of just saying \\\"error: not a\\nfile: \\u003csome path\\u003e\\\").\",\"testPlan\":\"\",\"lineCount\":\"2\",\"activeDiffPHID\":\"PHID-DIFF-xewewozhprr7tbym4sqx\",\"diffs\":[\"19408\",\"19376\"],\"commits\":[\"PHID-CMIT-zoorilx5m4ijcev7rp2z\"],\"reviewers\":{\"PHID-PROJ-3dvcxzznrjru2xmmses3\":\"PHID-PROJ-3dvcxzznrjru2xmmses3\",\"PHID-USER-34jnztnonbr4lhwuybwl\":\"PHID-USER-34jnztnonbr4lhwuybwl\"},\"ccs\":[\"PHID-USER-q42dn7cc3donqriafhjx\"],\"hashes\":[[\"hgcm\",\"\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\"]],\"auxiliary\":{\"phabricator:projects\":[],\"phabricator:depends-on\":[]},\"repositoryPHID\":\"PHID-REPO-bvunnehri4u2isyr7bc3\"},{\"id\":\"7918\",\"phid\":\"PHID-DREV-sfsckrwrwc77rdl3k5rz\",\"title\":\"create draft change for phabricator 
testing\",\"uri\":\"https:\\/\\/phab.mercurial-scm.org\\/D7918\",\"dateCreated\":\"1579221164\",\"dateModified\":\"1579222305\",\"authorPHID\":\"PHID-USER-tzhaient733lwrlbcag5\",\"status\":\"4\",\"statusName\":\"Abandoned\",\"properties\":{\"draft.broadcast\":true,\"lines.added\":1,\"lines.removed\":2},\"branch\":\"default\",\"summary\":\"\",\"testPlan\":\"\",\"lineCount\":\"3\",\"activeDiffPHID\":\"PHID-DIFF-pqdlhei24n47fzeofjph\",\"diffs\":[\"19394\"],\"commits\":[],\"reviewers\":{\"PHID-PROJ-3dvcxzznrjru2xmmses3\":\"PHID-PROJ-3dvcxzznrjru2xmmses3\"},\"ccs\":[\"PHID-USER-q42dn7cc3donqriafhjx\"],\"hashes\":[[\"hgcm\",\"\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\"]],\"auxiliary\":{\"phabricator:projects\":[],\"phabricator:depends-on\":[\"PHID-DREV-yhl3yvijs4jploa5iqm4\"]},\"repositoryPHID\":\"PHID-REPO-bvunnehri4u2isyr7bc3\",\"sourcePath\":\"\\/\"},{\"id\":\"7917\",\"phid\":\"PHID-DREV-yhl3yvijs4jploa5iqm4\",\"title\":\"create public change for phabricator 
testing\",\"uri\":\"https:\\/\\/phab.mercurial-scm.org\\/D7917\",\"dateCreated\":\"1579221160\",\"dateModified\":\"1579222286\",\"authorPHID\":\"PHID-USER-tzhaient733lwrlbcag5\",\"status\":\"4\",\"statusName\":\"Abandoned\",\"properties\":{\"draft.broadcast\":true,\"lines.added\":1,\"lines.removed\":1},\"branch\":\"default\",\"summary\":\"\",\"testPlan\":\"\",\"lineCount\":\"2\",\"activeDiffPHID\":\"PHID-DIFF-e64weyerxtutv2jvj2dt\",\"diffs\":[\"19393\"],\"commits\":[],\"reviewers\":{\"PHID-PROJ-3dvcxzznrjru2xmmses3\":\"PHID-PROJ-3dvcxzznrjru2xmmses3\"},\"ccs\":[\"PHID-USER-q42dn7cc3donqriafhjx\"],\"hashes\":[[\"hgcm\",\"\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\"]],\"auxiliary\":{\"phabricator:projects\":[],\"phabricator:depends-on\":[]},\"repositoryPHID\":\"PHID-REPO-bvunnehri4u2isyr7bc3\",\"sourcePath\":\"\\/\"},{\"id\":\"7916\",\"phid\":\"PHID-DREV-nk73cg2l2oqfozxnw2i3\",\"title\":\"create beta for phabricator 
test\",\"uri\":\"https:\\/\\/phab.mercurial-scm.org\\/D7916\",\"dateCreated\":\"1579221145\",\"dateModified\":\"1579222261\",\"authorPHID\":\"PHID-USER-tzhaient733lwrlbcag5\",\"status\":\"4\",\"statusName\":\"Abandoned\",\"properties\":{\"draft.broadcast\":true,\"lines.added\":1,\"lines.removed\":0},\"branch\":\"default\",\"summary\":\"\",\"testPlan\":\"\",\"lineCount\":\"1\",\"activeDiffPHID\":\"PHID-DIFF-vn5llgg5oh2rkzquipx4\",\"diffs\":[\"19392\"],\"commits\":[],\"reviewers\":{\"PHID-PROJ-3dvcxzznrjru2xmmses3\":\"PHID-PROJ-3dvcxzznrjru2xmmses3\"},\"ccs\":[\"PHID-USER-q42dn7cc3donqriafhjx\"],\"hashes\":[[\"hgcm\",\"\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\"]],\"auxiliary\":{\"phabricator:projects\":[],\"phabricator:depends-on\":[\"PHID-DREV-3mzbavd2ajsbar5l3esr\"]},\"repositoryPHID\":\"PHID-REPO-bvunnehri4u2isyr7bc3\",\"sourcePath\":\"\\/\"},{\"id\":\"7915\",\"phid\":\"PHID-DREV-3mzbavd2ajsbar5l3esr\",\"title\":\"create alpha for phabricator test 
\\u20ac\",\"uri\":\"https:\\/\\/phab.mercurial-scm.org\\/D7915\",\"dateCreated\":\"1579221124\",\"dateModified\":\"1579222242\",\"authorPHID\":\"PHID-USER-tzhaient733lwrlbcag5\",\"status\":\"4\",\"statusName\":\"Abandoned\",\"properties\":{\"draft.broadcast\":true,\"lines.added\":2,\"lines.removed\":0},\"branch\":\"default\",\"summary\":\"\",\"testPlan\":\"\",\"lineCount\":\"2\",\"activeDiffPHID\":\"PHID-DIFF-fu7z4h6aahgcq2h2q33b\",\"diffs\":[\"19391\",\"19390\"],\"commits\":[],\"reviewers\":{\"PHID-PROJ-3dvcxzznrjru2xmmses3\":\"PHID-PROJ-3dvcxzznrjru2xmmses3\"},\"ccs\":[\"PHID-USER-q42dn7cc3donqriafhjx\"],\"hashes\":[[\"hgcm\",\"\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\"]],\"auxiliary\":{\"phabricator:projects\":[],\"phabricator:depends-on\":[]},\"repositoryPHID\":\"PHID-REPO-bvunnehri4u2isyr7bc3\",\"sourcePath\":\"\\/\"},{\"id\":\"7912\",\"phid\":\"PHID-DREV-6sl7k5ssqpiymujoeppg\",\"title\":\"py3: fix curses chunkselector fallback (when diffs are too large) on py3\",\"uri\":\"https:\\/\\/phab.mercurial-scm.org\\/D7912\",\"dateCreated\":\"1579206015\",\"dateModified\":\"1579211357\",\"authorPHID\":\"PHID-USER-5iutahkpkhvnxfimqjbk\",\"status\":\"3\",\"statusName\":\"Closed\",\"properties\":{\"draft.broadcast\":true,\"lines.added\":1,\"lines.removed\":1,\"wasAcceptedBeforeClose\":false},\"branch\":null,\"summary\":\"Previously we showed the message using Exception.message, which is removed in\\npy3. Since crecordmod.fallbackerror inherits from error.Abort, we can just use\\n`b'%s' % exception` to print the message. This does not print the hint, but\\nthat's fine - we don't set one. 
We inherit from error.Abort so that if a\\ncodepath doesn't handle fallback specially, it exits to the terminal with a sane\\nmessage instead of an unknown exception error.\",\"testPlan\":\"\",\"lineCount\":\"2\",\"activeDiffPHID\":\"PHID-DIFF-45onijfyde7kwtva3efa\",\"diffs\":[\"19381\",\"19379\"],\"commits\":[\"PHID-CMIT-i2qbhmmfpgrrkhubbr5v\"],\"reviewers\":{\"PHID-PROJ-3dvcxzznrjru2xmmses3\":\"PHID-PROJ-3dvcxzznrjru2xmmses3\"},\"ccs\":[\"PHID-USER-q42dn7cc3donqriafhjx\"],\"hashes\":[[\"hgcm\",\"\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\"]],\"auxiliary\":{\"phabricator:projects\":[],\"phabricator:depends-on\":[]},\"repositoryPHID\":\"PHID-REPO-bvunnehri4u2isyr7bc3\"}],\"error_code\":null,\"error_info\":null}"
+ }
+ }
+ },
+ {
+ "request": {
+ "body": "__conduit__=1&output=json¶ms=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22ids%22%3A+%5B19393%2C+19394%5D%7D",
+ "uri": "https://phab.mercurial-scm.org//api/differential.querydiffs",
+ "headers": {
+ "content-length": [
+ "156"
+ ],
+ "host": [
+ "phab.mercurial-scm.org"
+ ],
+ "accept": [
+ "application/mercurial-0.1"
+ ],
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.3+205-75107f1aa427+20200215)"
+ ],
+ "content-type": [
+ "application/x-www-form-urlencoded"
+ ]
+ },
+ "method": "POST"
+ },
+ "response": {
+ "status": {
+ "message": "OK",
+ "code": 200
+ },
+ "headers": {
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
+ ],
+ "transfer-encoding": [
+ "chunked"
+ ],
+ "x-content-type-options": [
+ "nosniff"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "cache-control": [
+ "no-store"
+ ],
+ "content-type": [
+ "application/json"
+ ],
+ "server": [
+ "Apache/2.4.10 (Debian)"
+ ],
+ "strict-transport-security": [
+ "max-age=0; includeSubdomains; preload"
+ ],
+ "date": [
+ "Sun, 16 Feb 2020 20:45:33 GMT"
+ ],
+ "referrer-policy": [
+ "no-referrer"
+ ],
+ "x-xss-protection": [
+ "1; mode=block"
+ ]
+ },
+ "body": {
+ "string": "{\"result\":{\"19394\":{\"id\":\"19394\",\"revisionID\":\"7918\",\"dateCreated\":\"1579221161\",\"dateModified\":\"1579221164\",\"sourceControlBaseRevision\":\"7b4185ab5d16acf98e41d566be38c5dbea10878d\",\"sourceControlPath\":\"\\/\",\"sourceControlSystem\":\"hg\",\"branch\":\"default\",\"bookmark\":null,\"creationMethod\":\"phabsend\",\"description\":null,\"unitStatus\":\"0\",\"lintStatus\":\"0\",\"changes\":[{\"id\":\"52927\",\"metadata\":{\"line:first\":1,\"hash.effect\":\"sjHKTvwwqRoW\"},\"oldPath\":\"alpha\",\"currentPath\":\"alpha\",\"awayPaths\":[],\"oldProperties\":[],\"newProperties\":[],\"type\":\"2\",\"fileType\":\"1\",\"commitHash\":null,\"addLines\":\"1\",\"delLines\":\"2\",\"hunks\":[{\"oldOffset\":\"1\",\"newOffset\":\"1\",\"oldLength\":\"2\",\"newLength\":\"1\",\"addLines\":null,\"delLines\":null,\"isMissingOldNewline\":null,\"isMissingNewNewline\":null,\"corpus\":\"-alpha\\n-more\\n+draft change\\n\"}]}],\"properties\":{\"hg:meta\":{\"branch\":\"default\",\"date\":\"0 
0\",\"node\":\"3244dc4a33342b4d91ad534ae091685244ac5ed4\",\"parent\":\"7b4185ab5d16acf98e41d566be38c5dbea10878d\",\"user\":\"test\"},\"local:commits\":{\"3244dc4a33342b4d91ad534ae091685244ac5ed4\":{\"author\":\"test\",\"authorEmail\":\"test\",\"branch\":\"default\",\"commit\":\"3244dc4a33342b4d91ad534ae091685244ac5ed4\",\"parents\":[\"7b4185ab5d16acf98e41d566be38c5dbea10878d\"],\"time\":0}}},\"authorName\":\"test\",\"authorEmail\":\"test\"},\"19393\":{\"id\":\"19393\",\"revisionID\":\"7917\",\"dateCreated\":\"1579221158\",\"dateModified\":\"1579221160\",\"sourceControlBaseRevision\":\"a692622e693757674f85ff481c7ff77057a7f82a\",\"sourceControlPath\":\"\\/\",\"sourceControlSystem\":\"hg\",\"branch\":\"default\",\"bookmark\":null,\"creationMethod\":\"phabsend\",\"description\":null,\"unitStatus\":\"0\",\"lintStatus\":\"0\",\"changes\":[{\"id\":\"52926\",\"metadata\":{\"line:first\":1,\"hash.effect\":\"uKa4JPWhh2di\"},\"oldPath\":\"beta\",\"currentPath\":\"beta\",\"awayPaths\":[],\"oldProperties\":[],\"newProperties\":[],\"type\":\"2\",\"fileType\":\"1\",\"commitHash\":null,\"addLines\":\"1\",\"delLines\":\"1\",\"hunks\":[{\"oldOffset\":\"1\",\"newOffset\":\"1\",\"oldLength\":\"1\",\"newLength\":\"1\",\"addLines\":null,\"delLines\":null,\"isMissingOldNewline\":null,\"isMissingNewNewline\":null,\"corpus\":\"-beta\\n+public change\\n\"}]}],\"properties\":{\"hg:meta\":{\"branch\":\"default\",\"date\":\"0 0\",\"node\":\"7b4185ab5d16acf98e41d566be38c5dbea10878d\",\"parent\":\"a692622e693757674f85ff481c7ff77057a7f82a\",\"user\":\"test\"},\"local:commits\":{\"7b4185ab5d16acf98e41d566be38c5dbea10878d\":{\"author\":\"test\",\"authorEmail\":\"test\",\"branch\":\"default\",\"commit\":\"7b4185ab5d16acf98e41d566be38c5dbea10878d\",\"parents\":[\"a692622e693757674f85ff481c7ff77057a7f82a\"],\"time\":0}}},\"authorName\":\"test\",\"authorEmail\":\"test\"}},\"error_code\":null,\"error_info\":null}"
+ }
+ }
+ },
+ {
+ "request": {
+ "body": "__conduit__=1&output=json¶ms=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22diffID%22%3A+19393%7D",
+ "uri": "https://phab.mercurial-scm.org//api/differential.getrawdiff",
+ "headers": {
+ "content-length": [
+ "144"
+ ],
+ "host": [
+ "phab.mercurial-scm.org"
+ ],
+ "accept": [
+ "application/mercurial-0.1"
+ ],
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.3+205-75107f1aa427+20200215)"
+ ],
+ "content-type": [
+ "application/x-www-form-urlencoded"
+ ]
+ },
+ "method": "POST"
+ },
+ "response": {
+ "status": {
+ "message": "OK",
+ "code": 200
+ },
+ "headers": {
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
+ ],
+ "transfer-encoding": [
+ "chunked"
+ ],
+ "x-content-type-options": [
+ "nosniff"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "cache-control": [
+ "no-store"
+ ],
+ "content-type": [
+ "application/json"
+ ],
+ "server": [
+ "Apache/2.4.10 (Debian)"
+ ],
+ "strict-transport-security": [
+ "max-age=0; includeSubdomains; preload"
+ ],
+ "date": [
+ "Sun, 16 Feb 2020 20:45:33 GMT"
+ ],
+ "referrer-policy": [
+ "no-referrer"
+ ],
+ "x-xss-protection": [
+ "1; mode=block"
+ ]
+ },
+ "body": {
+ "string": "{\"result\":\"diff --git a\\/beta b\\/beta\\n--- a\\/beta\\n+++ b\\/beta\\n@@ -1 +1 @@\\n-beta\\n+public change\\n\\n\",\"error_code\":null,\"error_info\":null}"
+ }
+ }
+ },
+ {
+ "request": {
+ "body": "__conduit__=1&output=json¶ms=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22diffID%22%3A+19394%7D",
+ "uri": "https://phab.mercurial-scm.org//api/differential.getrawdiff",
+ "headers": {
+ "content-length": [
+ "144"
+ ],
+ "host": [
+ "phab.mercurial-scm.org"
+ ],
+ "accept": [
+ "application/mercurial-0.1"
+ ],
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.3+205-75107f1aa427+20200215)"
+ ],
+ "content-type": [
+ "application/x-www-form-urlencoded"
+ ]
+ },
+ "method": "POST"
+ },
+ "response": {
+ "status": {
+ "message": "OK",
+ "code": 200
+ },
+ "headers": {
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
+ ],
+ "transfer-encoding": [
+ "chunked"
+ ],
+ "x-content-type-options": [
+ "nosniff"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "cache-control": [
+ "no-store"
+ ],
+ "content-type": [
+ "application/json"
+ ],
+ "server": [
+ "Apache/2.4.10 (Debian)"
+ ],
+ "strict-transport-security": [
+ "max-age=0; includeSubdomains; preload"
+ ],
+ "date": [
+ "Sun, 16 Feb 2020 20:45:34 GMT"
+ ],
+ "referrer-policy": [
+ "no-referrer"
+ ],
+ "x-xss-protection": [
+ "1; mode=block"
+ ]
+ },
+ "body": {
+ "string": "{\"result\":\"diff --git a\\/alpha b\\/alpha\\n--- a\\/alpha\\n+++ b\\/alpha\\n@@ -1,2 +1 @@\\n-alpha\\n-more\\n+draft change\\n\\n\",\"error_code\":null,\"error_info\":null}"
+ }
+ }
+ }
+ ]
+}
\ No newline at end of file
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/phabricator/phabread-empty-drev.json Thu Apr 16 22:51:09 2020 +0530
@@ -0,0 +1,73 @@
+{
+ "version": 1,
+ "interactions": [
+ {
+ "request": {
+ "uri": "https://phab.mercurial-scm.org//api/differential.query",
+ "method": "POST",
+ "headers": {
+ "accept": [
+ "application/mercurial-0.1"
+ ],
+ "host": [
+ "phab.mercurial-scm.org"
+ ],
+ "content-length": [
+ "146"
+ ],
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.3+33-bb58931d0c4f)"
+ ],
+ "content-type": [
+ "application/x-www-form-urlencoded"
+ ]
+ },
+ "body": "__conduit__=1¶ms=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22ids%22%3A+%5B7917%5D%7D&output=json"
+ },
+ "response": {
+ "status": {
+ "code": 200,
+ "message": "OK"
+ },
+ "headers": {
+ "x-content-type-options": [
+ "nosniff"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "referrer-policy": [
+ "no-referrer"
+ ],
+ "server": [
+ "Apache/2.4.10 (Debian)"
+ ],
+ "cache-control": [
+ "no-store"
+ ],
+ "content-type": [
+ "application/json"
+ ],
+ "transfer-encoding": [
+ "chunked"
+ ],
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
+ ],
+ "x-xss-protection": [
+ "1; mode=block"
+ ],
+ "date": [
+ "Thu, 05 Mar 2020 16:19:23 GMT"
+ ],
+ "strict-transport-security": [
+ "max-age=0; includeSubdomains; preload"
+ ]
+ },
+ "body": {
+ "string": "{\"result\":[{\"id\":\"7917\",\"phid\":\"PHID-DREV-yhl3yvijs4jploa5iqm4\",\"title\":\"create public change for phabricator testing\",\"uri\":\"https:\\/\\/phab.mercurial-scm.org\\/D7917\",\"dateCreated\":\"1579221160\",\"dateModified\":\"1579222286\",\"authorPHID\":\"PHID-USER-tzhaient733lwrlbcag5\",\"status\":\"4\",\"statusName\":\"Abandoned\",\"properties\":{\"draft.broadcast\":true,\"lines.added\":1,\"lines.removed\":1},\"branch\":\"default\",\"summary\":\"\",\"testPlan\":\"\",\"lineCount\":\"2\",\"activeDiffPHID\":\"PHID-DIFF-e64weyerxtutv2jvj2dt\",\"diffs\":[\"19393\"],\"commits\":[],\"reviewers\":{\"PHID-PROJ-3dvcxzznrjru2xmmses3\":\"PHID-PROJ-3dvcxzznrjru2xmmses3\"},\"ccs\":[\"PHID-USER-q42dn7cc3donqriafhjx\"],\"hashes\":[[\"hgcm\",\"\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\"]],\"auxiliary\":{\"phabricator:projects\":[],\"phabricator:depends-on\":[]},\"repositoryPHID\":\"PHID-REPO-bvunnehri4u2isyr7bc3\",\"sourcePath\":\"\\/\"}],\"error_code\":null,\"error_info\":null}"
+ }
+ }
+ }
+ ]
+}
\ No newline at end of file
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/phabricator/phabread-multi-drev.json Thu Apr 16 22:51:09 2020 +0530
@@ -0,0 +1,345 @@
+{
+ "version": 1,
+ "interactions": [
+ {
+ "response": {
+ "body": {
+ "string": "{\"result\":[{\"id\":\"8207\",\"phid\":\"PHID-DREV-2cgovej5wkjco3xjcqta\",\"title\":\"phabricator: pass ui instead of repo to `userphids()`\",\"uri\":\"https:\\/\\/phab.mercurial-scm.org\\/D8207\",\"dateCreated\":\"1583259903\",\"dateModified\":\"1583348836\",\"authorPHID\":\"PHID-USER-tzhaient733lwrlbcag5\",\"status\":\"3\",\"statusName\":\"Closed\",\"properties\":{\"draft.broadcast\":true,\"lines.added\":7,\"lines.removed\":4,\"wasAcceptedBeforeClose\":true},\"branch\":null,\"summary\":\"Also not a repository operation.\",\"testPlan\":\"\",\"lineCount\":\"11\",\"activeDiffPHID\":\"PHID-DIFF-wzbsydozxy3nv2k6q4nd\",\"diffs\":[\"20443\",\"20423\"],\"commits\":[\"PHID-CMIT-o75v5xkiwt7t4qsjdhhw\",\"PHID-CMIT-4od7afhqygglq77yjjbr\"],\"reviewers\":{\"PHID-PROJ-3dvcxzznrjru2xmmses3\":\"PHID-PROJ-3dvcxzznrjru2xmmses3\",\"PHID-USER-34jnztnonbr4lhwuybwl\":\"PHID-USER-34jnztnonbr4lhwuybwl\"},\"ccs\":[\"PHID-USER-5iy6mkoveguhm2zthvww\",\"PHID-USER-q42dn7cc3donqriafhjx\"],\"hashes\":[[\"hgcm\",\"\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\"]],\"auxiliary\":{\"phabricator:projects\":[],\"phabricator:depends-on\":[\"PHID-DREV-gtbyd4t7mjnm4i3erun5\"]},\"repositoryPHID\":\"PHID-REPO-bvunnehri4u2isyr7bc3\",\"sourcePath\":null},{\"id\":\"8206\",\"phid\":\"PHID-DREV-gtbyd4t7mjnm4i3erun5\",\"title\":\"phabricator: pass ui instead of repo to `querydrev()`\",\"uri\":\"https:\\/\\/phab.mercurial-scm.org\\/D8206\",\"dateCreated\":\"1583259900\",\"dateModified\":\"1583348835\",\"authorPHID\":\"PHID-USER-tzhaient733lwrlbcag5\",\"status\":\"3\",\"statusName\":\"Closed\",\"properties\":{\"draft.broadcast\":true,\"lines.added\":6,\"lines.removed\":6,\"wasAcceptedBeforeClose\":true},\"branch\":null,\"summary\":\"Also not a repository 
operation.\",\"testPlan\":\"\",\"lineCount\":\"12\",\"activeDiffPHID\":\"PHID-DIFF-shmhfs2exdg7ituxbt22\",\"diffs\":[\"20442\",\"20422\"],\"commits\":[\"PHID-CMIT-66dzbf7lma7m2ri62tfl\",\"PHID-CMIT-2su6m35fsf32mblyi2ad\"],\"reviewers\":{\"PHID-PROJ-3dvcxzznrjru2xmmses3\":\"PHID-PROJ-3dvcxzznrjru2xmmses3\",\"PHID-USER-34jnztnonbr4lhwuybwl\":\"PHID-USER-34jnztnonbr4lhwuybwl\"},\"ccs\":[\"PHID-USER-5iy6mkoveguhm2zthvww\",\"PHID-USER-q42dn7cc3donqriafhjx\"],\"hashes\":[[\"hgcm\",\"\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\"]],\"auxiliary\":{\"phabricator:projects\":[],\"phabricator:depends-on\":[\"PHID-DREV-42xnmk3odcdz2lwckiym\"]},\"repositoryPHID\":\"PHID-REPO-bvunnehri4u2isyr7bc3\",\"sourcePath\":null},{\"id\":\"8205\",\"phid\":\"PHID-DREV-42xnmk3odcdz2lwckiym\",\"title\":\"phabricator: pass ui instead of repo to `readpatch()`\",\"uri\":\"https:\\/\\/phab.mercurial-scm.org\\/D8205\",\"dateCreated\":\"1583259897\",\"dateModified\":\"1583348832\",\"authorPHID\":\"PHID-USER-tzhaient733lwrlbcag5\",\"status\":\"3\",\"statusName\":\"Closed\",\"properties\":{\"draft.broadcast\":true,\"lines.added\":5,\"lines.removed\":7,\"wasAcceptedBeforeClose\":true},\"branch\":null,\"summary\":\"This makes it a little clearer that it isn't a repository 
operation.\",\"testPlan\":\"\",\"lineCount\":\"12\",\"activeDiffPHID\":\"PHID-DIFF-atzhtzu6avavi6uevt3n\",\"diffs\":[\"20441\",\"20421\"],\"commits\":[\"PHID-CMIT-wtocju4a33qnh7jwy7on\",\"PHID-CMIT-e3dyltz277hhalnoum4m\"],\"reviewers\":{\"PHID-PROJ-3dvcxzznrjru2xmmses3\":\"PHID-PROJ-3dvcxzznrjru2xmmses3\",\"PHID-USER-34jnztnonbr4lhwuybwl\":\"PHID-USER-34jnztnonbr4lhwuybwl\"},\"ccs\":[\"PHID-USER-5iy6mkoveguhm2zthvww\",\"PHID-USER-q42dn7cc3donqriafhjx\"],\"hashes\":[[\"hgcm\",\"\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\"]],\"auxiliary\":{\"phabricator:projects\":[],\"phabricator:depends-on\":[]},\"repositoryPHID\":\"PHID-REPO-bvunnehri4u2isyr7bc3\",\"sourcePath\":null}],\"error_code\":null,\"error_info\":null}"
+ },
+ "headers": {
+ "x-frame-options": [
+ "Deny"
+ ],
+ "strict-transport-security": [
+ "max-age=0; includeSubdomains; preload"
+ ],
+ "transfer-encoding": [
+ "chunked"
+ ],
+ "cache-control": [
+ "no-store"
+ ],
+ "referrer-policy": [
+ "no-referrer"
+ ],
+ "x-content-type-options": [
+ "nosniff"
+ ],
+ "x-xss-protection": [
+ "1; mode=block"
+ ],
+ "server": [
+ "Apache/2.4.10 (Debian)"
+ ],
+ "date": [
+ "Wed, 04 Mar 2020 22:05:21 GMT"
+ ],
+ "content-type": [
+ "application/json"
+ ],
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
+ ]
+ },
+ "status": {
+ "message": "OK",
+ "code": 200
+ }
+ },
+ "request": {
+ "body": "params=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22ids%22%3A+%5B8205%2C+8206%2C+8207%5D%7D&output=json&__conduit__=1",
+ "uri": "https://phab.mercurial-scm.org//api/differential.query",
+ "headers": {
+ "content-length": [
+ "162"
+ ],
+ "accept": [
+ "application/mercurial-0.1"
+ ],
+ "host": [
+ "phab.mercurial-scm.org"
+ ],
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.3+33-bb58931d0c4f)"
+ ],
+ "content-type": [
+ "application/x-www-form-urlencoded"
+ ]
+ },
+ "method": "POST"
+ }
+ },
+ {
+ "response": {
+ "body": {
+ "string": "{\"result\":{\"20443\":{\"id\":\"20443\",\"revisionID\":\"8207\",\"dateCreated\":\"1581964120\",\"dateModified\":\"1583327828\",\"sourceControlBaseRevision\":\"9b46270917348950e3fb1e73a5c9e46038065622\",\"sourceControlPath\":null,\"sourceControlSystem\":\"hg\",\"branch\":null,\"bookmark\":null,\"creationMethod\":\"commit\",\"description\":\"rHGa271ef1de08664a9ee4a286711681377875ca2a2\",\"unitStatus\":\"6\",\"lintStatus\":\"6\",\"changes\":[{\"id\":\"55598\",\"metadata\":{\"line:first\":1043,\"hash.effect\":\".HmDk8vnow9e\"},\"oldPath\":\"hgext\\/phabricator.py\",\"currentPath\":\"hgext\\/phabricator.py\",\"awayPaths\":[],\"oldProperties\":[],\"newProperties\":[],\"type\":\"2\",\"fileType\":\"1\",\"commitHash\":null,\"addLines\":\"7\",\"delLines\":\"4\",\"hunks\":[{\"oldOffset\":\"1\",\"newOffset\":\"1\",\"oldLength\":\"1799\",\"newLength\":\"1802\",\"addLines\":null,\"delLines\":null,\"isMissingOldNewline\":null,\"isMissingNewNewline\":null,\"corpus\":\" # phabricator.py - simple Phabricator integration\\n #\\n # Copyright 2017 Facebook, Inc.\\n #\\n # This software may be used and distributed according to the terms of the\\n # GNU General Public License version 2 or any later version.\\n \\\"\\\"\\\"simple Phabricator integration (EXPERIMENTAL)\\n \\n This extension provides a ``phabsend`` command which sends a stack of\\n changesets to Phabricator, and a ``phabread`` command which prints a stack of\\n revisions in a format suitable for :hg:`import`, and a ``phabupdate`` command\\n to update statuses in batch.\\n \\n A \\\"phabstatus\\\" view for :hg:`show` is also provided; it displays status\\n information of Phabricator differentials associated with unfinished\\n changesets.\\n \\n By default, Phabricator requires ``Test Plan`` which might prevent some\\n changeset from being sent. 
The requirement could be disabled by changing\\n ``differential.require-test-plan-field`` config server side.\\n \\n Config::\\n \\n [phabricator]\\n # Phabricator URL\\n url = https:\\/\\/phab.example.com\\/\\n \\n # Repo callsign. If a repo has a URL https:\\/\\/$HOST\\/diffusion\\/FOO, then its\\n # callsign is \\\"FOO\\\".\\n callsign = FOO\\n \\n # curl command to use. If not set (default), use builtin HTTP library to\\n # communicate. If set, use the specified curl command. This could be useful\\n # if you need to specify advanced options that is not easily supported by\\n # the internal library.\\n curlcmd = curl --connect-timeout 2 --retry 3 --silent\\n \\n [auth]\\n example.schemes = https\\n example.prefix = phab.example.com\\n \\n # API token. Get it from https:\\/\\/$HOST\\/conduit\\/login\\/\\n example.phabtoken = cli-xxxxxxxxxxxxxxxxxxxxxxxxxxxx\\n \\\"\\\"\\\"\\n \\n from __future__ import absolute_import\\n \\n import base64\\n import contextlib\\n import hashlib\\n import itertools\\n import json\\n import mimetypes\\n import operator\\n import re\\n \\n from mercurial.node import bin, nullid\\n from mercurial.i18n import _\\n from mercurial.pycompat import getattr\\n from mercurial.thirdparty import attr\\n from mercurial import (\\n cmdutil,\\n context,\\n encoding,\\n error,\\n exthelper,\\n graphmod,\\n httpconnection as httpconnectionmod,\\n localrepo,\\n logcmdutil,\\n match,\\n mdiff,\\n obsutil,\\n parser,\\n patch,\\n phases,\\n pycompat,\\n scmutil,\\n smartset,\\n tags,\\n templatefilters,\\n templateutil,\\n url as urlmod,\\n util,\\n )\\n from mercurial.utils import (\\n procutil,\\n stringutil,\\n )\\n from . import show\\n \\n \\n # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for\\n # extensions which SHIP WITH MERCURIAL. 
Non-mainline extensions should\\n # be specifying the version(s) of Mercurial they are tested with, or\\n # leave the attribute unspecified.\\n testedwith = b'ships-with-hg-core'\\n \\n eh = exthelper.exthelper()\\n \\n cmdtable = eh.cmdtable\\n command = eh.command\\n configtable = eh.configtable\\n templatekeyword = eh.templatekeyword\\n uisetup = eh.finaluisetup\\n \\n # developer config: phabricator.batchsize\\n eh.configitem(\\n b'phabricator', b'batchsize', default=12,\\n )\\n eh.configitem(\\n b'phabricator', b'callsign', default=None,\\n )\\n eh.configitem(\\n b'phabricator', b'curlcmd', default=None,\\n )\\n # developer config: phabricator.repophid\\n eh.configitem(\\n b'phabricator', b'repophid', default=None,\\n )\\n eh.configitem(\\n b'phabricator', b'url', default=None,\\n )\\n eh.configitem(\\n b'phabsend', b'confirm', default=False,\\n )\\n \\n colortable = {\\n b'phabricator.action.created': b'green',\\n b'phabricator.action.skipped': b'magenta',\\n b'phabricator.action.updated': b'magenta',\\n b'phabricator.desc': b'',\\n b'phabricator.drev': b'bold',\\n b'phabricator.node': b'',\\n b'phabricator.status.abandoned': b'magenta dim',\\n b'phabricator.status.accepted': b'green bold',\\n b'phabricator.status.closed': b'green',\\n b'phabricator.status.needsreview': b'yellow',\\n b'phabricator.status.needsrevision': b'red',\\n b'phabricator.status.changesplanned': b'red',\\n }\\n \\n _VCR_FLAGS = [\\n (\\n b'',\\n b'test-vcr',\\n b'',\\n _(\\n b'Path to a vcr file. 
If nonexistent, will record a new vcr transcript'\\n b', otherwise will mock all http requests using the specified vcr file.'\\n b' (ADVANCED)'\\n ),\\n ),\\n ]\\n \\n \\n @eh.wrapfunction(localrepo, \\\"loadhgrc\\\")\\n def _loadhgrc(orig, ui, wdirvfs, hgvfs, requirements):\\n \\\"\\\"\\\"Load ``.arcconfig`` content into a ui instance on repository open.\\n \\\"\\\"\\\"\\n result = False\\n arcconfig = {}\\n \\n try:\\n # json.loads only accepts bytes from 3.6+\\n rawparams = encoding.unifromlocal(wdirvfs.read(b\\\".arcconfig\\\"))\\n # json.loads only returns unicode strings\\n arcconfig = pycompat.rapply(\\n lambda x: encoding.unitolocal(x)\\n if isinstance(x, pycompat.unicode)\\n else x,\\n pycompat.json_loads(rawparams),\\n )\\n \\n result = True\\n except ValueError:\\n ui.warn(_(b\\\"invalid JSON in %s\\\\n\\\") % wdirvfs.join(b\\\".arcconfig\\\"))\\n except IOError:\\n pass\\n \\n cfg = util.sortdict()\\n \\n if b\\\"repository.callsign\\\" in arcconfig:\\n cfg[(b\\\"phabricator\\\", b\\\"callsign\\\")] = arcconfig[b\\\"repository.callsign\\\"]\\n \\n if b\\\"phabricator.uri\\\" in arcconfig:\\n cfg[(b\\\"phabricator\\\", b\\\"url\\\")] = arcconfig[b\\\"phabricator.uri\\\"]\\n \\n if cfg:\\n ui.applyconfig(cfg, source=wdirvfs.join(b\\\".arcconfig\\\"))\\n \\n return orig(ui, wdirvfs, hgvfs, requirements) or result # Load .hg\\/hgrc\\n \\n \\n def vcrcommand(name, flags, spec, helpcategory=None, optionalrepo=False):\\n fullflags = flags + _VCR_FLAGS\\n \\n def hgmatcher(r1, r2):\\n if r1.uri != r2.uri or r1.method != r2.method:\\n return False\\n r1params = util.urlreq.parseqs(r1.body)\\n r2params = util.urlreq.parseqs(r2.body)\\n for key in r1params:\\n if key not in r2params:\\n return False\\n value = r1params[key][0]\\n # we want to compare json payloads without worrying about ordering\\n if value.startswith(b'{') and value.endswith(b'}'):\\n r1json = pycompat.json_loads(value)\\n r2json = pycompat.json_loads(r2params[key][0])\\n if r1json != r2json:\\n 
return False\\n elif r2params[key][0] != value:\\n return False\\n return True\\n \\n def sanitiserequest(request):\\n request.body = re.sub(\\n br'cli-[a-z0-9]+', br'cli-hahayouwish', request.body\\n )\\n return request\\n \\n def sanitiseresponse(response):\\n if 'set-cookie' in response['headers']:\\n del response['headers']['set-cookie']\\n return response\\n \\n def decorate(fn):\\n def inner(*args, **kwargs):\\n cassette = pycompat.fsdecode(kwargs.pop('test_vcr', None))\\n if cassette:\\n import hgdemandimport\\n \\n with hgdemandimport.deactivated():\\n import vcr as vcrmod\\n import vcr.stubs as stubs\\n \\n vcr = vcrmod.VCR(\\n serializer='json',\\n before_record_request=sanitiserequest,\\n before_record_response=sanitiseresponse,\\n custom_patches=[\\n (\\n urlmod,\\n 'httpconnection',\\n stubs.VCRHTTPConnection,\\n ),\\n (\\n urlmod,\\n 'httpsconnection',\\n stubs.VCRHTTPSConnection,\\n ),\\n ],\\n )\\n vcr.register_matcher('hgmatcher', hgmatcher)\\n with vcr.use_cassette(cassette, match_on=['hgmatcher']):\\n return fn(*args, **kwargs)\\n return fn(*args, **kwargs)\\n \\n inner.__name__ = fn.__name__\\n inner.__doc__ = fn.__doc__\\n return command(\\n name,\\n fullflags,\\n spec,\\n helpcategory=helpcategory,\\n optionalrepo=optionalrepo,\\n )(inner)\\n \\n return decorate\\n \\n \\n def urlencodenested(params):\\n \\\"\\\"\\\"like urlencode, but works with nested parameters.\\n \\n For example, if params is {'a': ['b', 'c'], 'd': {'e': 'f'}}, it will be\\n flattened to {'a[0]': 'b', 'a[1]': 'c', 'd[e]': 'f'} and then passed to\\n urlencode. 
Note: the encoding is consistent with PHP's http_build_query.\\n \\\"\\\"\\\"\\n flatparams = util.sortdict()\\n \\n def process(prefix, obj):\\n if isinstance(obj, bool):\\n obj = {True: b'true', False: b'false'}[obj] # Python -\\u003e PHP form\\n lister = lambda l: [(b'%d' % k, v) for k, v in enumerate(l)]\\n items = {list: lister, dict: lambda x: x.items()}.get(type(obj))\\n if items is None:\\n flatparams[prefix] = obj\\n else:\\n for k, v in items(obj):\\n if prefix:\\n process(b'%s[%s]' % (prefix, k), v)\\n else:\\n process(k, v)\\n \\n process(b'', params)\\n return util.urlreq.urlencode(flatparams)\\n \\n \\n def readurltoken(ui):\\n \\\"\\\"\\\"return conduit url, token and make sure they exist\\n \\n Currently read from [auth] config section. In the future, it might\\n make sense to read from .arcconfig and .arcrc as well.\\n \\\"\\\"\\\"\\n url = ui.config(b'phabricator', b'url')\\n if not url:\\n raise error.Abort(\\n _(b'config %s.%s is required') % (b'phabricator', b'url')\\n )\\n \\n res = httpconnectionmod.readauthforuri(ui, url, util.url(url).user)\\n token = None\\n \\n if res:\\n group, auth = res\\n \\n ui.debug(b\\\"using auth.%s.* for authentication\\\\n\\\" % group)\\n \\n token = auth.get(b'phabtoken')\\n \\n if not token:\\n raise error.Abort(\\n _(b'Can\\\\'t find conduit token associated to %s') % (url,)\\n )\\n \\n return url, token\\n \\n \\n def callconduit(ui, name, params):\\n \\\"\\\"\\\"call Conduit API, params is a dict. 
return json.loads result, or None\\\"\\\"\\\"\\n host, token = readurltoken(ui)\\n url, authinfo = util.url(b'\\/'.join([host, b'api', name])).authinfo()\\n ui.debug(b'Conduit Call: %s %s\\\\n' % (url, pycompat.byterepr(params)))\\n params = params.copy()\\n params[b'__conduit__'] = {\\n b'token': token,\\n }\\n rawdata = {\\n b'params': templatefilters.json(params),\\n b'output': b'json',\\n b'__conduit__': 1,\\n }\\n data = urlencodenested(rawdata)\\n curlcmd = ui.config(b'phabricator', b'curlcmd')\\n if curlcmd:\\n sin, sout = procutil.popen2(\\n b'%s -d @- %s' % (curlcmd, procutil.shellquote(url))\\n )\\n sin.write(data)\\n sin.close()\\n body = sout.read()\\n else:\\n urlopener = urlmod.opener(ui, authinfo)\\n request = util.urlreq.request(pycompat.strurl(url), data=data)\\n with contextlib.closing(urlopener.open(request)) as rsp:\\n body = rsp.read()\\n ui.debug(b'Conduit Response: %s\\\\n' % body)\\n parsed = pycompat.rapply(\\n lambda x: encoding.unitolocal(x)\\n if isinstance(x, pycompat.unicode)\\n else x,\\n # json.loads only accepts bytes from py3.6+\\n pycompat.json_loads(encoding.unifromlocal(body)),\\n )\\n if parsed.get(b'error_code'):\\n msg = _(b'Conduit Error (%s): %s') % (\\n parsed[b'error_code'],\\n parsed[b'error_info'],\\n )\\n raise error.Abort(msg)\\n return parsed[b'result']\\n \\n \\n @vcrcommand(b'debugcallconduit', [], _(b'METHOD'), optionalrepo=True)\\n def debugcallconduit(ui, repo, name):\\n \\\"\\\"\\\"call Conduit API\\n \\n Call parameters are read from stdin as a JSON blob. 
Result will be written\\n to stdout as a JSON blob.\\n \\\"\\\"\\\"\\n # json.loads only accepts bytes from 3.6+\\n rawparams = encoding.unifromlocal(ui.fin.read())\\n # json.loads only returns unicode strings\\n params = pycompat.rapply(\\n lambda x: encoding.unitolocal(x)\\n if isinstance(x, pycompat.unicode)\\n else x,\\n pycompat.json_loads(rawparams),\\n )\\n # json.dumps only accepts unicode strings\\n result = pycompat.rapply(\\n lambda x: encoding.unifromlocal(x) if isinstance(x, bytes) else x,\\n callconduit(ui, name, params),\\n )\\n s = json.dumps(result, sort_keys=True, indent=2, separators=(u',', u': '))\\n ui.write(b'%s\\\\n' % encoding.unitolocal(s))\\n \\n \\n def getrepophid(repo):\\n \\\"\\\"\\\"given callsign, return repository PHID or None\\\"\\\"\\\"\\n # developer config: phabricator.repophid\\n repophid = repo.ui.config(b'phabricator', b'repophid')\\n if repophid:\\n return repophid\\n callsign = repo.ui.config(b'phabricator', b'callsign')\\n if not callsign:\\n return None\\n query = callconduit(\\n repo.ui,\\n b'diffusion.repository.search',\\n {b'constraints': {b'callsigns': [callsign]}},\\n )\\n if len(query[b'data']) == 0:\\n return None\\n repophid = query[b'data'][0][b'phid']\\n repo.ui.setconfig(b'phabricator', b'repophid', repophid)\\n return repophid\\n \\n \\n _differentialrevisiontagre = re.compile(br'\\\\AD([1-9][0-9]*)\\\\Z')\\n _differentialrevisiondescre = re.compile(\\n br'^Differential Revision:\\\\s*(?P\\u003curl\\u003e(?:.*)D(?P\\u003cid\\u003e[1-9][0-9]*))$', re.M\\n )\\n \\n \\n def getoldnodedrevmap(repo, nodelist):\\n \\\"\\\"\\\"find previous nodes that has been sent to Phabricator\\n \\n return {node: (oldnode, Differential diff, Differential Revision ID)}\\n for node in nodelist with known previous sent versions, or associated\\n Differential Revision IDs. 
``oldnode`` and ``Differential diff`` could\\n be ``None``.\\n \\n Examines commit messages like \\\"Differential Revision:\\\" to get the\\n association information.\\n \\n If such commit message line is not found, examines all precursors and their\\n tags. Tags with format like \\\"D1234\\\" are considered a match and the node\\n with that tag, and the number after \\\"D\\\" (ex. 1234) will be returned.\\n \\n The ``old node``, if not None, is guaranteed to be the last diff of\\n corresponding Differential Revision, and exist in the repo.\\n \\\"\\\"\\\"\\n unfi = repo.unfiltered()\\n has_node = unfi.changelog.index.has_node\\n \\n result = {} # {node: (oldnode?, lastdiff?, drev)}\\n toconfirm = {} # {node: (force, {precnode}, drev)}\\n for node in nodelist:\\n ctx = unfi[node]\\n # For tags like \\\"D123\\\", put them into \\\"toconfirm\\\" to verify later\\n precnodes = list(obsutil.allpredecessors(unfi.obsstore, [node]))\\n for n in precnodes:\\n if has_node(n):\\n for tag in unfi.nodetags(n):\\n m = _differentialrevisiontagre.match(tag)\\n if m:\\n toconfirm[node] = (0, set(precnodes), int(m.group(1)))\\n break\\n else:\\n continue # move to next predecessor\\n break # found a tag, stop\\n else:\\n # Check commit message\\n m = _differentialrevisiondescre.search(ctx.description())\\n if m:\\n toconfirm[node] = (1, set(precnodes), int(m.group('id')))\\n \\n # Double check if tags are genuine by collecting all old nodes from\\n # Phabricator, and expect precursors overlap with it.\\n if toconfirm:\\n drevs = [drev for force, precs, drev in toconfirm.values()]\\n alldiffs = callconduit(\\n unfi.ui, b'differential.querydiffs', {b'revisionIDs': drevs}\\n )\\n getnode = lambda d: bin(getdiffmeta(d).get(b'node', b'')) or None\\n for newnode, (force, precset, drev) in toconfirm.items():\\n diffs = [\\n d for d in alldiffs.values() if int(d[b'revisionID']) == drev\\n ]\\n \\n # \\\"precursors\\\" as known by Phabricator\\n phprecset = set(getnode(d) for d in diffs)\\n 
\\n # Ignore if precursors (Phabricator and local repo) do not overlap,\\n # and force is not set (when commit message says nothing)\\n if not force and not bool(phprecset & precset):\\n tagname = b'D%d' % drev\\n tags.tag(\\n repo,\\n tagname,\\n nullid,\\n message=None,\\n user=None,\\n date=None,\\n local=True,\\n )\\n unfi.ui.warn(\\n _(\\n b'D%d: local tag removed - does not match '\\n b'Differential history\\\\n'\\n )\\n % drev\\n )\\n continue\\n \\n # Find the last node using Phabricator metadata, and make sure it\\n # exists in the repo\\n oldnode = lastdiff = None\\n if diffs:\\n lastdiff = max(diffs, key=lambda d: int(d[b'id']))\\n oldnode = getnode(lastdiff)\\n if oldnode and not has_node(oldnode):\\n oldnode = None\\n \\n result[newnode] = (oldnode, lastdiff, drev)\\n \\n return result\\n \\n \\n def getdrevmap(repo, revs):\\n \\\"\\\"\\\"Return a dict mapping each rev in `revs` to their Differential Revision\\n ID or None.\\n \\\"\\\"\\\"\\n result = {}\\n for rev in revs:\\n result[rev] = None\\n ctx = repo[rev]\\n # Check commit message\\n m = _differentialrevisiondescre.search(ctx.description())\\n if m:\\n result[rev] = int(m.group('id'))\\n continue\\n # Check tags\\n for tag in repo.nodetags(ctx.node()):\\n m = _differentialrevisiontagre.match(tag)\\n if m:\\n result[rev] = int(m.group(1))\\n break\\n \\n return result\\n \\n \\n def getdiff(ctx, diffopts):\\n \\\"\\\"\\\"plain-text diff without header (user, commit message, etc)\\\"\\\"\\\"\\n output = util.stringio()\\n for chunk, _label in patch.diffui(\\n ctx.repo(), ctx.p1().node(), ctx.node(), None, opts=diffopts\\n ):\\n output.write(chunk)\\n return output.getvalue()\\n \\n \\n class DiffChangeType(object):\\n ADD = 1\\n CHANGE = 2\\n DELETE = 3\\n MOVE_AWAY = 4\\n COPY_AWAY = 5\\n MOVE_HERE = 6\\n COPY_HERE = 7\\n MULTICOPY = 8\\n \\n \\n class DiffFileType(object):\\n TEXT = 1\\n IMAGE = 2\\n BINARY = 3\\n \\n \\n @attr.s\\n class phabhunk(dict):\\n \\\"\\\"\\\"Represents a 
Differential hunk, which is owned by a Differential change\\n \\\"\\\"\\\"\\n \\n oldOffset = attr.ib(default=0) # camelcase-required\\n oldLength = attr.ib(default=0) # camelcase-required\\n newOffset = attr.ib(default=0) # camelcase-required\\n newLength = attr.ib(default=0) # camelcase-required\\n corpus = attr.ib(default='')\\n # These get added to the phabchange's equivalents\\n addLines = attr.ib(default=0) # camelcase-required\\n delLines = attr.ib(default=0) # camelcase-required\\n \\n \\n @attr.s\\n class phabchange(object):\\n \\\"\\\"\\\"Represents a Differential change, owns Differential hunks and owned by a\\n Differential diff. Each one represents one file in a diff.\\n \\\"\\\"\\\"\\n \\n currentPath = attr.ib(default=None) # camelcase-required\\n oldPath = attr.ib(default=None) # camelcase-required\\n awayPaths = attr.ib(default=attr.Factory(list)) # camelcase-required\\n metadata = attr.ib(default=attr.Factory(dict))\\n oldProperties = attr.ib(default=attr.Factory(dict)) # camelcase-required\\n newProperties = attr.ib(default=attr.Factory(dict)) # camelcase-required\\n type = attr.ib(default=DiffChangeType.CHANGE)\\n fileType = attr.ib(default=DiffFileType.TEXT) # camelcase-required\\n commitHash = attr.ib(default=None) # camelcase-required\\n addLines = attr.ib(default=0) # camelcase-required\\n delLines = attr.ib(default=0) # camelcase-required\\n hunks = attr.ib(default=attr.Factory(list))\\n \\n def copynewmetadatatoold(self):\\n for key in list(self.metadata.keys()):\\n newkey = key.replace(b'new:', b'old:')\\n self.metadata[newkey] = self.metadata[key]\\n \\n def addoldmode(self, value):\\n self.oldProperties[b'unix:filemode'] = value\\n \\n def addnewmode(self, value):\\n self.newProperties[b'unix:filemode'] = value\\n \\n def addhunk(self, hunk):\\n if not isinstance(hunk, phabhunk):\\n raise error.Abort(b'phabchange.addhunk only takes phabhunks')\\n self.hunks.append(pycompat.byteskwargs(attr.asdict(hunk)))\\n # It's useful to include 
these stats since the Phab web UI shows them,\\n # and uses them to estimate how large a change a Revision is. Also used\\n # in email subjects for the [+++--] bit.\\n self.addLines += hunk.addLines\\n self.delLines += hunk.delLines\\n \\n \\n @attr.s\\n class phabdiff(object):\\n \\\"\\\"\\\"Represents a Differential diff, owns Differential changes. Corresponds\\n to a commit.\\n \\\"\\\"\\\"\\n \\n # Doesn't seem to be any reason to send this (output of uname -n)\\n sourceMachine = attr.ib(default=b'') # camelcase-required\\n sourcePath = attr.ib(default=b'\\/') # camelcase-required\\n sourceControlBaseRevision = attr.ib(default=b'0' * 40) # camelcase-required\\n sourceControlPath = attr.ib(default=b'\\/') # camelcase-required\\n sourceControlSystem = attr.ib(default=b'hg') # camelcase-required\\n branch = attr.ib(default=b'default')\\n bookmark = attr.ib(default=None)\\n creationMethod = attr.ib(default=b'phabsend') # camelcase-required\\n lintStatus = attr.ib(default=b'none') # camelcase-required\\n unitStatus = attr.ib(default=b'none') # camelcase-required\\n changes = attr.ib(default=attr.Factory(dict))\\n repositoryPHID = attr.ib(default=None) # camelcase-required\\n \\n def addchange(self, change):\\n if not isinstance(change, phabchange):\\n raise error.Abort(b'phabdiff.addchange only takes phabchanges')\\n self.changes[change.currentPath] = pycompat.byteskwargs(\\n attr.asdict(change)\\n )\\n \\n \\n def maketext(pchange, ctx, fname):\\n \\\"\\\"\\\"populate the phabchange for a text file\\\"\\\"\\\"\\n repo = ctx.repo()\\n fmatcher = match.exact([fname])\\n diffopts = mdiff.diffopts(git=True, context=32767)\\n _pfctx, _fctx, header, fhunks = next(\\n patch.diffhunks(repo, ctx.p1(), ctx, fmatcher, opts=diffopts)\\n )\\n \\n for fhunk in fhunks:\\n (oldOffset, oldLength, newOffset, newLength), lines = fhunk\\n corpus = b''.join(lines[1:])\\n shunk = list(header)\\n shunk.extend(lines)\\n _mf, _mt, addLines, delLines, _hb = patch.diffstatsum(\\n 
patch.diffstatdata(util.iterlines(shunk))\\n )\\n pchange.addhunk(\\n phabhunk(\\n oldOffset,\\n oldLength,\\n newOffset,\\n newLength,\\n corpus,\\n addLines,\\n delLines,\\n )\\n )\\n \\n \\n def uploadchunks(fctx, fphid):\\n \\\"\\\"\\\"upload large binary files as separate chunks.\\n Phab requests chunking over 8MiB, and splits into 4MiB chunks\\n \\\"\\\"\\\"\\n ui = fctx.repo().ui\\n chunks = callconduit(ui, b'file.querychunks', {b'filePHID': fphid})\\n with ui.makeprogress(\\n _(b'uploading file chunks'), unit=_(b'chunks'), total=len(chunks)\\n ) as progress:\\n for chunk in chunks:\\n progress.increment()\\n if chunk[b'complete']:\\n continue\\n bstart = int(chunk[b'byteStart'])\\n bend = int(chunk[b'byteEnd'])\\n callconduit(\\n ui,\\n b'file.uploadchunk',\\n {\\n b'filePHID': fphid,\\n b'byteStart': bstart,\\n b'data': base64.b64encode(fctx.data()[bstart:bend]),\\n b'dataEncoding': b'base64',\\n },\\n )\\n \\n \\n def uploadfile(fctx):\\n \\\"\\\"\\\"upload binary files to Phabricator\\\"\\\"\\\"\\n repo = fctx.repo()\\n ui = repo.ui\\n fname = fctx.path()\\n size = fctx.size()\\n fhash = pycompat.bytestr(hashlib.sha256(fctx.data()).hexdigest())\\n \\n # an allocate call is required first to see if an upload is even required\\n # (Phab might already have it) and to determine if chunking is needed\\n allocateparams = {\\n b'name': fname,\\n b'contentLength': size,\\n b'contentHash': fhash,\\n }\\n filealloc = callconduit(ui, b'file.allocate', allocateparams)\\n fphid = filealloc[b'filePHID']\\n \\n if filealloc[b'upload']:\\n ui.write(_(b'uploading %s\\\\n') % bytes(fctx))\\n if not fphid:\\n uploadparams = {\\n b'name': fname,\\n b'data_base64': base64.b64encode(fctx.data()),\\n }\\n fphid = callconduit(ui, b'file.upload', uploadparams)\\n else:\\n uploadchunks(fctx, fphid)\\n else:\\n ui.debug(b'server already has %s\\\\n' % bytes(fctx))\\n \\n if not fphid:\\n raise error.Abort(b'Upload of %s failed.' 
% bytes(fctx))\\n \\n return fphid\\n \\n \\n def addoldbinary(pchange, fctx):\\n \\\"\\\"\\\"add the metadata for the previous version of a binary file to the\\n phabchange for the new version\\n \\\"\\\"\\\"\\n oldfctx = fctx.p1()\\n if fctx.cmp(oldfctx):\\n # Files differ, add the old one\\n pchange.metadata[b'old:file:size'] = oldfctx.size()\\n mimeguess, _enc = mimetypes.guess_type(\\n encoding.unifromlocal(oldfctx.path())\\n )\\n if mimeguess:\\n pchange.metadata[b'old:file:mime-type'] = pycompat.bytestr(\\n mimeguess\\n )\\n fphid = uploadfile(oldfctx)\\n pchange.metadata[b'old:binary-phid'] = fphid\\n else:\\n # If it's left as IMAGE\\/BINARY web UI might try to display it\\n pchange.fileType = DiffFileType.TEXT\\n pchange.copynewmetadatatoold()\\n \\n \\n def makebinary(pchange, fctx):\\n \\\"\\\"\\\"populate the phabchange for a binary file\\\"\\\"\\\"\\n pchange.fileType = DiffFileType.BINARY\\n fphid = uploadfile(fctx)\\n pchange.metadata[b'new:binary-phid'] = fphid\\n pchange.metadata[b'new:file:size'] = fctx.size()\\n mimeguess, _enc = mimetypes.guess_type(encoding.unifromlocal(fctx.path()))\\n if mimeguess:\\n mimeguess = pycompat.bytestr(mimeguess)\\n pchange.metadata[b'new:file:mime-type'] = mimeguess\\n if mimeguess.startswith(b'image\\/'):\\n pchange.fileType = DiffFileType.IMAGE\\n \\n \\n # Copied from mercurial\\/patch.py\\n gitmode = {b'l': b'120000', b'x': b'100755', b'': b'100644'}\\n \\n \\n def notutf8(fctx):\\n \\\"\\\"\\\"detect non-UTF-8 text files since Phabricator requires them to be marked\\n as binary\\n \\\"\\\"\\\"\\n try:\\n fctx.data().decode('utf-8')\\n if fctx.parents():\\n fctx.p1().data().decode('utf-8')\\n return False\\n except UnicodeDecodeError:\\n fctx.repo().ui.write(\\n _(b'file %s detected as non-UTF-8, marked as binary\\\\n')\\n % fctx.path()\\n )\\n return True\\n \\n \\n def addremoved(pdiff, ctx, removed):\\n \\\"\\\"\\\"add removed files to the phabdiff. 
Shouldn't include moves\\\"\\\"\\\"\\n for fname in removed:\\n pchange = phabchange(\\n currentPath=fname, oldPath=fname, type=DiffChangeType.DELETE\\n )\\n pchange.addoldmode(gitmode[ctx.p1()[fname].flags()])\\n fctx = ctx.p1()[fname]\\n if not (fctx.isbinary() or notutf8(fctx)):\\n maketext(pchange, ctx, fname)\\n \\n pdiff.addchange(pchange)\\n \\n \\n def addmodified(pdiff, ctx, modified):\\n \\\"\\\"\\\"add modified files to the phabdiff\\\"\\\"\\\"\\n for fname in modified:\\n fctx = ctx[fname]\\n pchange = phabchange(currentPath=fname, oldPath=fname)\\n filemode = gitmode[ctx[fname].flags()]\\n originalmode = gitmode[ctx.p1()[fname].flags()]\\n if filemode != originalmode:\\n pchange.addoldmode(originalmode)\\n pchange.addnewmode(filemode)\\n \\n if fctx.isbinary() or notutf8(fctx):\\n makebinary(pchange, fctx)\\n addoldbinary(pchange, fctx)\\n else:\\n maketext(pchange, ctx, fname)\\n \\n pdiff.addchange(pchange)\\n \\n \\n def addadded(pdiff, ctx, added, removed):\\n \\\"\\\"\\\"add file adds to the phabdiff, both new files and copies\\/moves\\\"\\\"\\\"\\n # Keep track of files that've been recorded as moved\\/copied, so if there are\\n # additional copies we can mark them (moves get removed from removed)\\n copiedchanges = {}\\n movedchanges = {}\\n for fname in added:\\n fctx = ctx[fname]\\n pchange = phabchange(currentPath=fname)\\n \\n filemode = gitmode[ctx[fname].flags()]\\n renamed = fctx.renamed()\\n \\n if renamed:\\n originalfname = renamed[0]\\n originalmode = gitmode[ctx.p1()[originalfname].flags()]\\n pchange.oldPath = originalfname\\n \\n if originalfname in removed:\\n origpchange = phabchange(\\n currentPath=originalfname,\\n oldPath=originalfname,\\n type=DiffChangeType.MOVE_AWAY,\\n awayPaths=[fname],\\n )\\n movedchanges[originalfname] = origpchange\\n removed.remove(originalfname)\\n pchange.type = DiffChangeType.MOVE_HERE\\n elif originalfname in movedchanges:\\n movedchanges[originalfname].type = DiffChangeType.MULTICOPY\\n 
movedchanges[originalfname].awayPaths.append(fname)\\n pchange.type = DiffChangeType.COPY_HERE\\n else: # pure copy\\n if originalfname not in copiedchanges:\\n origpchange = phabchange(\\n currentPath=originalfname, type=DiffChangeType.COPY_AWAY\\n )\\n copiedchanges[originalfname] = origpchange\\n else:\\n origpchange = copiedchanges[originalfname]\\n origpchange.awayPaths.append(fname)\\n pchange.type = DiffChangeType.COPY_HERE\\n \\n if filemode != originalmode:\\n pchange.addoldmode(originalmode)\\n pchange.addnewmode(filemode)\\n else: # Brand-new file\\n pchange.addnewmode(gitmode[fctx.flags()])\\n pchange.type = DiffChangeType.ADD\\n \\n if fctx.isbinary() or notutf8(fctx):\\n makebinary(pchange, fctx)\\n if renamed:\\n addoldbinary(pchange, fctx)\\n else:\\n maketext(pchange, ctx, fname)\\n \\n pdiff.addchange(pchange)\\n \\n for _path, copiedchange in copiedchanges.items():\\n pdiff.addchange(copiedchange)\\n for _path, movedchange in movedchanges.items():\\n pdiff.addchange(movedchange)\\n \\n \\n def creatediff(ctx):\\n \\\"\\\"\\\"create a Differential Diff\\\"\\\"\\\"\\n repo = ctx.repo()\\n repophid = getrepophid(repo)\\n # Create a \\\"Differential Diff\\\" via \\\"differential.creatediff\\\" API\\n pdiff = phabdiff(\\n sourceControlBaseRevision=b'%s' % ctx.p1().hex(),\\n branch=b'%s' % ctx.branch(),\\n )\\n modified, added, removed, _d, _u, _i, _c = ctx.p1().status(ctx)\\n # addadded will remove moved files from removed, so addremoved won't get\\n # them\\n addadded(pdiff, ctx, added, removed)\\n addmodified(pdiff, ctx, modified)\\n addremoved(pdiff, ctx, removed)\\n if repophid:\\n pdiff.repositoryPHID = repophid\\n diff = callconduit(\\n repo.ui,\\n b'differential.creatediff',\\n pycompat.byteskwargs(attr.asdict(pdiff)),\\n )\\n if not diff:\\n raise error.Abort(_(b'cannot create diff for %s') % ctx)\\n return diff\\n \\n \\n def writediffproperties(ctx, diff):\\n \\\"\\\"\\\"write metadata to diff so patches could be applied 
losslessly\\\"\\\"\\\"\\n # creatediff returns with a diffid but query returns with an id\\n diffid = diff.get(b'diffid', diff.get(b'id'))\\n params = {\\n b'diff_id': diffid,\\n b'name': b'hg:meta',\\n b'data': templatefilters.json(\\n {\\n b'user': ctx.user(),\\n b'date': b'%d %d' % ctx.date(),\\n b'branch': ctx.branch(),\\n b'node': ctx.hex(),\\n b'parent': ctx.p1().hex(),\\n }\\n ),\\n }\\n callconduit(ctx.repo().ui, b'differential.setdiffproperty', params)\\n \\n params = {\\n b'diff_id': diffid,\\n b'name': b'local:commits',\\n b'data': templatefilters.json(\\n {\\n ctx.hex(): {\\n b'author': stringutil.person(ctx.user()),\\n b'authorEmail': stringutil.email(ctx.user()),\\n b'time': int(ctx.date()[0]),\\n b'commit': ctx.hex(),\\n b'parents': [ctx.p1().hex()],\\n b'branch': ctx.branch(),\\n },\\n }\\n ),\\n }\\n callconduit(ctx.repo().ui, b'differential.setdiffproperty', params)\\n \\n \\n def createdifferentialrevision(\\n ctx,\\n revid=None,\\n parentrevphid=None,\\n oldnode=None,\\n olddiff=None,\\n actions=None,\\n comment=None,\\n ):\\n \\\"\\\"\\\"create or update a Differential Revision\\n \\n If revid is None, create a new Differential Revision, otherwise update\\n revid. 
If parentrevphid is not None, set it as a dependency.\\n \\n If oldnode is not None, check if the patch content (without commit message\\n and metadata) has changed before creating another diff.\\n \\n If actions is not None, they will be appended to the transaction.\\n \\\"\\\"\\\"\\n repo = ctx.repo()\\n if oldnode:\\n diffopts = mdiff.diffopts(git=True, context=32767)\\n oldctx = repo.unfiltered()[oldnode]\\n neednewdiff = getdiff(ctx, diffopts) != getdiff(oldctx, diffopts)\\n else:\\n neednewdiff = True\\n \\n transactions = []\\n if neednewdiff:\\n diff = creatediff(ctx)\\n transactions.append({b'type': b'update', b'value': diff[b'phid']})\\n if comment:\\n transactions.append({b'type': b'comment', b'value': comment})\\n else:\\n # Even if we don't need to upload a new diff because the patch content\\n # does not change. We might still need to update its metadata so\\n # pushers could know the correct node metadata.\\n assert olddiff\\n diff = olddiff\\n writediffproperties(ctx, diff)\\n \\n # Set the parent Revision every time, so commit re-ordering is picked-up\\n if parentrevphid:\\n transactions.append(\\n {b'type': b'parents.set', b'value': [parentrevphid]}\\n )\\n \\n if actions:\\n transactions += actions\\n \\n # Parse commit message and update related fields.\\n desc = ctx.description()\\n info = callconduit(\\n repo.ui, b'differential.parsecommitmessage', {b'corpus': desc}\\n )\\n for k, v in info[b'fields'].items():\\n if k in [b'title', b'summary', b'testPlan']:\\n transactions.append({b'type': k, b'value': v})\\n \\n params = {b'transactions': transactions}\\n if revid is not None:\\n # Update an existing Differential Revision\\n params[b'objectIdentifier'] = revid\\n \\n revision = callconduit(repo.ui, b'differential.revision.edit', params)\\n if not revision:\\n raise error.Abort(_(b'cannot create revision for %s') % ctx)\\n \\n return revision, diff\\n \\n \\n-def userphids(repo, names):\\n+def userphids(ui, names):\\n \\\"\\\"\\\"convert user 
names to PHIDs\\\"\\\"\\\"\\n names = [name.lower() for name in names]\\n query = {b'constraints': {b'usernames': names}}\\n- result = callconduit(repo.ui, b'user.search', query)\\n+ result = callconduit(ui, b'user.search', query)\\n # username not found is not an error of the API. So check if we have missed\\n # some names here.\\n data = result[b'data']\\n resolved = set(entry[b'fields'][b'username'].lower() for entry in data)\\n unresolved = set(names) - resolved\\n if unresolved:\\n raise error.Abort(\\n _(b'unknown username: %s') % b' '.join(sorted(unresolved))\\n )\\n return [entry[b'phid'] for entry in data]\\n \\n \\n @vcrcommand(\\n b'phabsend',\\n [\\n (b'r', b'rev', [], _(b'revisions to send'), _(b'REV')),\\n (b'', b'amend', True, _(b'update commit messages')),\\n (b'', b'reviewer', [], _(b'specify reviewers')),\\n (b'', b'blocker', [], _(b'specify blocking reviewers')),\\n (\\n b'm',\\n b'comment',\\n b'',\\n _(b'add a comment to Revisions with new\\/updated Diffs'),\\n ),\\n (b'', b'confirm', None, _(b'ask for confirmation before sending')),\\n ],\\n _(b'REV [OPTIONS]'),\\n helpcategory=command.CATEGORY_IMPORT_EXPORT,\\n )\\n def phabsend(ui, repo, *revs, **opts):\\n \\\"\\\"\\\"upload changesets to Phabricator\\n \\n If there are multiple revisions specified, they will be send as a stack\\n with a linear dependencies relationship using the order specified by the\\n revset.\\n \\n For the first time uploading changesets, local tags will be created to\\n maintain the association. After the first time, phabsend will check\\n obsstore and tags information so it can figure out whether to update an\\n existing Differential Revision, or create a new one.\\n \\n If --amend is set, update commit messages so they have the\\n ``Differential Revision`` URL, remove related tags. This is similar to what\\n arcanist will do, and is more desired in author-push workflows. 
Otherwise,\\n use local tags to record the ``Differential Revision`` association.\\n \\n The --confirm option lets you confirm changesets before sending them. You\\n can also add following to your configuration file to make it default\\n behaviour::\\n \\n [phabsend]\\n confirm = true\\n \\n phabsend will check obsstore and the above association to decide whether to\\n update an existing Differential Revision, or create a new one.\\n \\\"\\\"\\\"\\n opts = pycompat.byteskwargs(opts)\\n revs = list(revs) + opts.get(b'rev', [])\\n revs = scmutil.revrange(repo, revs)\\n revs.sort() # ascending order to preserve topological parent\\/child in phab\\n \\n if not revs:\\n raise error.Abort(_(b'phabsend requires at least one changeset'))\\n if opts.get(b'amend'):\\n cmdutil.checkunfinished(repo)\\n \\n # {newnode: (oldnode, olddiff, olddrev}\\n oldmap = getoldnodedrevmap(repo, [repo[r].node() for r in revs])\\n \\n confirm = ui.configbool(b'phabsend', b'confirm')\\n confirm |= bool(opts.get(b'confirm'))\\n if confirm:\\n confirmed = _confirmbeforesend(repo, revs, oldmap)\\n if not confirmed:\\n raise error.Abort(_(b'phabsend cancelled'))\\n \\n actions = []\\n reviewers = opts.get(b'reviewer', [])\\n blockers = opts.get(b'blocker', [])\\n phids = []\\n if reviewers:\\n- phids.extend(userphids(repo, reviewers))\\n+ phids.extend(userphids(repo.ui, reviewers))\\n if blockers:\\n phids.extend(\\n- map(lambda phid: b'blocking(%s)' % phid, userphids(repo, blockers))\\n+ map(\\n+ lambda phid: b'blocking(%s)' % phid,\\n+ userphids(repo.ui, blockers),\\n+ )\\n )\\n if phids:\\n actions.append({b'type': b'reviewers.add', b'value': phids})\\n \\n drevids = [] # [int]\\n diffmap = {} # {newnode: diff}\\n \\n # Send patches one by one so we know their Differential Revision PHIDs and\\n # can provide dependency relationship\\n lastrevphid = None\\n for rev in revs:\\n ui.debug(b'sending rev %d\\\\n' % rev)\\n ctx = repo[rev]\\n \\n # Get Differential Revision ID\\n oldnode, olddiff, 
revid = oldmap.get(ctx.node(), (None, None, None))\\n if oldnode != ctx.node() or opts.get(b'amend'):\\n # Create or update Differential Revision\\n revision, diff = createdifferentialrevision(\\n ctx,\\n revid,\\n lastrevphid,\\n oldnode,\\n olddiff,\\n actions,\\n opts.get(b'comment'),\\n )\\n diffmap[ctx.node()] = diff\\n newrevid = int(revision[b'object'][b'id'])\\n newrevphid = revision[b'object'][b'phid']\\n if revid:\\n action = b'updated'\\n else:\\n action = b'created'\\n \\n # Create a local tag to note the association, if commit message\\n # does not have it already\\n m = _differentialrevisiondescre.search(ctx.description())\\n if not m or int(m.group('id')) != newrevid:\\n tagname = b'D%d' % newrevid\\n tags.tag(\\n repo,\\n tagname,\\n ctx.node(),\\n message=None,\\n user=None,\\n date=None,\\n local=True,\\n )\\n else:\\n # Nothing changed. But still set \\\"newrevphid\\\" so the next revision\\n # could depend on this one and \\\"newrevid\\\" for the summary line.\\n newrevphid = querydrev(repo.ui, b'%d' % revid)[0][b'phid']\\n newrevid = revid\\n action = b'skipped'\\n \\n actiondesc = ui.label(\\n {\\n b'created': _(b'created'),\\n b'skipped': _(b'skipped'),\\n b'updated': _(b'updated'),\\n }[action],\\n b'phabricator.action.%s' % action,\\n )\\n drevdesc = ui.label(b'D%d' % newrevid, b'phabricator.drev')\\n nodedesc = ui.label(bytes(ctx), b'phabricator.node')\\n desc = ui.label(ctx.description().split(b'\\\\n')[0], b'phabricator.desc')\\n ui.write(\\n _(b'%s - %s - %s: %s\\\\n') % (drevdesc, actiondesc, nodedesc, desc)\\n )\\n drevids.append(newrevid)\\n lastrevphid = newrevphid\\n \\n # Update commit messages and remove tags\\n if opts.get(b'amend'):\\n unfi = repo.unfiltered()\\n drevs = callconduit(ui, b'differential.query', {b'ids': drevids})\\n with repo.wlock(), repo.lock(), repo.transaction(b'phabsend'):\\n wnode = unfi[b'.'].node()\\n mapping = {} # {oldnode: [newnode]}\\n for i, rev in enumerate(revs):\\n old = unfi[rev]\\n drevid = 
drevids[i]\\n drev = [d for d in drevs if int(d[b'id']) == drevid][0]\\n newdesc = getdescfromdrev(drev)\\n # Make sure commit message contain \\\"Differential Revision\\\"\\n if old.description() != newdesc:\\n if old.phase() == phases.public:\\n ui.warn(\\n _(b\\\"warning: not updating public commit %s\\\\n\\\")\\n % scmutil.formatchangeid(old)\\n )\\n continue\\n parents = [\\n mapping.get(old.p1().node(), (old.p1(),))[0],\\n mapping.get(old.p2().node(), (old.p2(),))[0],\\n ]\\n new = context.metadataonlyctx(\\n repo,\\n old,\\n parents=parents,\\n text=newdesc,\\n user=old.user(),\\n date=old.date(),\\n extra=old.extra(),\\n )\\n \\n newnode = new.commit()\\n \\n mapping[old.node()] = [newnode]\\n # Update diff property\\n # If it fails just warn and keep going, otherwise the DREV\\n # associations will be lost\\n try:\\n writediffproperties(unfi[newnode], diffmap[old.node()])\\n except util.urlerr.urlerror:\\n ui.warnnoi18n(\\n b'Failed to update metadata for D%d\\\\n' % drevid\\n )\\n # Remove local tags since it's no longer necessary\\n tagname = b'D%d' % drevid\\n if tagname in repo.tags():\\n tags.tag(\\n repo,\\n tagname,\\n nullid,\\n message=None,\\n user=None,\\n date=None,\\n local=True,\\n )\\n scmutil.cleanupnodes(repo, mapping, b'phabsend', fixphase=True)\\n if wnode in mapping:\\n unfi.setparents(mapping[wnode][0])\\n \\n \\n # Map from \\\"hg:meta\\\" keys to header understood by \\\"hg import\\\". 
The order is\\n # consistent with \\\"hg export\\\" output.\\n _metanamemap = util.sortdict(\\n [\\n (b'user', b'User'),\\n (b'date', b'Date'),\\n (b'branch', b'Branch'),\\n (b'node', b'Node ID'),\\n (b'parent', b'Parent '),\\n ]\\n )\\n \\n \\n def _confirmbeforesend(repo, revs, oldmap):\\n url, token = readurltoken(repo.ui)\\n ui = repo.ui\\n for rev in revs:\\n ctx = repo[rev]\\n desc = ctx.description().splitlines()[0]\\n oldnode, olddiff, drevid = oldmap.get(ctx.node(), (None, None, None))\\n if drevid:\\n drevdesc = ui.label(b'D%d' % drevid, b'phabricator.drev')\\n else:\\n drevdesc = ui.label(_(b'NEW'), b'phabricator.drev')\\n \\n ui.write(\\n _(b'%s - %s: %s\\\\n')\\n % (\\n drevdesc,\\n ui.label(bytes(ctx), b'phabricator.node'),\\n ui.label(desc, b'phabricator.desc'),\\n )\\n )\\n \\n if ui.promptchoice(\\n _(b'Send the above changes to %s (yn)?$$ &Yes $$ &No') % url\\n ):\\n return False\\n \\n return True\\n \\n \\n _knownstatusnames = {\\n b'accepted',\\n b'needsreview',\\n b'needsrevision',\\n b'closed',\\n b'abandoned',\\n b'changesplanned',\\n }\\n \\n \\n def _getstatusname(drev):\\n \\\"\\\"\\\"get normalized status name from a Differential Revision\\\"\\\"\\\"\\n return drev[b'statusName'].replace(b' ', b'').lower()\\n \\n \\n # Small language to specify differential revisions. 
Support symbols: (), :X,\\n # +, and -.\\n \\n _elements = {\\n # token-type: binding-strength, primary, prefix, infix, suffix\\n b'(': (12, None, (b'group', 1, b')'), None, None),\\n b':': (8, None, (b'ancestors', 8), None, None),\\n b'&': (5, None, None, (b'and_', 5), None),\\n b'+': (4, None, None, (b'add', 4), None),\\n b'-': (4, None, None, (b'sub', 4), None),\\n b')': (0, None, None, None, None),\\n b'symbol': (0, b'symbol', None, None, None),\\n b'end': (0, None, None, None, None),\\n }\\n \\n \\n def _tokenize(text):\\n view = memoryview(text) # zero-copy slice\\n special = b'():+-& '\\n pos = 0\\n length = len(text)\\n while pos \\u003c length:\\n symbol = b''.join(\\n itertools.takewhile(\\n lambda ch: ch not in special, pycompat.iterbytestr(view[pos:])\\n )\\n )\\n if symbol:\\n yield (b'symbol', symbol, pos)\\n pos += len(symbol)\\n else: # special char, ignore space\\n if text[pos : pos + 1] != b' ':\\n yield (text[pos : pos + 1], None, pos)\\n pos += 1\\n yield (b'end', None, pos)\\n \\n \\n def _parse(text):\\n tree, pos = parser.parser(_elements).parse(_tokenize(text))\\n if pos != len(text):\\n raise error.ParseError(b'invalid token', pos)\\n return tree\\n \\n \\n def _parsedrev(symbol):\\n \\\"\\\"\\\"str -\\u003e int or None, ex. 
'D45' -\\u003e 45; '12' -\\u003e 12; 'x' -\\u003e None\\\"\\\"\\\"\\n if symbol.startswith(b'D') and symbol[1:].isdigit():\\n return int(symbol[1:])\\n if symbol.isdigit():\\n return int(symbol)\\n \\n \\n def _prefetchdrevs(tree):\\n \\\"\\\"\\\"return ({single-drev-id}, {ancestor-drev-id}) to prefetch\\\"\\\"\\\"\\n drevs = set()\\n ancestordrevs = set()\\n op = tree[0]\\n if op == b'symbol':\\n r = _parsedrev(tree[1])\\n if r:\\n drevs.add(r)\\n elif op == b'ancestors':\\n r, a = _prefetchdrevs(tree[1])\\n drevs.update(r)\\n ancestordrevs.update(r)\\n ancestordrevs.update(a)\\n else:\\n for t in tree[1:]:\\n r, a = _prefetchdrevs(t)\\n drevs.update(r)\\n ancestordrevs.update(a)\\n return drevs, ancestordrevs\\n \\n \\n def querydrev(ui, spec):\\n \\\"\\\"\\\"return a list of \\\"Differential Revision\\\" dicts\\n \\n spec is a string using a simple query language, see docstring in phabread\\n for details.\\n \\n A \\\"Differential Revision dict\\\" looks like:\\n \\n {\\n \\\"activeDiffPHID\\\": \\\"PHID-DIFF-xoqnjkobbm6k4dk6hi72\\\",\\n \\\"authorPHID\\\": \\\"PHID-USER-tv3ohwc4v4jeu34otlye\\\",\\n \\\"auxiliary\\\": {\\n \\\"phabricator:depends-on\\\": [\\n \\\"PHID-DREV-gbapp366kutjebt7agcd\\\"\\n ]\\n \\\"phabricator:projects\\\": [],\\n },\\n \\\"branch\\\": \\\"default\\\",\\n \\\"ccs\\\": [],\\n \\\"commits\\\": [],\\n \\\"dateCreated\\\": \\\"1499181406\\\",\\n \\\"dateModified\\\": \\\"1499182103\\\",\\n \\\"diffs\\\": [\\n \\\"3\\\",\\n \\\"4\\\",\\n ],\\n \\\"hashes\\\": [],\\n \\\"id\\\": \\\"2\\\",\\n \\\"lineCount\\\": \\\"2\\\",\\n \\\"phid\\\": \\\"PHID-DREV-672qvysjcczopag46qty\\\",\\n \\\"properties\\\": {},\\n \\\"repositoryPHID\\\": \\\"PHID-REPO-hub2hx62ieuqeheznasv\\\",\\n \\\"reviewers\\\": [],\\n \\\"sourcePath\\\": null\\n \\\"status\\\": \\\"0\\\",\\n \\\"statusName\\\": \\\"Needs Review\\\",\\n \\\"summary\\\": \\\"\\\",\\n \\\"testPlan\\\": \\\"\\\",\\n \\\"title\\\": \\\"example\\\",\\n \\\"uri\\\": 
\\\"https:\\/\\/phab.example.com\\/D2\\\",\\n }\\n \\\"\\\"\\\"\\n # TODO: replace differential.query and differential.querydiffs with\\n # differential.diff.search because the former (and their output) are\\n # frozen, and planned to be deprecated and removed.\\n \\n def fetch(params):\\n \\\"\\\"\\\"params -\\u003e single drev or None\\\"\\\"\\\"\\n key = (params.get(b'ids') or params.get(b'phids') or [None])[0]\\n if key in prefetched:\\n return prefetched[key]\\n drevs = callconduit(ui, b'differential.query', params)\\n # Fill prefetched with the result\\n for drev in drevs:\\n prefetched[drev[b'phid']] = drev\\n prefetched[int(drev[b'id'])] = drev\\n if key not in prefetched:\\n raise error.Abort(\\n _(b'cannot get Differential Revision %r') % params\\n )\\n return prefetched[key]\\n \\n def getstack(topdrevids):\\n \\\"\\\"\\\"given a top, get a stack from the bottom, [id] -\\u003e [id]\\\"\\\"\\\"\\n visited = set()\\n result = []\\n queue = [{b'ids': [i]} for i in topdrevids]\\n while queue:\\n params = queue.pop()\\n drev = fetch(params)\\n if drev[b'id'] in visited:\\n continue\\n visited.add(drev[b'id'])\\n result.append(int(drev[b'id']))\\n auxiliary = drev.get(b'auxiliary', {})\\n depends = auxiliary.get(b'phabricator:depends-on', [])\\n for phid in depends:\\n queue.append({b'phids': [phid]})\\n result.reverse()\\n return smartset.baseset(result)\\n \\n # Initialize prefetch cache\\n prefetched = {} # {id or phid: drev}\\n \\n tree = _parse(spec)\\n drevs, ancestordrevs = _prefetchdrevs(tree)\\n \\n # developer config: phabricator.batchsize\\n batchsize = ui.configint(b'phabricator', b'batchsize')\\n \\n # Prefetch Differential Revisions in batch\\n tofetch = set(drevs)\\n for r in ancestordrevs:\\n tofetch.update(range(max(1, r - batchsize), r + 1))\\n if drevs:\\n fetch({b'ids': list(tofetch)})\\n validids = sorted(set(getstack(list(ancestordrevs))) | set(drevs))\\n \\n # Walk through the tree, return smartsets\\n def walk(tree):\\n op = tree[0]\\n 
if op == b'symbol':\\n drev = _parsedrev(tree[1])\\n if drev:\\n return smartset.baseset([drev])\\n elif tree[1] in _knownstatusnames:\\n drevs = [\\n r\\n for r in validids\\n if _getstatusname(prefetched[r]) == tree[1]\\n ]\\n return smartset.baseset(drevs)\\n else:\\n raise error.Abort(_(b'unknown symbol: %s') % tree[1])\\n elif op in {b'and_', b'add', b'sub'}:\\n assert len(tree) == 3\\n return getattr(operator, op)(walk(tree[1]), walk(tree[2]))\\n elif op == b'group':\\n return walk(tree[1])\\n elif op == b'ancestors':\\n return getstack(walk(tree[1]))\\n else:\\n raise error.ProgrammingError(b'illegal tree: %r' % tree)\\n \\n return [prefetched[r] for r in walk(tree)]\\n \\n \\n def getdescfromdrev(drev):\\n \\\"\\\"\\\"get description (commit message) from \\\"Differential Revision\\\"\\n \\n This is similar to differential.getcommitmessage API. But we only care\\n about limited fields: title, summary, test plan, and URL.\\n \\\"\\\"\\\"\\n title = drev[b'title']\\n summary = drev[b'summary'].rstrip()\\n testplan = drev[b'testPlan'].rstrip()\\n if testplan:\\n testplan = b'Test Plan:\\\\n%s' % testplan\\n uri = b'Differential Revision: %s' % drev[b'uri']\\n return b'\\\\n\\\\n'.join(filter(None, [title, summary, testplan, uri]))\\n \\n \\n def getdiffmeta(diff):\\n \\\"\\\"\\\"get commit metadata (date, node, user, p1) from a diff object\\n \\n The metadata could be \\\"hg:meta\\\", sent by phabsend, like:\\n \\n \\\"properties\\\": {\\n \\\"hg:meta\\\": {\\n \\\"branch\\\": \\\"default\\\",\\n \\\"date\\\": \\\"1499571514 25200\\\",\\n \\\"node\\\": \\\"98c08acae292b2faf60a279b4189beb6cff1414d\\\",\\n \\\"user\\\": \\\"Foo Bar \\u003cfoo@example.com\\u003e\\\",\\n \\\"parent\\\": \\\"6d0abad76b30e4724a37ab8721d630394070fe16\\\"\\n }\\n }\\n \\n Or converted from \\\"local:commits\\\", sent by \\\"arc\\\", like:\\n \\n \\\"properties\\\": {\\n \\\"local:commits\\\": {\\n \\\"98c08acae292b2faf60a279b4189beb6cff1414d\\\": {\\n \\\"author\\\": \\\"Foo 
Bar\\\",\\n \\\"authorEmail\\\": \\\"foo@example.com\\\"\\n \\\"branch\\\": \\\"default\\\",\\n \\\"commit\\\": \\\"98c08acae292b2faf60a279b4189beb6cff1414d\\\",\\n \\\"local\\\": \\\"1000\\\",\\n \\\"message\\\": \\\"...\\\",\\n \\\"parents\\\": [\\\"6d0abad76b30e4724a37ab8721d630394070fe16\\\"],\\n \\\"rev\\\": \\\"98c08acae292b2faf60a279b4189beb6cff1414d\\\",\\n \\\"summary\\\": \\\"...\\\",\\n \\\"tag\\\": \\\"\\\",\\n \\\"time\\\": 1499546314,\\n }\\n }\\n }\\n \\n Note: metadata extracted from \\\"local:commits\\\" will lose time zone\\n information.\\n \\\"\\\"\\\"\\n props = diff.get(b'properties') or {}\\n meta = props.get(b'hg:meta')\\n if not meta:\\n if props.get(b'local:commits'):\\n commit = sorted(props[b'local:commits'].values())[0]\\n meta = {}\\n if b'author' in commit and b'authorEmail' in commit:\\n meta[b'user'] = b'%s \\u003c%s\\u003e' % (\\n commit[b'author'],\\n commit[b'authorEmail'],\\n )\\n if b'time' in commit:\\n meta[b'date'] = b'%d 0' % int(commit[b'time'])\\n if b'branch' in commit:\\n meta[b'branch'] = commit[b'branch']\\n node = commit.get(b'commit', commit.get(b'rev'))\\n if node:\\n meta[b'node'] = node\\n if len(commit.get(b'parents', ())) \\u003e= 1:\\n meta[b'parent'] = commit[b'parents'][0]\\n else:\\n meta = {}\\n if b'date' not in meta and b'dateCreated' in diff:\\n meta[b'date'] = b'%s 0' % diff[b'dateCreated']\\n if b'branch' not in meta and diff.get(b'branch'):\\n meta[b'branch'] = diff[b'branch']\\n if b'parent' not in meta and diff.get(b'sourceControlBaseRevision'):\\n meta[b'parent'] = diff[b'sourceControlBaseRevision']\\n return meta\\n \\n \\n def readpatch(ui, drevs, write):\\n \\\"\\\"\\\"generate plain-text patch readable by 'hg import'\\n \\n write is usually ui.write. 
drevs is what \\\"querydrev\\\" returns, results of\\n \\\"differential.query\\\".\\n \\\"\\\"\\\"\\n # Prefetch hg:meta property for all diffs\\n diffids = sorted(set(max(int(v) for v in drev[b'diffs']) for drev in drevs))\\n diffs = callconduit(ui, b'differential.querydiffs', {b'ids': diffids})\\n \\n # Generate patch for each drev\\n for drev in drevs:\\n ui.note(_(b'reading D%s\\\\n') % drev[b'id'])\\n \\n diffid = max(int(v) for v in drev[b'diffs'])\\n body = callconduit(ui, b'differential.getrawdiff', {b'diffID': diffid})\\n desc = getdescfromdrev(drev)\\n header = b'# HG changeset patch\\\\n'\\n \\n # Try to preserve metadata from hg:meta property. Write hg patch\\n # headers that can be read by the \\\"import\\\" command. See patchheadermap\\n # and extract in mercurial\\/patch.py for supported headers.\\n meta = getdiffmeta(diffs[b'%d' % diffid])\\n for k in _metanamemap.keys():\\n if k in meta:\\n header += b'# %s %s\\\\n' % (_metanamemap[k], meta[k])\\n \\n content = b'%s%s\\\\n%s' % (header, desc, body)\\n write(content)\\n \\n \\n @vcrcommand(\\n b'phabread',\\n [(b'', b'stack', False, _(b'read dependencies'))],\\n _(b'DREVSPEC [OPTIONS]'),\\n helpcategory=command.CATEGORY_IMPORT_EXPORT,\\n )\\n def phabread(ui, repo, spec, **opts):\\n \\\"\\\"\\\"print patches from Phabricator suitable for importing\\n \\n DREVSPEC could be a Differential Revision identity, like ``D123``, or just\\n the number ``123``. It could also have common operators like ``+``, ``-``,\\n ``&``, ``(``, ``)`` for complex queries. Prefix ``:`` could be used to\\n select a stack.\\n \\n ``abandoned``, ``accepted``, ``closed``, ``needsreview``, ``needsrevision``\\n could be used to filter patches by status. For performance reason, they\\n only represent a subset of non-status selections and cannot be used alone.\\n \\n For example, ``:D6+8-(2+D4)`` selects a stack up to D6, plus D8 and exclude\\n D2 and D4. 
``:D9 & needsreview`` selects \\\"Needs Review\\\" revisions in a\\n stack up to D9.\\n \\n If --stack is given, follow dependencies information and read all patches.\\n It is equivalent to the ``:`` operator.\\n \\\"\\\"\\\"\\n opts = pycompat.byteskwargs(opts)\\n if opts.get(b'stack'):\\n spec = b':(%s)' % spec\\n drevs = querydrev(repo.ui, spec)\\n readpatch(repo.ui, drevs, ui.write)\\n \\n \\n @vcrcommand(\\n b'phabupdate',\\n [\\n (b'', b'accept', False, _(b'accept revisions')),\\n (b'', b'reject', False, _(b'reject revisions')),\\n (b'', b'abandon', False, _(b'abandon revisions')),\\n (b'', b'reclaim', False, _(b'reclaim revisions')),\\n (b'm', b'comment', b'', _(b'comment on the last revision')),\\n ],\\n _(b'DREVSPEC [OPTIONS]'),\\n helpcategory=command.CATEGORY_IMPORT_EXPORT,\\n )\\n def phabupdate(ui, repo, spec, **opts):\\n \\\"\\\"\\\"update Differential Revision in batch\\n \\n DREVSPEC selects revisions. See :hg:`help phabread` for its usage.\\n \\\"\\\"\\\"\\n opts = pycompat.byteskwargs(opts)\\n flags = [n for n in b'accept reject abandon reclaim'.split() if opts.get(n)]\\n if len(flags) \\u003e 1:\\n raise error.Abort(_(b'%s cannot be used together') % b', '.join(flags))\\n \\n actions = []\\n for f in flags:\\n actions.append({b'type': f, b'value': True})\\n \\n drevs = querydrev(repo.ui, spec)\\n for i, drev in enumerate(drevs):\\n if i + 1 == len(drevs) and opts.get(b'comment'):\\n actions.append({b'type': b'comment', b'value': opts[b'comment']})\\n if actions:\\n params = {\\n b'objectIdentifier': drev[b'phid'],\\n b'transactions': actions,\\n }\\n callconduit(ui, b'differential.revision.edit', params)\\n \\n \\n @eh.templatekeyword(b'phabreview', requires={b'ctx'})\\n def template_review(context, mapping):\\n \\\"\\\"\\\":phabreview: Object describing the review for this changeset.\\n Has attributes `url` and `id`.\\n \\\"\\\"\\\"\\n ctx = context.resource(mapping, b'ctx')\\n m = _differentialrevisiondescre.search(ctx.description())\\n if 
m:\\n return templateutil.hybriddict(\\n {b'url': m.group('url'), b'id': b\\\"D%s\\\" % m.group('id'),}\\n )\\n else:\\n tags = ctx.repo().nodetags(ctx.node())\\n for t in tags:\\n if _differentialrevisiontagre.match(t):\\n url = ctx.repo().ui.config(b'phabricator', b'url')\\n if not url.endswith(b'\\/'):\\n url += b'\\/'\\n url += t\\n \\n return templateutil.hybriddict({b'url': url, b'id': t,})\\n return None\\n \\n \\n @eh.templatekeyword(b'phabstatus', requires={b'ctx', b'repo', b'ui'})\\n def template_status(context, mapping):\\n \\\"\\\"\\\":phabstatus: String. Status of Phabricator differential.\\n \\\"\\\"\\\"\\n ctx = context.resource(mapping, b'ctx')\\n repo = context.resource(mapping, b'repo')\\n ui = context.resource(mapping, b'ui')\\n \\n rev = ctx.rev()\\n try:\\n drevid = getdrevmap(repo, [rev])[rev]\\n except KeyError:\\n return None\\n drevs = callconduit(ui, b'differential.query', {b'ids': [drevid]})\\n for drev in drevs:\\n if int(drev[b'id']) == drevid:\\n return templateutil.hybriddict(\\n {b'url': drev[b'uri'], b'status': drev[b'statusName'],}\\n )\\n return None\\n \\n \\n @show.showview(b'phabstatus', csettopic=b'work')\\n def phabstatusshowview(ui, repo, displayer):\\n \\\"\\\"\\\"Phabricator differiential status\\\"\\\"\\\"\\n revs = repo.revs('sort(_underway(), topo)')\\n drevmap = getdrevmap(repo, revs)\\n unknownrevs, drevids, revsbydrevid = [], set([]), {}\\n for rev, drevid in pycompat.iteritems(drevmap):\\n if drevid is not None:\\n drevids.add(drevid)\\n revsbydrevid.setdefault(drevid, set([])).add(rev)\\n else:\\n unknownrevs.append(rev)\\n \\n drevs = callconduit(ui, b'differential.query', {b'ids': list(drevids)})\\n drevsbyrev = {}\\n for drev in drevs:\\n for rev in revsbydrevid[int(drev[b'id'])]:\\n drevsbyrev[rev] = drev\\n \\n def phabstatus(ctx):\\n drev = drevsbyrev[ctx.rev()]\\n status = ui.label(\\n b'%(statusName)s' % drev,\\n b'phabricator.status.%s' % _getstatusname(drev),\\n )\\n ui.write(b\\\"\\\\n%s %s\\\\n\\\" % 
(drev[b'uri'], status))\\n \\n revs -= smartset.baseset(unknownrevs)\\n revdag = graphmod.dagwalker(repo, revs)\\n \\n ui.setconfig(b'experimental', b'graphshorten', True)\\n displayer._exthook = phabstatus\\n nodelen = show.longestshortest(repo, revs)\\n logcmdutil.displaygraph(\\n ui,\\n repo,\\n revdag,\\n displayer,\\n graphmod.asciiedges,\\n props={b'nodelen': nodelen},\\n )\\n\"}]}],\"properties\":[]},\"20442\":{\"id\":\"20442\",\"revisionID\":\"8206\",\"dateCreated\":\"1581962881\",\"dateModified\":\"1583327825\",\"sourceControlBaseRevision\":\"72c6190de577bb2bb448eb2b14121e4ef85d08ff\",\"sourceControlPath\":null,\"sourceControlSystem\":\"hg\",\"branch\":null,\"bookmark\":null,\"creationMethod\":\"commit\",\"description\":\"rHG9b46270917348950e3fb1e73a5c9e46038065622\",\"unitStatus\":\"6\",\"lintStatus\":\"6\",\"changes\":[{\"id\":\"55597\",\"metadata\":{\"line:first\":1186,\"hash.effect\":\"QoC7Ipbh4yf0\"},\"oldPath\":\"hgext\\/phabricator.py\",\"currentPath\":\"hgext\\/phabricator.py\",\"awayPaths\":[],\"oldProperties\":[],\"newProperties\":[],\"type\":\"2\",\"fileType\":\"1\",\"commitHash\":null,\"addLines\":\"6\",\"delLines\":\"6\",\"hunks\":[{\"oldOffset\":\"1\",\"newOffset\":\"1\",\"oldLength\":\"1799\",\"newLength\":\"1799\",\"addLines\":null,\"delLines\":null,\"isMissingOldNewline\":null,\"isMissingNewNewline\":null,\"corpus\":\" # phabricator.py - simple Phabricator integration\\n #\\n # Copyright 2017 Facebook, Inc.\\n #\\n # This software may be used and distributed according to the terms of the\\n # GNU General Public License version 2 or any later version.\\n \\\"\\\"\\\"simple Phabricator integration (EXPERIMENTAL)\\n \\n This extension provides a ``phabsend`` command which sends a stack of\\n changesets to Phabricator, and a ``phabread`` command which prints a stack of\\n revisions in a format suitable for :hg:`import`, and a ``phabupdate`` command\\n to update statuses in batch.\\n \\n A \\\"phabstatus\\\" view for :hg:`show` is also 
provided; it displays status\\n information of Phabricator differentials associated with unfinished\\n changesets.\\n \\n By default, Phabricator requires ``Test Plan`` which might prevent some\\n changeset from being sent. The requirement could be disabled by changing\\n ``differential.require-test-plan-field`` config server side.\\n \\n Config::\\n \\n [phabricator]\\n # Phabricator URL\\n url = https:\\/\\/phab.example.com\\/\\n \\n # Repo callsign. If a repo has a URL https:\\/\\/$HOST\\/diffusion\\/FOO, then its\\n # callsign is \\\"FOO\\\".\\n callsign = FOO\\n \\n # curl command to use. If not set (default), use builtin HTTP library to\\n # communicate. If set, use the specified curl command. This could be useful\\n # if you need to specify advanced options that is not easily supported by\\n # the internal library.\\n curlcmd = curl --connect-timeout 2 --retry 3 --silent\\n \\n [auth]\\n example.schemes = https\\n example.prefix = phab.example.com\\n \\n # API token. Get it from https:\\/\\/$HOST\\/conduit\\/login\\/\\n example.phabtoken = cli-xxxxxxxxxxxxxxxxxxxxxxxxxxxx\\n \\\"\\\"\\\"\\n \\n from __future__ import absolute_import\\n \\n import base64\\n import contextlib\\n import hashlib\\n import itertools\\n import json\\n import mimetypes\\n import operator\\n import re\\n \\n from mercurial.node import bin, nullid\\n from mercurial.i18n import _\\n from mercurial.pycompat import getattr\\n from mercurial.thirdparty import attr\\n from mercurial import (\\n cmdutil,\\n context,\\n encoding,\\n error,\\n exthelper,\\n graphmod,\\n httpconnection as httpconnectionmod,\\n localrepo,\\n logcmdutil,\\n match,\\n mdiff,\\n obsutil,\\n parser,\\n patch,\\n phases,\\n pycompat,\\n scmutil,\\n smartset,\\n tags,\\n templatefilters,\\n templateutil,\\n url as urlmod,\\n util,\\n )\\n from mercurial.utils import (\\n procutil,\\n stringutil,\\n )\\n from . 
import show\\n \\n \\n # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for\\n # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should\\n # be specifying the version(s) of Mercurial they are tested with, or\\n # leave the attribute unspecified.\\n testedwith = b'ships-with-hg-core'\\n \\n eh = exthelper.exthelper()\\n \\n cmdtable = eh.cmdtable\\n command = eh.command\\n configtable = eh.configtable\\n templatekeyword = eh.templatekeyword\\n uisetup = eh.finaluisetup\\n \\n # developer config: phabricator.batchsize\\n eh.configitem(\\n b'phabricator', b'batchsize', default=12,\\n )\\n eh.configitem(\\n b'phabricator', b'callsign', default=None,\\n )\\n eh.configitem(\\n b'phabricator', b'curlcmd', default=None,\\n )\\n # developer config: phabricator.repophid\\n eh.configitem(\\n b'phabricator', b'repophid', default=None,\\n )\\n eh.configitem(\\n b'phabricator', b'url', default=None,\\n )\\n eh.configitem(\\n b'phabsend', b'confirm', default=False,\\n )\\n \\n colortable = {\\n b'phabricator.action.created': b'green',\\n b'phabricator.action.skipped': b'magenta',\\n b'phabricator.action.updated': b'magenta',\\n b'phabricator.desc': b'',\\n b'phabricator.drev': b'bold',\\n b'phabricator.node': b'',\\n b'phabricator.status.abandoned': b'magenta dim',\\n b'phabricator.status.accepted': b'green bold',\\n b'phabricator.status.closed': b'green',\\n b'phabricator.status.needsreview': b'yellow',\\n b'phabricator.status.needsrevision': b'red',\\n b'phabricator.status.changesplanned': b'red',\\n }\\n \\n _VCR_FLAGS = [\\n (\\n b'',\\n b'test-vcr',\\n b'',\\n _(\\n b'Path to a vcr file. 
If nonexistent, will record a new vcr transcript'\\n b', otherwise will mock all http requests using the specified vcr file.'\\n b' (ADVANCED)'\\n ),\\n ),\\n ]\\n \\n \\n @eh.wrapfunction(localrepo, \\\"loadhgrc\\\")\\n def _loadhgrc(orig, ui, wdirvfs, hgvfs, requirements):\\n \\\"\\\"\\\"Load ``.arcconfig`` content into a ui instance on repository open.\\n \\\"\\\"\\\"\\n result = False\\n arcconfig = {}\\n \\n try:\\n # json.loads only accepts bytes from 3.6+\\n rawparams = encoding.unifromlocal(wdirvfs.read(b\\\".arcconfig\\\"))\\n # json.loads only returns unicode strings\\n arcconfig = pycompat.rapply(\\n lambda x: encoding.unitolocal(x)\\n if isinstance(x, pycompat.unicode)\\n else x,\\n pycompat.json_loads(rawparams),\\n )\\n \\n result = True\\n except ValueError:\\n ui.warn(_(b\\\"invalid JSON in %s\\\\n\\\") % wdirvfs.join(b\\\".arcconfig\\\"))\\n except IOError:\\n pass\\n \\n cfg = util.sortdict()\\n \\n if b\\\"repository.callsign\\\" in arcconfig:\\n cfg[(b\\\"phabricator\\\", b\\\"callsign\\\")] = arcconfig[b\\\"repository.callsign\\\"]\\n \\n if b\\\"phabricator.uri\\\" in arcconfig:\\n cfg[(b\\\"phabricator\\\", b\\\"url\\\")] = arcconfig[b\\\"phabricator.uri\\\"]\\n \\n if cfg:\\n ui.applyconfig(cfg, source=wdirvfs.join(b\\\".arcconfig\\\"))\\n \\n return orig(ui, wdirvfs, hgvfs, requirements) or result # Load .hg\\/hgrc\\n \\n \\n def vcrcommand(name, flags, spec, helpcategory=None, optionalrepo=False):\\n fullflags = flags + _VCR_FLAGS\\n \\n def hgmatcher(r1, r2):\\n if r1.uri != r2.uri or r1.method != r2.method:\\n return False\\n r1params = util.urlreq.parseqs(r1.body)\\n r2params = util.urlreq.parseqs(r2.body)\\n for key in r1params:\\n if key not in r2params:\\n return False\\n value = r1params[key][0]\\n # we want to compare json payloads without worrying about ordering\\n if value.startswith(b'{') and value.endswith(b'}'):\\n r1json = pycompat.json_loads(value)\\n r2json = pycompat.json_loads(r2params[key][0])\\n if r1json != r2json:\\n 
return False\\n elif r2params[key][0] != value:\\n return False\\n return True\\n \\n def sanitiserequest(request):\\n request.body = re.sub(\\n br'cli-[a-z0-9]+', br'cli-hahayouwish', request.body\\n )\\n return request\\n \\n def sanitiseresponse(response):\\n if 'set-cookie' in response['headers']:\\n del response['headers']['set-cookie']\\n return response\\n \\n def decorate(fn):\\n def inner(*args, **kwargs):\\n cassette = pycompat.fsdecode(kwargs.pop('test_vcr', None))\\n if cassette:\\n import hgdemandimport\\n \\n with hgdemandimport.deactivated():\\n import vcr as vcrmod\\n import vcr.stubs as stubs\\n \\n vcr = vcrmod.VCR(\\n serializer='json',\\n before_record_request=sanitiserequest,\\n before_record_response=sanitiseresponse,\\n custom_patches=[\\n (\\n urlmod,\\n 'httpconnection',\\n stubs.VCRHTTPConnection,\\n ),\\n (\\n urlmod,\\n 'httpsconnection',\\n stubs.VCRHTTPSConnection,\\n ),\\n ],\\n )\\n vcr.register_matcher('hgmatcher', hgmatcher)\\n with vcr.use_cassette(cassette, match_on=['hgmatcher']):\\n return fn(*args, **kwargs)\\n return fn(*args, **kwargs)\\n \\n inner.__name__ = fn.__name__\\n inner.__doc__ = fn.__doc__\\n return command(\\n name,\\n fullflags,\\n spec,\\n helpcategory=helpcategory,\\n optionalrepo=optionalrepo,\\n )(inner)\\n \\n return decorate\\n \\n \\n def urlencodenested(params):\\n \\\"\\\"\\\"like urlencode, but works with nested parameters.\\n \\n For example, if params is {'a': ['b', 'c'], 'd': {'e': 'f'}}, it will be\\n flattened to {'a[0]': 'b', 'a[1]': 'c', 'd[e]': 'f'} and then passed to\\n urlencode. 
Note: the encoding is consistent with PHP's http_build_query.\\n \\\"\\\"\\\"\\n flatparams = util.sortdict()\\n \\n def process(prefix, obj):\\n if isinstance(obj, bool):\\n obj = {True: b'true', False: b'false'}[obj] # Python -\\u003e PHP form\\n lister = lambda l: [(b'%d' % k, v) for k, v in enumerate(l)]\\n items = {list: lister, dict: lambda x: x.items()}.get(type(obj))\\n if items is None:\\n flatparams[prefix] = obj\\n else:\\n for k, v in items(obj):\\n if prefix:\\n process(b'%s[%s]' % (prefix, k), v)\\n else:\\n process(k, v)\\n \\n process(b'', params)\\n return util.urlreq.urlencode(flatparams)\\n \\n \\n def readurltoken(ui):\\n \\\"\\\"\\\"return conduit url, token and make sure they exist\\n \\n Currently read from [auth] config section. In the future, it might\\n make sense to read from .arcconfig and .arcrc as well.\\n \\\"\\\"\\\"\\n url = ui.config(b'phabricator', b'url')\\n if not url:\\n raise error.Abort(\\n _(b'config %s.%s is required') % (b'phabricator', b'url')\\n )\\n \\n res = httpconnectionmod.readauthforuri(ui, url, util.url(url).user)\\n token = None\\n \\n if res:\\n group, auth = res\\n \\n ui.debug(b\\\"using auth.%s.* for authentication\\\\n\\\" % group)\\n \\n token = auth.get(b'phabtoken')\\n \\n if not token:\\n raise error.Abort(\\n _(b'Can\\\\'t find conduit token associated to %s') % (url,)\\n )\\n \\n return url, token\\n \\n \\n def callconduit(ui, name, params):\\n \\\"\\\"\\\"call Conduit API, params is a dict. 
return json.loads result, or None\\\"\\\"\\\"\\n host, token = readurltoken(ui)\\n url, authinfo = util.url(b'\\/'.join([host, b'api', name])).authinfo()\\n ui.debug(b'Conduit Call: %s %s\\\\n' % (url, pycompat.byterepr(params)))\\n params = params.copy()\\n params[b'__conduit__'] = {\\n b'token': token,\\n }\\n rawdata = {\\n b'params': templatefilters.json(params),\\n b'output': b'json',\\n b'__conduit__': 1,\\n }\\n data = urlencodenested(rawdata)\\n curlcmd = ui.config(b'phabricator', b'curlcmd')\\n if curlcmd:\\n sin, sout = procutil.popen2(\\n b'%s -d @- %s' % (curlcmd, procutil.shellquote(url))\\n )\\n sin.write(data)\\n sin.close()\\n body = sout.read()\\n else:\\n urlopener = urlmod.opener(ui, authinfo)\\n request = util.urlreq.request(pycompat.strurl(url), data=data)\\n with contextlib.closing(urlopener.open(request)) as rsp:\\n body = rsp.read()\\n ui.debug(b'Conduit Response: %s\\\\n' % body)\\n parsed = pycompat.rapply(\\n lambda x: encoding.unitolocal(x)\\n if isinstance(x, pycompat.unicode)\\n else x,\\n # json.loads only accepts bytes from py3.6+\\n pycompat.json_loads(encoding.unifromlocal(body)),\\n )\\n if parsed.get(b'error_code'):\\n msg = _(b'Conduit Error (%s): %s') % (\\n parsed[b'error_code'],\\n parsed[b'error_info'],\\n )\\n raise error.Abort(msg)\\n return parsed[b'result']\\n \\n \\n @vcrcommand(b'debugcallconduit', [], _(b'METHOD'), optionalrepo=True)\\n def debugcallconduit(ui, repo, name):\\n \\\"\\\"\\\"call Conduit API\\n \\n Call parameters are read from stdin as a JSON blob. 
Result will be written\\n to stdout as a JSON blob.\\n \\\"\\\"\\\"\\n # json.loads only accepts bytes from 3.6+\\n rawparams = encoding.unifromlocal(ui.fin.read())\\n # json.loads only returns unicode strings\\n params = pycompat.rapply(\\n lambda x: encoding.unitolocal(x)\\n if isinstance(x, pycompat.unicode)\\n else x,\\n pycompat.json_loads(rawparams),\\n )\\n # json.dumps only accepts unicode strings\\n result = pycompat.rapply(\\n lambda x: encoding.unifromlocal(x) if isinstance(x, bytes) else x,\\n callconduit(ui, name, params),\\n )\\n s = json.dumps(result, sort_keys=True, indent=2, separators=(u',', u': '))\\n ui.write(b'%s\\\\n' % encoding.unitolocal(s))\\n \\n \\n def getrepophid(repo):\\n \\\"\\\"\\\"given callsign, return repository PHID or None\\\"\\\"\\\"\\n # developer config: phabricator.repophid\\n repophid = repo.ui.config(b'phabricator', b'repophid')\\n if repophid:\\n return repophid\\n callsign = repo.ui.config(b'phabricator', b'callsign')\\n if not callsign:\\n return None\\n query = callconduit(\\n repo.ui,\\n b'diffusion.repository.search',\\n {b'constraints': {b'callsigns': [callsign]}},\\n )\\n if len(query[b'data']) == 0:\\n return None\\n repophid = query[b'data'][0][b'phid']\\n repo.ui.setconfig(b'phabricator', b'repophid', repophid)\\n return repophid\\n \\n \\n _differentialrevisiontagre = re.compile(br'\\\\AD([1-9][0-9]*)\\\\Z')\\n _differentialrevisiondescre = re.compile(\\n br'^Differential Revision:\\\\s*(?P\\u003curl\\u003e(?:.*)D(?P\\u003cid\\u003e[1-9][0-9]*))$', re.M\\n )\\n \\n \\n def getoldnodedrevmap(repo, nodelist):\\n \\\"\\\"\\\"find previous nodes that has been sent to Phabricator\\n \\n return {node: (oldnode, Differential diff, Differential Revision ID)}\\n for node in nodelist with known previous sent versions, or associated\\n Differential Revision IDs. 
``oldnode`` and ``Differential diff`` could\\n be ``None``.\\n \\n Examines commit messages like \\\"Differential Revision:\\\" to get the\\n association information.\\n \\n If such commit message line is not found, examines all precursors and their\\n tags. Tags with format like \\\"D1234\\\" are considered a match and the node\\n with that tag, and the number after \\\"D\\\" (ex. 1234) will be returned.\\n \\n The ``old node``, if not None, is guaranteed to be the last diff of\\n corresponding Differential Revision, and exist in the repo.\\n \\\"\\\"\\\"\\n unfi = repo.unfiltered()\\n has_node = unfi.changelog.index.has_node\\n \\n result = {} # {node: (oldnode?, lastdiff?, drev)}\\n toconfirm = {} # {node: (force, {precnode}, drev)}\\n for node in nodelist:\\n ctx = unfi[node]\\n # For tags like \\\"D123\\\", put them into \\\"toconfirm\\\" to verify later\\n precnodes = list(obsutil.allpredecessors(unfi.obsstore, [node]))\\n for n in precnodes:\\n if has_node(n):\\n for tag in unfi.nodetags(n):\\n m = _differentialrevisiontagre.match(tag)\\n if m:\\n toconfirm[node] = (0, set(precnodes), int(m.group(1)))\\n break\\n else:\\n continue # move to next predecessor\\n break # found a tag, stop\\n else:\\n # Check commit message\\n m = _differentialrevisiondescre.search(ctx.description())\\n if m:\\n toconfirm[node] = (1, set(precnodes), int(m.group('id')))\\n \\n # Double check if tags are genuine by collecting all old nodes from\\n # Phabricator, and expect precursors overlap with it.\\n if toconfirm:\\n drevs = [drev for force, precs, drev in toconfirm.values()]\\n alldiffs = callconduit(\\n unfi.ui, b'differential.querydiffs', {b'revisionIDs': drevs}\\n )\\n getnode = lambda d: bin(getdiffmeta(d).get(b'node', b'')) or None\\n for newnode, (force, precset, drev) in toconfirm.items():\\n diffs = [\\n d for d in alldiffs.values() if int(d[b'revisionID']) == drev\\n ]\\n \\n # \\\"precursors\\\" as known by Phabricator\\n phprecset = set(getnode(d) for d in diffs)\\n 
\\n # Ignore if precursors (Phabricator and local repo) do not overlap,\\n # and force is not set (when commit message says nothing)\\n if not force and not bool(phprecset & precset):\\n tagname = b'D%d' % drev\\n tags.tag(\\n repo,\\n tagname,\\n nullid,\\n message=None,\\n user=None,\\n date=None,\\n local=True,\\n )\\n unfi.ui.warn(\\n _(\\n b'D%d: local tag removed - does not match '\\n b'Differential history\\\\n'\\n )\\n % drev\\n )\\n continue\\n \\n # Find the last node using Phabricator metadata, and make sure it\\n # exists in the repo\\n oldnode = lastdiff = None\\n if diffs:\\n lastdiff = max(diffs, key=lambda d: int(d[b'id']))\\n oldnode = getnode(lastdiff)\\n if oldnode and not has_node(oldnode):\\n oldnode = None\\n \\n result[newnode] = (oldnode, lastdiff, drev)\\n \\n return result\\n \\n \\n def getdrevmap(repo, revs):\\n \\\"\\\"\\\"Return a dict mapping each rev in `revs` to their Differential Revision\\n ID or None.\\n \\\"\\\"\\\"\\n result = {}\\n for rev in revs:\\n result[rev] = None\\n ctx = repo[rev]\\n # Check commit message\\n m = _differentialrevisiondescre.search(ctx.description())\\n if m:\\n result[rev] = int(m.group('id'))\\n continue\\n # Check tags\\n for tag in repo.nodetags(ctx.node()):\\n m = _differentialrevisiontagre.match(tag)\\n if m:\\n result[rev] = int(m.group(1))\\n break\\n \\n return result\\n \\n \\n def getdiff(ctx, diffopts):\\n \\\"\\\"\\\"plain-text diff without header (user, commit message, etc)\\\"\\\"\\\"\\n output = util.stringio()\\n for chunk, _label in patch.diffui(\\n ctx.repo(), ctx.p1().node(), ctx.node(), None, opts=diffopts\\n ):\\n output.write(chunk)\\n return output.getvalue()\\n \\n \\n class DiffChangeType(object):\\n ADD = 1\\n CHANGE = 2\\n DELETE = 3\\n MOVE_AWAY = 4\\n COPY_AWAY = 5\\n MOVE_HERE = 6\\n COPY_HERE = 7\\n MULTICOPY = 8\\n \\n \\n class DiffFileType(object):\\n TEXT = 1\\n IMAGE = 2\\n BINARY = 3\\n \\n \\n @attr.s\\n class phabhunk(dict):\\n \\\"\\\"\\\"Represents a 
Differential hunk, which is owned by a Differential change\\n \\\"\\\"\\\"\\n \\n oldOffset = attr.ib(default=0) # camelcase-required\\n oldLength = attr.ib(default=0) # camelcase-required\\n newOffset = attr.ib(default=0) # camelcase-required\\n newLength = attr.ib(default=0) # camelcase-required\\n corpus = attr.ib(default='')\\n # These get added to the phabchange's equivalents\\n addLines = attr.ib(default=0) # camelcase-required\\n delLines = attr.ib(default=0) # camelcase-required\\n \\n \\n @attr.s\\n class phabchange(object):\\n \\\"\\\"\\\"Represents a Differential change, owns Differential hunks and owned by a\\n Differential diff. Each one represents one file in a diff.\\n \\\"\\\"\\\"\\n \\n currentPath = attr.ib(default=None) # camelcase-required\\n oldPath = attr.ib(default=None) # camelcase-required\\n awayPaths = attr.ib(default=attr.Factory(list)) # camelcase-required\\n metadata = attr.ib(default=attr.Factory(dict))\\n oldProperties = attr.ib(default=attr.Factory(dict)) # camelcase-required\\n newProperties = attr.ib(default=attr.Factory(dict)) # camelcase-required\\n type = attr.ib(default=DiffChangeType.CHANGE)\\n fileType = attr.ib(default=DiffFileType.TEXT) # camelcase-required\\n commitHash = attr.ib(default=None) # camelcase-required\\n addLines = attr.ib(default=0) # camelcase-required\\n delLines = attr.ib(default=0) # camelcase-required\\n hunks = attr.ib(default=attr.Factory(list))\\n \\n def copynewmetadatatoold(self):\\n for key in list(self.metadata.keys()):\\n newkey = key.replace(b'new:', b'old:')\\n self.metadata[newkey] = self.metadata[key]\\n \\n def addoldmode(self, value):\\n self.oldProperties[b'unix:filemode'] = value\\n \\n def addnewmode(self, value):\\n self.newProperties[b'unix:filemode'] = value\\n \\n def addhunk(self, hunk):\\n if not isinstance(hunk, phabhunk):\\n raise error.Abort(b'phabchange.addhunk only takes phabhunks')\\n self.hunks.append(pycompat.byteskwargs(attr.asdict(hunk)))\\n # It's useful to include 
these stats since the Phab web UI shows them,\\n # and uses them to estimate how large a change a Revision is. Also used\\n # in email subjects for the [+++--] bit.\\n self.addLines += hunk.addLines\\n self.delLines += hunk.delLines\\n \\n \\n @attr.s\\n class phabdiff(object):\\n \\\"\\\"\\\"Represents a Differential diff, owns Differential changes. Corresponds\\n to a commit.\\n \\\"\\\"\\\"\\n \\n # Doesn't seem to be any reason to send this (output of uname -n)\\n sourceMachine = attr.ib(default=b'') # camelcase-required\\n sourcePath = attr.ib(default=b'\\/') # camelcase-required\\n sourceControlBaseRevision = attr.ib(default=b'0' * 40) # camelcase-required\\n sourceControlPath = attr.ib(default=b'\\/') # camelcase-required\\n sourceControlSystem = attr.ib(default=b'hg') # camelcase-required\\n branch = attr.ib(default=b'default')\\n bookmark = attr.ib(default=None)\\n creationMethod = attr.ib(default=b'phabsend') # camelcase-required\\n lintStatus = attr.ib(default=b'none') # camelcase-required\\n unitStatus = attr.ib(default=b'none') # camelcase-required\\n changes = attr.ib(default=attr.Factory(dict))\\n repositoryPHID = attr.ib(default=None) # camelcase-required\\n \\n def addchange(self, change):\\n if not isinstance(change, phabchange):\\n raise error.Abort(b'phabdiff.addchange only takes phabchanges')\\n self.changes[change.currentPath] = pycompat.byteskwargs(\\n attr.asdict(change)\\n )\\n \\n \\n def maketext(pchange, ctx, fname):\\n \\\"\\\"\\\"populate the phabchange for a text file\\\"\\\"\\\"\\n repo = ctx.repo()\\n fmatcher = match.exact([fname])\\n diffopts = mdiff.diffopts(git=True, context=32767)\\n _pfctx, _fctx, header, fhunks = next(\\n patch.diffhunks(repo, ctx.p1(), ctx, fmatcher, opts=diffopts)\\n )\\n \\n for fhunk in fhunks:\\n (oldOffset, oldLength, newOffset, newLength), lines = fhunk\\n corpus = b''.join(lines[1:])\\n shunk = list(header)\\n shunk.extend(lines)\\n _mf, _mt, addLines, delLines, _hb = patch.diffstatsum(\\n 
patch.diffstatdata(util.iterlines(shunk))\\n )\\n pchange.addhunk(\\n phabhunk(\\n oldOffset,\\n oldLength,\\n newOffset,\\n newLength,\\n corpus,\\n addLines,\\n delLines,\\n )\\n )\\n \\n \\n def uploadchunks(fctx, fphid):\\n \\\"\\\"\\\"upload large binary files as separate chunks.\\n Phab requests chunking over 8MiB, and splits into 4MiB chunks\\n \\\"\\\"\\\"\\n ui = fctx.repo().ui\\n chunks = callconduit(ui, b'file.querychunks', {b'filePHID': fphid})\\n with ui.makeprogress(\\n _(b'uploading file chunks'), unit=_(b'chunks'), total=len(chunks)\\n ) as progress:\\n for chunk in chunks:\\n progress.increment()\\n if chunk[b'complete']:\\n continue\\n bstart = int(chunk[b'byteStart'])\\n bend = int(chunk[b'byteEnd'])\\n callconduit(\\n ui,\\n b'file.uploadchunk',\\n {\\n b'filePHID': fphid,\\n b'byteStart': bstart,\\n b'data': base64.b64encode(fctx.data()[bstart:bend]),\\n b'dataEncoding': b'base64',\\n },\\n )\\n \\n \\n def uploadfile(fctx):\\n \\\"\\\"\\\"upload binary files to Phabricator\\\"\\\"\\\"\\n repo = fctx.repo()\\n ui = repo.ui\\n fname = fctx.path()\\n size = fctx.size()\\n fhash = pycompat.bytestr(hashlib.sha256(fctx.data()).hexdigest())\\n \\n # an allocate call is required first to see if an upload is even required\\n # (Phab might already have it) and to determine if chunking is needed\\n allocateparams = {\\n b'name': fname,\\n b'contentLength': size,\\n b'contentHash': fhash,\\n }\\n filealloc = callconduit(ui, b'file.allocate', allocateparams)\\n fphid = filealloc[b'filePHID']\\n \\n if filealloc[b'upload']:\\n ui.write(_(b'uploading %s\\\\n') % bytes(fctx))\\n if not fphid:\\n uploadparams = {\\n b'name': fname,\\n b'data_base64': base64.b64encode(fctx.data()),\\n }\\n fphid = callconduit(ui, b'file.upload', uploadparams)\\n else:\\n uploadchunks(fctx, fphid)\\n else:\\n ui.debug(b'server already has %s\\\\n' % bytes(fctx))\\n \\n if not fphid:\\n raise error.Abort(b'Upload of %s failed.' 
% bytes(fctx))\\n \\n return fphid\\n \\n \\n def addoldbinary(pchange, fctx):\\n \\\"\\\"\\\"add the metadata for the previous version of a binary file to the\\n phabchange for the new version\\n \\\"\\\"\\\"\\n oldfctx = fctx.p1()\\n if fctx.cmp(oldfctx):\\n # Files differ, add the old one\\n pchange.metadata[b'old:file:size'] = oldfctx.size()\\n mimeguess, _enc = mimetypes.guess_type(\\n encoding.unifromlocal(oldfctx.path())\\n )\\n if mimeguess:\\n pchange.metadata[b'old:file:mime-type'] = pycompat.bytestr(\\n mimeguess\\n )\\n fphid = uploadfile(oldfctx)\\n pchange.metadata[b'old:binary-phid'] = fphid\\n else:\\n # If it's left as IMAGE\\/BINARY web UI might try to display it\\n pchange.fileType = DiffFileType.TEXT\\n pchange.copynewmetadatatoold()\\n \\n \\n def makebinary(pchange, fctx):\\n \\\"\\\"\\\"populate the phabchange for a binary file\\\"\\\"\\\"\\n pchange.fileType = DiffFileType.BINARY\\n fphid = uploadfile(fctx)\\n pchange.metadata[b'new:binary-phid'] = fphid\\n pchange.metadata[b'new:file:size'] = fctx.size()\\n mimeguess, _enc = mimetypes.guess_type(encoding.unifromlocal(fctx.path()))\\n if mimeguess:\\n mimeguess = pycompat.bytestr(mimeguess)\\n pchange.metadata[b'new:file:mime-type'] = mimeguess\\n if mimeguess.startswith(b'image\\/'):\\n pchange.fileType = DiffFileType.IMAGE\\n \\n \\n # Copied from mercurial\\/patch.py\\n gitmode = {b'l': b'120000', b'x': b'100755', b'': b'100644'}\\n \\n \\n def notutf8(fctx):\\n \\\"\\\"\\\"detect non-UTF-8 text files since Phabricator requires them to be marked\\n as binary\\n \\\"\\\"\\\"\\n try:\\n fctx.data().decode('utf-8')\\n if fctx.parents():\\n fctx.p1().data().decode('utf-8')\\n return False\\n except UnicodeDecodeError:\\n fctx.repo().ui.write(\\n _(b'file %s detected as non-UTF-8, marked as binary\\\\n')\\n % fctx.path()\\n )\\n return True\\n \\n \\n def addremoved(pdiff, ctx, removed):\\n \\\"\\\"\\\"add removed files to the phabdiff. 
Shouldn't include moves\\\"\\\"\\\"\\n for fname in removed:\\n pchange = phabchange(\\n currentPath=fname, oldPath=fname, type=DiffChangeType.DELETE\\n )\\n pchange.addoldmode(gitmode[ctx.p1()[fname].flags()])\\n fctx = ctx.p1()[fname]\\n if not (fctx.isbinary() or notutf8(fctx)):\\n maketext(pchange, ctx, fname)\\n \\n pdiff.addchange(pchange)\\n \\n \\n def addmodified(pdiff, ctx, modified):\\n \\\"\\\"\\\"add modified files to the phabdiff\\\"\\\"\\\"\\n for fname in modified:\\n fctx = ctx[fname]\\n pchange = phabchange(currentPath=fname, oldPath=fname)\\n filemode = gitmode[ctx[fname].flags()]\\n originalmode = gitmode[ctx.p1()[fname].flags()]\\n if filemode != originalmode:\\n pchange.addoldmode(originalmode)\\n pchange.addnewmode(filemode)\\n \\n if fctx.isbinary() or notutf8(fctx):\\n makebinary(pchange, fctx)\\n addoldbinary(pchange, fctx)\\n else:\\n maketext(pchange, ctx, fname)\\n \\n pdiff.addchange(pchange)\\n \\n \\n def addadded(pdiff, ctx, added, removed):\\n \\\"\\\"\\\"add file adds to the phabdiff, both new files and copies\\/moves\\\"\\\"\\\"\\n # Keep track of files that've been recorded as moved\\/copied, so if there are\\n # additional copies we can mark them (moves get removed from removed)\\n copiedchanges = {}\\n movedchanges = {}\\n for fname in added:\\n fctx = ctx[fname]\\n pchange = phabchange(currentPath=fname)\\n \\n filemode = gitmode[ctx[fname].flags()]\\n renamed = fctx.renamed()\\n \\n if renamed:\\n originalfname = renamed[0]\\n originalmode = gitmode[ctx.p1()[originalfname].flags()]\\n pchange.oldPath = originalfname\\n \\n if originalfname in removed:\\n origpchange = phabchange(\\n currentPath=originalfname,\\n oldPath=originalfname,\\n type=DiffChangeType.MOVE_AWAY,\\n awayPaths=[fname],\\n )\\n movedchanges[originalfname] = origpchange\\n removed.remove(originalfname)\\n pchange.type = DiffChangeType.MOVE_HERE\\n elif originalfname in movedchanges:\\n movedchanges[originalfname].type = DiffChangeType.MULTICOPY\\n 
movedchanges[originalfname].awayPaths.append(fname)\\n pchange.type = DiffChangeType.COPY_HERE\\n else: # pure copy\\n if originalfname not in copiedchanges:\\n origpchange = phabchange(\\n currentPath=originalfname, type=DiffChangeType.COPY_AWAY\\n )\\n copiedchanges[originalfname] = origpchange\\n else:\\n origpchange = copiedchanges[originalfname]\\n origpchange.awayPaths.append(fname)\\n pchange.type = DiffChangeType.COPY_HERE\\n \\n if filemode != originalmode:\\n pchange.addoldmode(originalmode)\\n pchange.addnewmode(filemode)\\n else: # Brand-new file\\n pchange.addnewmode(gitmode[fctx.flags()])\\n pchange.type = DiffChangeType.ADD\\n \\n if fctx.isbinary() or notutf8(fctx):\\n makebinary(pchange, fctx)\\n if renamed:\\n addoldbinary(pchange, fctx)\\n else:\\n maketext(pchange, ctx, fname)\\n \\n pdiff.addchange(pchange)\\n \\n for _path, copiedchange in copiedchanges.items():\\n pdiff.addchange(copiedchange)\\n for _path, movedchange in movedchanges.items():\\n pdiff.addchange(movedchange)\\n \\n \\n def creatediff(ctx):\\n \\\"\\\"\\\"create a Differential Diff\\\"\\\"\\\"\\n repo = ctx.repo()\\n repophid = getrepophid(repo)\\n # Create a \\\"Differential Diff\\\" via \\\"differential.creatediff\\\" API\\n pdiff = phabdiff(\\n sourceControlBaseRevision=b'%s' % ctx.p1().hex(),\\n branch=b'%s' % ctx.branch(),\\n )\\n modified, added, removed, _d, _u, _i, _c = ctx.p1().status(ctx)\\n # addadded will remove moved files from removed, so addremoved won't get\\n # them\\n addadded(pdiff, ctx, added, removed)\\n addmodified(pdiff, ctx, modified)\\n addremoved(pdiff, ctx, removed)\\n if repophid:\\n pdiff.repositoryPHID = repophid\\n diff = callconduit(\\n repo.ui,\\n b'differential.creatediff',\\n pycompat.byteskwargs(attr.asdict(pdiff)),\\n )\\n if not diff:\\n raise error.Abort(_(b'cannot create diff for %s') % ctx)\\n return diff\\n \\n \\n def writediffproperties(ctx, diff):\\n \\\"\\\"\\\"write metadata to diff so patches could be applied 
losslessly\\\"\\\"\\\"\\n # creatediff returns with a diffid but query returns with an id\\n diffid = diff.get(b'diffid', diff.get(b'id'))\\n params = {\\n b'diff_id': diffid,\\n b'name': b'hg:meta',\\n b'data': templatefilters.json(\\n {\\n b'user': ctx.user(),\\n b'date': b'%d %d' % ctx.date(),\\n b'branch': ctx.branch(),\\n b'node': ctx.hex(),\\n b'parent': ctx.p1().hex(),\\n }\\n ),\\n }\\n callconduit(ctx.repo().ui, b'differential.setdiffproperty', params)\\n \\n params = {\\n b'diff_id': diffid,\\n b'name': b'local:commits',\\n b'data': templatefilters.json(\\n {\\n ctx.hex(): {\\n b'author': stringutil.person(ctx.user()),\\n b'authorEmail': stringutil.email(ctx.user()),\\n b'time': int(ctx.date()[0]),\\n b'commit': ctx.hex(),\\n b'parents': [ctx.p1().hex()],\\n b'branch': ctx.branch(),\\n },\\n }\\n ),\\n }\\n callconduit(ctx.repo().ui, b'differential.setdiffproperty', params)\\n \\n \\n def createdifferentialrevision(\\n ctx,\\n revid=None,\\n parentrevphid=None,\\n oldnode=None,\\n olddiff=None,\\n actions=None,\\n comment=None,\\n ):\\n \\\"\\\"\\\"create or update a Differential Revision\\n \\n If revid is None, create a new Differential Revision, otherwise update\\n revid. 
If parentrevphid is not None, set it as a dependency.\\n \\n If oldnode is not None, check if the patch content (without commit message\\n and metadata) has changed before creating another diff.\\n \\n If actions is not None, they will be appended to the transaction.\\n \\\"\\\"\\\"\\n repo = ctx.repo()\\n if oldnode:\\n diffopts = mdiff.diffopts(git=True, context=32767)\\n oldctx = repo.unfiltered()[oldnode]\\n neednewdiff = getdiff(ctx, diffopts) != getdiff(oldctx, diffopts)\\n else:\\n neednewdiff = True\\n \\n transactions = []\\n if neednewdiff:\\n diff = creatediff(ctx)\\n transactions.append({b'type': b'update', b'value': diff[b'phid']})\\n if comment:\\n transactions.append({b'type': b'comment', b'value': comment})\\n else:\\n # Even if we don't need to upload a new diff because the patch content\\n # does not change. We might still need to update its metadata so\\n # pushers could know the correct node metadata.\\n assert olddiff\\n diff = olddiff\\n writediffproperties(ctx, diff)\\n \\n # Set the parent Revision every time, so commit re-ordering is picked-up\\n if parentrevphid:\\n transactions.append(\\n {b'type': b'parents.set', b'value': [parentrevphid]}\\n )\\n \\n if actions:\\n transactions += actions\\n \\n # Parse commit message and update related fields.\\n desc = ctx.description()\\n info = callconduit(\\n repo.ui, b'differential.parsecommitmessage', {b'corpus': desc}\\n )\\n for k, v in info[b'fields'].items():\\n if k in [b'title', b'summary', b'testPlan']:\\n transactions.append({b'type': k, b'value': v})\\n \\n params = {b'transactions': transactions}\\n if revid is not None:\\n # Update an existing Differential Revision\\n params[b'objectIdentifier'] = revid\\n \\n revision = callconduit(repo.ui, b'differential.revision.edit', params)\\n if not revision:\\n raise error.Abort(_(b'cannot create revision for %s') % ctx)\\n \\n return revision, diff\\n \\n \\n def userphids(repo, names):\\n \\\"\\\"\\\"convert user names to PHIDs\\\"\\\"\\\"\\n 
names = [name.lower() for name in names]\\n query = {b'constraints': {b'usernames': names}}\\n result = callconduit(repo.ui, b'user.search', query)\\n # username not found is not an error of the API. So check if we have missed\\n # some names here.\\n data = result[b'data']\\n resolved = set(entry[b'fields'][b'username'].lower() for entry in data)\\n unresolved = set(names) - resolved\\n if unresolved:\\n raise error.Abort(\\n _(b'unknown username: %s') % b' '.join(sorted(unresolved))\\n )\\n return [entry[b'phid'] for entry in data]\\n \\n \\n @vcrcommand(\\n b'phabsend',\\n [\\n (b'r', b'rev', [], _(b'revisions to send'), _(b'REV')),\\n (b'', b'amend', True, _(b'update commit messages')),\\n (b'', b'reviewer', [], _(b'specify reviewers')),\\n (b'', b'blocker', [], _(b'specify blocking reviewers')),\\n (\\n b'm',\\n b'comment',\\n b'',\\n _(b'add a comment to Revisions with new\\/updated Diffs'),\\n ),\\n (b'', b'confirm', None, _(b'ask for confirmation before sending')),\\n ],\\n _(b'REV [OPTIONS]'),\\n helpcategory=command.CATEGORY_IMPORT_EXPORT,\\n )\\n def phabsend(ui, repo, *revs, **opts):\\n \\\"\\\"\\\"upload changesets to Phabricator\\n \\n If there are multiple revisions specified, they will be send as a stack\\n with a linear dependencies relationship using the order specified by the\\n revset.\\n \\n For the first time uploading changesets, local tags will be created to\\n maintain the association. After the first time, phabsend will check\\n obsstore and tags information so it can figure out whether to update an\\n existing Differential Revision, or create a new one.\\n \\n If --amend is set, update commit messages so they have the\\n ``Differential Revision`` URL, remove related tags. This is similar to what\\n arcanist will do, and is more desired in author-push workflows. Otherwise,\\n use local tags to record the ``Differential Revision`` association.\\n \\n The --confirm option lets you confirm changesets before sending them. 
You\\n can also add following to your configuration file to make it default\\n behaviour::\\n \\n [phabsend]\\n confirm = true\\n \\n phabsend will check obsstore and the above association to decide whether to\\n update an existing Differential Revision, or create a new one.\\n \\\"\\\"\\\"\\n opts = pycompat.byteskwargs(opts)\\n revs = list(revs) + opts.get(b'rev', [])\\n revs = scmutil.revrange(repo, revs)\\n revs.sort() # ascending order to preserve topological parent\\/child in phab\\n \\n if not revs:\\n raise error.Abort(_(b'phabsend requires at least one changeset'))\\n if opts.get(b'amend'):\\n cmdutil.checkunfinished(repo)\\n \\n # {newnode: (oldnode, olddiff, olddrev}\\n oldmap = getoldnodedrevmap(repo, [repo[r].node() for r in revs])\\n \\n confirm = ui.configbool(b'phabsend', b'confirm')\\n confirm |= bool(opts.get(b'confirm'))\\n if confirm:\\n confirmed = _confirmbeforesend(repo, revs, oldmap)\\n if not confirmed:\\n raise error.Abort(_(b'phabsend cancelled'))\\n \\n actions = []\\n reviewers = opts.get(b'reviewer', [])\\n blockers = opts.get(b'blocker', [])\\n phids = []\\n if reviewers:\\n phids.extend(userphids(repo, reviewers))\\n if blockers:\\n phids.extend(\\n map(lambda phid: b'blocking(%s)' % phid, userphids(repo, blockers))\\n )\\n if phids:\\n actions.append({b'type': b'reviewers.add', b'value': phids})\\n \\n drevids = [] # [int]\\n diffmap = {} # {newnode: diff}\\n \\n # Send patches one by one so we know their Differential Revision PHIDs and\\n # can provide dependency relationship\\n lastrevphid = None\\n for rev in revs:\\n ui.debug(b'sending rev %d\\\\n' % rev)\\n ctx = repo[rev]\\n \\n # Get Differential Revision ID\\n oldnode, olddiff, revid = oldmap.get(ctx.node(), (None, None, None))\\n if oldnode != ctx.node() or opts.get(b'amend'):\\n # Create or update Differential Revision\\n revision, diff = createdifferentialrevision(\\n ctx,\\n revid,\\n lastrevphid,\\n oldnode,\\n olddiff,\\n actions,\\n opts.get(b'comment'),\\n )\\n 
diffmap[ctx.node()] = diff\\n newrevid = int(revision[b'object'][b'id'])\\n newrevphid = revision[b'object'][b'phid']\\n if revid:\\n action = b'updated'\\n else:\\n action = b'created'\\n \\n # Create a local tag to note the association, if commit message\\n # does not have it already\\n m = _differentialrevisiondescre.search(ctx.description())\\n if not m or int(m.group('id')) != newrevid:\\n tagname = b'D%d' % newrevid\\n tags.tag(\\n repo,\\n tagname,\\n ctx.node(),\\n message=None,\\n user=None,\\n date=None,\\n local=True,\\n )\\n else:\\n # Nothing changed. But still set \\\"newrevphid\\\" so the next revision\\n # could depend on this one and \\\"newrevid\\\" for the summary line.\\n- newrevphid = querydrev(repo, b'%d' % revid)[0][b'phid']\\n+ newrevphid = querydrev(repo.ui, b'%d' % revid)[0][b'phid']\\n newrevid = revid\\n action = b'skipped'\\n \\n actiondesc = ui.label(\\n {\\n b'created': _(b'created'),\\n b'skipped': _(b'skipped'),\\n b'updated': _(b'updated'),\\n }[action],\\n b'phabricator.action.%s' % action,\\n )\\n drevdesc = ui.label(b'D%d' % newrevid, b'phabricator.drev')\\n nodedesc = ui.label(bytes(ctx), b'phabricator.node')\\n desc = ui.label(ctx.description().split(b'\\\\n')[0], b'phabricator.desc')\\n ui.write(\\n _(b'%s - %s - %s: %s\\\\n') % (drevdesc, actiondesc, nodedesc, desc)\\n )\\n drevids.append(newrevid)\\n lastrevphid = newrevphid\\n \\n # Update commit messages and remove tags\\n if opts.get(b'amend'):\\n unfi = repo.unfiltered()\\n drevs = callconduit(ui, b'differential.query', {b'ids': drevids})\\n with repo.wlock(), repo.lock(), repo.transaction(b'phabsend'):\\n wnode = unfi[b'.'].node()\\n mapping = {} # {oldnode: [newnode]}\\n for i, rev in enumerate(revs):\\n old = unfi[rev]\\n drevid = drevids[i]\\n drev = [d for d in drevs if int(d[b'id']) == drevid][0]\\n newdesc = getdescfromdrev(drev)\\n # Make sure commit message contain \\\"Differential Revision\\\"\\n if old.description() != newdesc:\\n if old.phase() == 
phases.public:\\n ui.warn(\\n _(b\\\"warning: not updating public commit %s\\\\n\\\")\\n % scmutil.formatchangeid(old)\\n )\\n continue\\n parents = [\\n mapping.get(old.p1().node(), (old.p1(),))[0],\\n mapping.get(old.p2().node(), (old.p2(),))[0],\\n ]\\n new = context.metadataonlyctx(\\n repo,\\n old,\\n parents=parents,\\n text=newdesc,\\n user=old.user(),\\n date=old.date(),\\n extra=old.extra(),\\n )\\n \\n newnode = new.commit()\\n \\n mapping[old.node()] = [newnode]\\n # Update diff property\\n # If it fails just warn and keep going, otherwise the DREV\\n # associations will be lost\\n try:\\n writediffproperties(unfi[newnode], diffmap[old.node()])\\n except util.urlerr.urlerror:\\n ui.warnnoi18n(\\n b'Failed to update metadata for D%d\\\\n' % drevid\\n )\\n # Remove local tags since it's no longer necessary\\n tagname = b'D%d' % drevid\\n if tagname in repo.tags():\\n tags.tag(\\n repo,\\n tagname,\\n nullid,\\n message=None,\\n user=None,\\n date=None,\\n local=True,\\n )\\n scmutil.cleanupnodes(repo, mapping, b'phabsend', fixphase=True)\\n if wnode in mapping:\\n unfi.setparents(mapping[wnode][0])\\n \\n \\n # Map from \\\"hg:meta\\\" keys to header understood by \\\"hg import\\\". 
The order is\\n # consistent with \\\"hg export\\\" output.\\n _metanamemap = util.sortdict(\\n [\\n (b'user', b'User'),\\n (b'date', b'Date'),\\n (b'branch', b'Branch'),\\n (b'node', b'Node ID'),\\n (b'parent', b'Parent '),\\n ]\\n )\\n \\n \\n def _confirmbeforesend(repo, revs, oldmap):\\n url, token = readurltoken(repo.ui)\\n ui = repo.ui\\n for rev in revs:\\n ctx = repo[rev]\\n desc = ctx.description().splitlines()[0]\\n oldnode, olddiff, drevid = oldmap.get(ctx.node(), (None, None, None))\\n if drevid:\\n drevdesc = ui.label(b'D%d' % drevid, b'phabricator.drev')\\n else:\\n drevdesc = ui.label(_(b'NEW'), b'phabricator.drev')\\n \\n ui.write(\\n _(b'%s - %s: %s\\\\n')\\n % (\\n drevdesc,\\n ui.label(bytes(ctx), b'phabricator.node'),\\n ui.label(desc, b'phabricator.desc'),\\n )\\n )\\n \\n if ui.promptchoice(\\n _(b'Send the above changes to %s (yn)?$$ &Yes $$ &No') % url\\n ):\\n return False\\n \\n return True\\n \\n \\n _knownstatusnames = {\\n b'accepted',\\n b'needsreview',\\n b'needsrevision',\\n b'closed',\\n b'abandoned',\\n b'changesplanned',\\n }\\n \\n \\n def _getstatusname(drev):\\n \\\"\\\"\\\"get normalized status name from a Differential Revision\\\"\\\"\\\"\\n return drev[b'statusName'].replace(b' ', b'').lower()\\n \\n \\n # Small language to specify differential revisions. 
Support symbols: (), :X,\\n # +, and -.\\n \\n _elements = {\\n # token-type: binding-strength, primary, prefix, infix, suffix\\n b'(': (12, None, (b'group', 1, b')'), None, None),\\n b':': (8, None, (b'ancestors', 8), None, None),\\n b'&': (5, None, None, (b'and_', 5), None),\\n b'+': (4, None, None, (b'add', 4), None),\\n b'-': (4, None, None, (b'sub', 4), None),\\n b')': (0, None, None, None, None),\\n b'symbol': (0, b'symbol', None, None, None),\\n b'end': (0, None, None, None, None),\\n }\\n \\n \\n def _tokenize(text):\\n view = memoryview(text) # zero-copy slice\\n special = b'():+-& '\\n pos = 0\\n length = len(text)\\n while pos \\u003c length:\\n symbol = b''.join(\\n itertools.takewhile(\\n lambda ch: ch not in special, pycompat.iterbytestr(view[pos:])\\n )\\n )\\n if symbol:\\n yield (b'symbol', symbol, pos)\\n pos += len(symbol)\\n else: # special char, ignore space\\n if text[pos : pos + 1] != b' ':\\n yield (text[pos : pos + 1], None, pos)\\n pos += 1\\n yield (b'end', None, pos)\\n \\n \\n def _parse(text):\\n tree, pos = parser.parser(_elements).parse(_tokenize(text))\\n if pos != len(text):\\n raise error.ParseError(b'invalid token', pos)\\n return tree\\n \\n \\n def _parsedrev(symbol):\\n \\\"\\\"\\\"str -\\u003e int or None, ex. 
'D45' -\\u003e 45; '12' -\\u003e 12; 'x' -\\u003e None\\\"\\\"\\\"\\n if symbol.startswith(b'D') and symbol[1:].isdigit():\\n return int(symbol[1:])\\n if symbol.isdigit():\\n return int(symbol)\\n \\n \\n def _prefetchdrevs(tree):\\n \\\"\\\"\\\"return ({single-drev-id}, {ancestor-drev-id}) to prefetch\\\"\\\"\\\"\\n drevs = set()\\n ancestordrevs = set()\\n op = tree[0]\\n if op == b'symbol':\\n r = _parsedrev(tree[1])\\n if r:\\n drevs.add(r)\\n elif op == b'ancestors':\\n r, a = _prefetchdrevs(tree[1])\\n drevs.update(r)\\n ancestordrevs.update(r)\\n ancestordrevs.update(a)\\n else:\\n for t in tree[1:]:\\n r, a = _prefetchdrevs(t)\\n drevs.update(r)\\n ancestordrevs.update(a)\\n return drevs, ancestordrevs\\n \\n \\n-def querydrev(repo, spec):\\n+def querydrev(ui, spec):\\n \\\"\\\"\\\"return a list of \\\"Differential Revision\\\" dicts\\n \\n spec is a string using a simple query language, see docstring in phabread\\n for details.\\n \\n A \\\"Differential Revision dict\\\" looks like:\\n \\n {\\n \\\"activeDiffPHID\\\": \\\"PHID-DIFF-xoqnjkobbm6k4dk6hi72\\\",\\n \\\"authorPHID\\\": \\\"PHID-USER-tv3ohwc4v4jeu34otlye\\\",\\n \\\"auxiliary\\\": {\\n \\\"phabricator:depends-on\\\": [\\n \\\"PHID-DREV-gbapp366kutjebt7agcd\\\"\\n ]\\n \\\"phabricator:projects\\\": [],\\n },\\n \\\"branch\\\": \\\"default\\\",\\n \\\"ccs\\\": [],\\n \\\"commits\\\": [],\\n \\\"dateCreated\\\": \\\"1499181406\\\",\\n \\\"dateModified\\\": \\\"1499182103\\\",\\n \\\"diffs\\\": [\\n \\\"3\\\",\\n \\\"4\\\",\\n ],\\n \\\"hashes\\\": [],\\n \\\"id\\\": \\\"2\\\",\\n \\\"lineCount\\\": \\\"2\\\",\\n \\\"phid\\\": \\\"PHID-DREV-672qvysjcczopag46qty\\\",\\n \\\"properties\\\": {},\\n \\\"repositoryPHID\\\": \\\"PHID-REPO-hub2hx62ieuqeheznasv\\\",\\n \\\"reviewers\\\": [],\\n \\\"sourcePath\\\": null\\n \\\"status\\\": \\\"0\\\",\\n \\\"statusName\\\": \\\"Needs Review\\\",\\n \\\"summary\\\": \\\"\\\",\\n \\\"testPlan\\\": \\\"\\\",\\n \\\"title\\\": \\\"example\\\",\\n \\\"uri\\\": 
\\\"https:\\/\\/phab.example.com\\/D2\\\",\\n }\\n \\\"\\\"\\\"\\n # TODO: replace differential.query and differential.querydiffs with\\n # differential.diff.search because the former (and their output) are\\n # frozen, and planned to be deprecated and removed.\\n \\n def fetch(params):\\n \\\"\\\"\\\"params -\\u003e single drev or None\\\"\\\"\\\"\\n key = (params.get(b'ids') or params.get(b'phids') or [None])[0]\\n if key in prefetched:\\n return prefetched[key]\\n- drevs = callconduit(repo.ui, b'differential.query', params)\\n+ drevs = callconduit(ui, b'differential.query', params)\\n # Fill prefetched with the result\\n for drev in drevs:\\n prefetched[drev[b'phid']] = drev\\n prefetched[int(drev[b'id'])] = drev\\n if key not in prefetched:\\n raise error.Abort(\\n _(b'cannot get Differential Revision %r') % params\\n )\\n return prefetched[key]\\n \\n def getstack(topdrevids):\\n \\\"\\\"\\\"given a top, get a stack from the bottom, [id] -\\u003e [id]\\\"\\\"\\\"\\n visited = set()\\n result = []\\n queue = [{b'ids': [i]} for i in topdrevids]\\n while queue:\\n params = queue.pop()\\n drev = fetch(params)\\n if drev[b'id'] in visited:\\n continue\\n visited.add(drev[b'id'])\\n result.append(int(drev[b'id']))\\n auxiliary = drev.get(b'auxiliary', {})\\n depends = auxiliary.get(b'phabricator:depends-on', [])\\n for phid in depends:\\n queue.append({b'phids': [phid]})\\n result.reverse()\\n return smartset.baseset(result)\\n \\n # Initialize prefetch cache\\n prefetched = {} # {id or phid: drev}\\n \\n tree = _parse(spec)\\n drevs, ancestordrevs = _prefetchdrevs(tree)\\n \\n # developer config: phabricator.batchsize\\n- batchsize = repo.ui.configint(b'phabricator', b'batchsize')\\n+ batchsize = ui.configint(b'phabricator', b'batchsize')\\n \\n # Prefetch Differential Revisions in batch\\n tofetch = set(drevs)\\n for r in ancestordrevs:\\n tofetch.update(range(max(1, r - batchsize), r + 1))\\n if drevs:\\n fetch({b'ids': list(tofetch)})\\n validids = 
sorted(set(getstack(list(ancestordrevs))) | set(drevs))\\n \\n # Walk through the tree, return smartsets\\n def walk(tree):\\n op = tree[0]\\n if op == b'symbol':\\n drev = _parsedrev(tree[1])\\n if drev:\\n return smartset.baseset([drev])\\n elif tree[1] in _knownstatusnames:\\n drevs = [\\n r\\n for r in validids\\n if _getstatusname(prefetched[r]) == tree[1]\\n ]\\n return smartset.baseset(drevs)\\n else:\\n raise error.Abort(_(b'unknown symbol: %s') % tree[1])\\n elif op in {b'and_', b'add', b'sub'}:\\n assert len(tree) == 3\\n return getattr(operator, op)(walk(tree[1]), walk(tree[2]))\\n elif op == b'group':\\n return walk(tree[1])\\n elif op == b'ancestors':\\n return getstack(walk(tree[1]))\\n else:\\n raise error.ProgrammingError(b'illegal tree: %r' % tree)\\n \\n return [prefetched[r] for r in walk(tree)]\\n \\n \\n def getdescfromdrev(drev):\\n \\\"\\\"\\\"get description (commit message) from \\\"Differential Revision\\\"\\n \\n This is similar to differential.getcommitmessage API. 
But we only care\\n about limited fields: title, summary, test plan, and URL.\\n \\\"\\\"\\\"\\n title = drev[b'title']\\n summary = drev[b'summary'].rstrip()\\n testplan = drev[b'testPlan'].rstrip()\\n if testplan:\\n testplan = b'Test Plan:\\\\n%s' % testplan\\n uri = b'Differential Revision: %s' % drev[b'uri']\\n return b'\\\\n\\\\n'.join(filter(None, [title, summary, testplan, uri]))\\n \\n \\n def getdiffmeta(diff):\\n \\\"\\\"\\\"get commit metadata (date, node, user, p1) from a diff object\\n \\n The metadata could be \\\"hg:meta\\\", sent by phabsend, like:\\n \\n \\\"properties\\\": {\\n \\\"hg:meta\\\": {\\n \\\"branch\\\": \\\"default\\\",\\n \\\"date\\\": \\\"1499571514 25200\\\",\\n \\\"node\\\": \\\"98c08acae292b2faf60a279b4189beb6cff1414d\\\",\\n \\\"user\\\": \\\"Foo Bar \\u003cfoo@example.com\\u003e\\\",\\n \\\"parent\\\": \\\"6d0abad76b30e4724a37ab8721d630394070fe16\\\"\\n }\\n }\\n \\n Or converted from \\\"local:commits\\\", sent by \\\"arc\\\", like:\\n \\n \\\"properties\\\": {\\n \\\"local:commits\\\": {\\n \\\"98c08acae292b2faf60a279b4189beb6cff1414d\\\": {\\n \\\"author\\\": \\\"Foo Bar\\\",\\n \\\"authorEmail\\\": \\\"foo@example.com\\\"\\n \\\"branch\\\": \\\"default\\\",\\n \\\"commit\\\": \\\"98c08acae292b2faf60a279b4189beb6cff1414d\\\",\\n \\\"local\\\": \\\"1000\\\",\\n \\\"message\\\": \\\"...\\\",\\n \\\"parents\\\": [\\\"6d0abad76b30e4724a37ab8721d630394070fe16\\\"],\\n \\\"rev\\\": \\\"98c08acae292b2faf60a279b4189beb6cff1414d\\\",\\n \\\"summary\\\": \\\"...\\\",\\n \\\"tag\\\": \\\"\\\",\\n \\\"time\\\": 1499546314,\\n }\\n }\\n }\\n \\n Note: metadata extracted from \\\"local:commits\\\" will lose time zone\\n information.\\n \\\"\\\"\\\"\\n props = diff.get(b'properties') or {}\\n meta = props.get(b'hg:meta')\\n if not meta:\\n if props.get(b'local:commits'):\\n commit = sorted(props[b'local:commits'].values())[0]\\n meta = {}\\n if b'author' in commit and b'authorEmail' in commit:\\n meta[b'user'] = b'%s \\u003c%s\\u003e' % 
(\\n commit[b'author'],\\n commit[b'authorEmail'],\\n )\\n if b'time' in commit:\\n meta[b'date'] = b'%d 0' % int(commit[b'time'])\\n if b'branch' in commit:\\n meta[b'branch'] = commit[b'branch']\\n node = commit.get(b'commit', commit.get(b'rev'))\\n if node:\\n meta[b'node'] = node\\n if len(commit.get(b'parents', ())) \\u003e= 1:\\n meta[b'parent'] = commit[b'parents'][0]\\n else:\\n meta = {}\\n if b'date' not in meta and b'dateCreated' in diff:\\n meta[b'date'] = b'%s 0' % diff[b'dateCreated']\\n if b'branch' not in meta and diff.get(b'branch'):\\n meta[b'branch'] = diff[b'branch']\\n if b'parent' not in meta and diff.get(b'sourceControlBaseRevision'):\\n meta[b'parent'] = diff[b'sourceControlBaseRevision']\\n return meta\\n \\n \\n def readpatch(ui, drevs, write):\\n \\\"\\\"\\\"generate plain-text patch readable by 'hg import'\\n \\n write is usually ui.write. drevs is what \\\"querydrev\\\" returns, results of\\n \\\"differential.query\\\".\\n \\\"\\\"\\\"\\n # Prefetch hg:meta property for all diffs\\n diffids = sorted(set(max(int(v) for v in drev[b'diffs']) for drev in drevs))\\n diffs = callconduit(ui, b'differential.querydiffs', {b'ids': diffids})\\n \\n # Generate patch for each drev\\n for drev in drevs:\\n ui.note(_(b'reading D%s\\\\n') % drev[b'id'])\\n \\n diffid = max(int(v) for v in drev[b'diffs'])\\n body = callconduit(ui, b'differential.getrawdiff', {b'diffID': diffid})\\n desc = getdescfromdrev(drev)\\n header = b'# HG changeset patch\\\\n'\\n \\n # Try to preserve metadata from hg:meta property. Write hg patch\\n # headers that can be read by the \\\"import\\\" command. 
See patchheadermap\\n # and extract in mercurial\\/patch.py for supported headers.\\n meta = getdiffmeta(diffs[b'%d' % diffid])\\n for k in _metanamemap.keys():\\n if k in meta:\\n header += b'# %s %s\\\\n' % (_metanamemap[k], meta[k])\\n \\n content = b'%s%s\\\\n%s' % (header, desc, body)\\n write(content)\\n \\n \\n @vcrcommand(\\n b'phabread',\\n [(b'', b'stack', False, _(b'read dependencies'))],\\n _(b'DREVSPEC [OPTIONS]'),\\n helpcategory=command.CATEGORY_IMPORT_EXPORT,\\n )\\n def phabread(ui, repo, spec, **opts):\\n \\\"\\\"\\\"print patches from Phabricator suitable for importing\\n \\n DREVSPEC could be a Differential Revision identity, like ``D123``, or just\\n the number ``123``. It could also have common operators like ``+``, ``-``,\\n ``&``, ``(``, ``)`` for complex queries. Prefix ``:`` could be used to\\n select a stack.\\n \\n ``abandoned``, ``accepted``, ``closed``, ``needsreview``, ``needsrevision``\\n could be used to filter patches by status. For performance reason, they\\n only represent a subset of non-status selections and cannot be used alone.\\n \\n For example, ``:D6+8-(2+D4)`` selects a stack up to D6, plus D8 and exclude\\n D2 and D4. 
``:D9 & needsreview`` selects \\\"Needs Review\\\" revisions in a\\n stack up to D9.\\n \\n If --stack is given, follow dependencies information and read all patches.\\n It is equivalent to the ``:`` operator.\\n \\\"\\\"\\\"\\n opts = pycompat.byteskwargs(opts)\\n if opts.get(b'stack'):\\n spec = b':(%s)' % spec\\n- drevs = querydrev(repo, spec)\\n+ drevs = querydrev(repo.ui, spec)\\n readpatch(repo.ui, drevs, ui.write)\\n \\n \\n @vcrcommand(\\n b'phabupdate',\\n [\\n (b'', b'accept', False, _(b'accept revisions')),\\n (b'', b'reject', False, _(b'reject revisions')),\\n (b'', b'abandon', False, _(b'abandon revisions')),\\n (b'', b'reclaim', False, _(b'reclaim revisions')),\\n (b'm', b'comment', b'', _(b'comment on the last revision')),\\n ],\\n _(b'DREVSPEC [OPTIONS]'),\\n helpcategory=command.CATEGORY_IMPORT_EXPORT,\\n )\\n def phabupdate(ui, repo, spec, **opts):\\n \\\"\\\"\\\"update Differential Revision in batch\\n \\n DREVSPEC selects revisions. See :hg:`help phabread` for its usage.\\n \\\"\\\"\\\"\\n opts = pycompat.byteskwargs(opts)\\n flags = [n for n in b'accept reject abandon reclaim'.split() if opts.get(n)]\\n if len(flags) \\u003e 1:\\n raise error.Abort(_(b'%s cannot be used together') % b', '.join(flags))\\n \\n actions = []\\n for f in flags:\\n actions.append({b'type': f, b'value': True})\\n \\n- drevs = querydrev(repo, spec)\\n+ drevs = querydrev(repo.ui, spec)\\n for i, drev in enumerate(drevs):\\n if i + 1 == len(drevs) and opts.get(b'comment'):\\n actions.append({b'type': b'comment', b'value': opts[b'comment']})\\n if actions:\\n params = {\\n b'objectIdentifier': drev[b'phid'],\\n b'transactions': actions,\\n }\\n callconduit(ui, b'differential.revision.edit', params)\\n \\n \\n @eh.templatekeyword(b'phabreview', requires={b'ctx'})\\n def template_review(context, mapping):\\n \\\"\\\"\\\":phabreview: Object describing the review for this changeset.\\n Has attributes `url` and `id`.\\n \\\"\\\"\\\"\\n ctx = context.resource(mapping, 
b'ctx')\\n m = _differentialrevisiondescre.search(ctx.description())\\n if m:\\n return templateutil.hybriddict(\\n {b'url': m.group('url'), b'id': b\\\"D%s\\\" % m.group('id'),}\\n )\\n else:\\n tags = ctx.repo().nodetags(ctx.node())\\n for t in tags:\\n if _differentialrevisiontagre.match(t):\\n url = ctx.repo().ui.config(b'phabricator', b'url')\\n if not url.endswith(b'\\/'):\\n url += b'\\/'\\n url += t\\n \\n return templateutil.hybriddict({b'url': url, b'id': t,})\\n return None\\n \\n \\n @eh.templatekeyword(b'phabstatus', requires={b'ctx', b'repo', b'ui'})\\n def template_status(context, mapping):\\n \\\"\\\"\\\":phabstatus: String. Status of Phabricator differential.\\n \\\"\\\"\\\"\\n ctx = context.resource(mapping, b'ctx')\\n repo = context.resource(mapping, b'repo')\\n ui = context.resource(mapping, b'ui')\\n \\n rev = ctx.rev()\\n try:\\n drevid = getdrevmap(repo, [rev])[rev]\\n except KeyError:\\n return None\\n drevs = callconduit(ui, b'differential.query', {b'ids': [drevid]})\\n for drev in drevs:\\n if int(drev[b'id']) == drevid:\\n return templateutil.hybriddict(\\n {b'url': drev[b'uri'], b'status': drev[b'statusName'],}\\n )\\n return None\\n \\n \\n @show.showview(b'phabstatus', csettopic=b'work')\\n def phabstatusshowview(ui, repo, displayer):\\n \\\"\\\"\\\"Phabricator differiential status\\\"\\\"\\\"\\n revs = repo.revs('sort(_underway(), topo)')\\n drevmap = getdrevmap(repo, revs)\\n unknownrevs, drevids, revsbydrevid = [], set([]), {}\\n for rev, drevid in pycompat.iteritems(drevmap):\\n if drevid is not None:\\n drevids.add(drevid)\\n revsbydrevid.setdefault(drevid, set([])).add(rev)\\n else:\\n unknownrevs.append(rev)\\n \\n drevs = callconduit(ui, b'differential.query', {b'ids': list(drevids)})\\n drevsbyrev = {}\\n for drev in drevs:\\n for rev in revsbydrevid[int(drev[b'id'])]:\\n drevsbyrev[rev] = drev\\n \\n def phabstatus(ctx):\\n drev = drevsbyrev[ctx.rev()]\\n status = ui.label(\\n b'%(statusName)s' % drev,\\n 
b'phabricator.status.%s' % _getstatusname(drev),\\n )\\n ui.write(b\\\"\\\\n%s %s\\\\n\\\" % (drev[b'uri'], status))\\n \\n revs -= smartset.baseset(unknownrevs)\\n revdag = graphmod.dagwalker(repo, revs)\\n \\n ui.setconfig(b'experimental', b'graphshorten', True)\\n displayer._exthook = phabstatus\\n nodelen = show.longestshortest(repo, revs)\\n logcmdutil.displaygraph(\\n ui,\\n repo,\\n revdag,\\n displayer,\\n graphmod.asciiedges,\\n props={b'nodelen': nodelen},\\n )\\n\"}]}],\"properties\":[]},\"20441\":{\"id\":\"20441\",\"revisionID\":\"8205\",\"dateCreated\":\"1581962476\",\"dateModified\":\"1583327820\",\"sourceControlBaseRevision\":\"69392460f7b1adf37a2b1206af8a5eb8ca0828f1\",\"sourceControlPath\":null,\"sourceControlSystem\":\"hg\",\"branch\":null,\"bookmark\":null,\"creationMethod\":\"commit\",\"description\":\"rHG72c6190de577bb2bb448eb2b14121e4ef85d08ff\",\"unitStatus\":\"6\",\"lintStatus\":\"6\",\"changes\":[{\"id\":\"55596\",\"metadata\":{\"line:first\":1612,\"hash.effect\":\"8g80k6H5tv6i\"},\"oldPath\":\"hgext\\/phabricator.py\",\"currentPath\":\"hgext\\/phabricator.py\",\"awayPaths\":[],\"oldProperties\":[],\"newProperties\":[],\"type\":\"2\",\"fileType\":\"1\",\"commitHash\":null,\"addLines\":\"5\",\"delLines\":\"7\",\"hunks\":[{\"oldOffset\":\"1\",\"newOffset\":\"1\",\"oldLength\":\"1801\",\"newLength\":\"1799\",\"addLines\":null,\"delLines\":null,\"isMissingOldNewline\":null,\"isMissingNewNewline\":null,\"corpus\":\" # phabricator.py - simple Phabricator integration\\n #\\n # Copyright 2017 Facebook, Inc.\\n #\\n # This software may be used and distributed according to the terms of the\\n # GNU General Public License version 2 or any later version.\\n \\\"\\\"\\\"simple Phabricator integration (EXPERIMENTAL)\\n \\n This extension provides a ``phabsend`` command which sends a stack of\\n changesets to Phabricator, and a ``phabread`` command which prints a stack of\\n revisions in a format suitable for :hg:`import`, and a ``phabupdate`` command\\n 
to update statuses in batch.\\n \\n A \\\"phabstatus\\\" view for :hg:`show` is also provided; it displays status\\n information of Phabricator differentials associated with unfinished\\n changesets.\\n \\n By default, Phabricator requires ``Test Plan`` which might prevent some\\n changeset from being sent. The requirement could be disabled by changing\\n ``differential.require-test-plan-field`` config server side.\\n \\n Config::\\n \\n [phabricator]\\n # Phabricator URL\\n url = https:\\/\\/phab.example.com\\/\\n \\n # Repo callsign. If a repo has a URL https:\\/\\/$HOST\\/diffusion\\/FOO, then its\\n # callsign is \\\"FOO\\\".\\n callsign = FOO\\n \\n # curl command to use. If not set (default), use builtin HTTP library to\\n # communicate. If set, use the specified curl command. This could be useful\\n # if you need to specify advanced options that is not easily supported by\\n # the internal library.\\n curlcmd = curl --connect-timeout 2 --retry 3 --silent\\n \\n [auth]\\n example.schemes = https\\n example.prefix = phab.example.com\\n \\n # API token. Get it from https:\\/\\/$HOST\\/conduit\\/login\\/\\n example.phabtoken = cli-xxxxxxxxxxxxxxxxxxxxxxxxxxxx\\n \\\"\\\"\\\"\\n \\n from __future__ import absolute_import\\n \\n import base64\\n import contextlib\\n import hashlib\\n import itertools\\n import json\\n import mimetypes\\n import operator\\n import re\\n \\n from mercurial.node import bin, nullid\\n from mercurial.i18n import _\\n from mercurial.pycompat import getattr\\n from mercurial.thirdparty import attr\\n from mercurial import (\\n cmdutil,\\n context,\\n encoding,\\n error,\\n exthelper,\\n graphmod,\\n httpconnection as httpconnectionmod,\\n localrepo,\\n logcmdutil,\\n match,\\n mdiff,\\n obsutil,\\n parser,\\n patch,\\n phases,\\n pycompat,\\n scmutil,\\n smartset,\\n tags,\\n templatefilters,\\n templateutil,\\n url as urlmod,\\n util,\\n )\\n from mercurial.utils import (\\n procutil,\\n stringutil,\\n )\\n from . 
import show\\n \\n \\n # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for\\n # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should\\n # be specifying the version(s) of Mercurial they are tested with, or\\n # leave the attribute unspecified.\\n testedwith = b'ships-with-hg-core'\\n \\n eh = exthelper.exthelper()\\n \\n cmdtable = eh.cmdtable\\n command = eh.command\\n configtable = eh.configtable\\n templatekeyword = eh.templatekeyword\\n uisetup = eh.finaluisetup\\n \\n # developer config: phabricator.batchsize\\n eh.configitem(\\n b'phabricator', b'batchsize', default=12,\\n )\\n eh.configitem(\\n b'phabricator', b'callsign', default=None,\\n )\\n eh.configitem(\\n b'phabricator', b'curlcmd', default=None,\\n )\\n # developer config: phabricator.repophid\\n eh.configitem(\\n b'phabricator', b'repophid', default=None,\\n )\\n eh.configitem(\\n b'phabricator', b'url', default=None,\\n )\\n eh.configitem(\\n b'phabsend', b'confirm', default=False,\\n )\\n \\n colortable = {\\n b'phabricator.action.created': b'green',\\n b'phabricator.action.skipped': b'magenta',\\n b'phabricator.action.updated': b'magenta',\\n b'phabricator.desc': b'',\\n b'phabricator.drev': b'bold',\\n b'phabricator.node': b'',\\n b'phabricator.status.abandoned': b'magenta dim',\\n b'phabricator.status.accepted': b'green bold',\\n b'phabricator.status.closed': b'green',\\n b'phabricator.status.needsreview': b'yellow',\\n b'phabricator.status.needsrevision': b'red',\\n b'phabricator.status.changesplanned': b'red',\\n }\\n \\n _VCR_FLAGS = [\\n (\\n b'',\\n b'test-vcr',\\n b'',\\n _(\\n b'Path to a vcr file. 
If nonexistent, will record a new vcr transcript'\\n b', otherwise will mock all http requests using the specified vcr file.'\\n b' (ADVANCED)'\\n ),\\n ),\\n ]\\n \\n \\n @eh.wrapfunction(localrepo, \\\"loadhgrc\\\")\\n def _loadhgrc(orig, ui, wdirvfs, hgvfs, requirements):\\n \\\"\\\"\\\"Load ``.arcconfig`` content into a ui instance on repository open.\\n \\\"\\\"\\\"\\n result = False\\n arcconfig = {}\\n \\n try:\\n # json.loads only accepts bytes from 3.6+\\n rawparams = encoding.unifromlocal(wdirvfs.read(b\\\".arcconfig\\\"))\\n # json.loads only returns unicode strings\\n arcconfig = pycompat.rapply(\\n lambda x: encoding.unitolocal(x)\\n if isinstance(x, pycompat.unicode)\\n else x,\\n pycompat.json_loads(rawparams),\\n )\\n \\n result = True\\n except ValueError:\\n ui.warn(_(b\\\"invalid JSON in %s\\\\n\\\") % wdirvfs.join(b\\\".arcconfig\\\"))\\n except IOError:\\n pass\\n \\n cfg = util.sortdict()\\n \\n if b\\\"repository.callsign\\\" in arcconfig:\\n cfg[(b\\\"phabricator\\\", b\\\"callsign\\\")] = arcconfig[b\\\"repository.callsign\\\"]\\n \\n if b\\\"phabricator.uri\\\" in arcconfig:\\n cfg[(b\\\"phabricator\\\", b\\\"url\\\")] = arcconfig[b\\\"phabricator.uri\\\"]\\n \\n if cfg:\\n ui.applyconfig(cfg, source=wdirvfs.join(b\\\".arcconfig\\\"))\\n \\n return orig(ui, wdirvfs, hgvfs, requirements) or result # Load .hg\\/hgrc\\n \\n \\n def vcrcommand(name, flags, spec, helpcategory=None, optionalrepo=False):\\n fullflags = flags + _VCR_FLAGS\\n \\n def hgmatcher(r1, r2):\\n if r1.uri != r2.uri or r1.method != r2.method:\\n return False\\n r1params = util.urlreq.parseqs(r1.body)\\n r2params = util.urlreq.parseqs(r2.body)\\n for key in r1params:\\n if key not in r2params:\\n return False\\n value = r1params[key][0]\\n # we want to compare json payloads without worrying about ordering\\n if value.startswith(b'{') and value.endswith(b'}'):\\n r1json = pycompat.json_loads(value)\\n r2json = pycompat.json_loads(r2params[key][0])\\n if r1json != r2json:\\n 
return False\\n elif r2params[key][0] != value:\\n return False\\n return True\\n \\n def sanitiserequest(request):\\n request.body = re.sub(\\n br'cli-[a-z0-9]+', br'cli-hahayouwish', request.body\\n )\\n return request\\n \\n def sanitiseresponse(response):\\n if 'set-cookie' in response['headers']:\\n del response['headers']['set-cookie']\\n return response\\n \\n def decorate(fn):\\n def inner(*args, **kwargs):\\n cassette = pycompat.fsdecode(kwargs.pop('test_vcr', None))\\n if cassette:\\n import hgdemandimport\\n \\n with hgdemandimport.deactivated():\\n import vcr as vcrmod\\n import vcr.stubs as stubs\\n \\n vcr = vcrmod.VCR(\\n serializer='json',\\n before_record_request=sanitiserequest,\\n before_record_response=sanitiseresponse,\\n custom_patches=[\\n (\\n urlmod,\\n 'httpconnection',\\n stubs.VCRHTTPConnection,\\n ),\\n (\\n urlmod,\\n 'httpsconnection',\\n stubs.VCRHTTPSConnection,\\n ),\\n ],\\n )\\n vcr.register_matcher('hgmatcher', hgmatcher)\\n with vcr.use_cassette(cassette, match_on=['hgmatcher']):\\n return fn(*args, **kwargs)\\n return fn(*args, **kwargs)\\n \\n inner.__name__ = fn.__name__\\n inner.__doc__ = fn.__doc__\\n return command(\\n name,\\n fullflags,\\n spec,\\n helpcategory=helpcategory,\\n optionalrepo=optionalrepo,\\n )(inner)\\n \\n return decorate\\n \\n \\n def urlencodenested(params):\\n \\\"\\\"\\\"like urlencode, but works with nested parameters.\\n \\n For example, if params is {'a': ['b', 'c'], 'd': {'e': 'f'}}, it will be\\n flattened to {'a[0]': 'b', 'a[1]': 'c', 'd[e]': 'f'} and then passed to\\n urlencode. 
Note: the encoding is consistent with PHP's http_build_query.\\n \\\"\\\"\\\"\\n flatparams = util.sortdict()\\n \\n def process(prefix, obj):\\n if isinstance(obj, bool):\\n obj = {True: b'true', False: b'false'}[obj] # Python -\\u003e PHP form\\n lister = lambda l: [(b'%d' % k, v) for k, v in enumerate(l)]\\n items = {list: lister, dict: lambda x: x.items()}.get(type(obj))\\n if items is None:\\n flatparams[prefix] = obj\\n else:\\n for k, v in items(obj):\\n if prefix:\\n process(b'%s[%s]' % (prefix, k), v)\\n else:\\n process(k, v)\\n \\n process(b'', params)\\n return util.urlreq.urlencode(flatparams)\\n \\n \\n def readurltoken(ui):\\n \\\"\\\"\\\"return conduit url, token and make sure they exist\\n \\n Currently read from [auth] config section. In the future, it might\\n make sense to read from .arcconfig and .arcrc as well.\\n \\\"\\\"\\\"\\n url = ui.config(b'phabricator', b'url')\\n if not url:\\n raise error.Abort(\\n _(b'config %s.%s is required') % (b'phabricator', b'url')\\n )\\n \\n res = httpconnectionmod.readauthforuri(ui, url, util.url(url).user)\\n token = None\\n \\n if res:\\n group, auth = res\\n \\n ui.debug(b\\\"using auth.%s.* for authentication\\\\n\\\" % group)\\n \\n token = auth.get(b'phabtoken')\\n \\n if not token:\\n raise error.Abort(\\n _(b'Can\\\\'t find conduit token associated to %s') % (url,)\\n )\\n \\n return url, token\\n \\n \\n def callconduit(ui, name, params):\\n \\\"\\\"\\\"call Conduit API, params is a dict. 
return json.loads result, or None\\\"\\\"\\\"\\n host, token = readurltoken(ui)\\n url, authinfo = util.url(b'\\/'.join([host, b'api', name])).authinfo()\\n ui.debug(b'Conduit Call: %s %s\\\\n' % (url, pycompat.byterepr(params)))\\n params = params.copy()\\n params[b'__conduit__'] = {\\n b'token': token,\\n }\\n rawdata = {\\n b'params': templatefilters.json(params),\\n b'output': b'json',\\n b'__conduit__': 1,\\n }\\n data = urlencodenested(rawdata)\\n curlcmd = ui.config(b'phabricator', b'curlcmd')\\n if curlcmd:\\n sin, sout = procutil.popen2(\\n b'%s -d @- %s' % (curlcmd, procutil.shellquote(url))\\n )\\n sin.write(data)\\n sin.close()\\n body = sout.read()\\n else:\\n urlopener = urlmod.opener(ui, authinfo)\\n request = util.urlreq.request(pycompat.strurl(url), data=data)\\n with contextlib.closing(urlopener.open(request)) as rsp:\\n body = rsp.read()\\n ui.debug(b'Conduit Response: %s\\\\n' % body)\\n parsed = pycompat.rapply(\\n lambda x: encoding.unitolocal(x)\\n if isinstance(x, pycompat.unicode)\\n else x,\\n # json.loads only accepts bytes from py3.6+\\n pycompat.json_loads(encoding.unifromlocal(body)),\\n )\\n if parsed.get(b'error_code'):\\n msg = _(b'Conduit Error (%s): %s') % (\\n parsed[b'error_code'],\\n parsed[b'error_info'],\\n )\\n raise error.Abort(msg)\\n return parsed[b'result']\\n \\n \\n @vcrcommand(b'debugcallconduit', [], _(b'METHOD'), optionalrepo=True)\\n def debugcallconduit(ui, repo, name):\\n \\\"\\\"\\\"call Conduit API\\n \\n Call parameters are read from stdin as a JSON blob. 
Result will be written\\n to stdout as a JSON blob.\\n \\\"\\\"\\\"\\n # json.loads only accepts bytes from 3.6+\\n rawparams = encoding.unifromlocal(ui.fin.read())\\n # json.loads only returns unicode strings\\n params = pycompat.rapply(\\n lambda x: encoding.unitolocal(x)\\n if isinstance(x, pycompat.unicode)\\n else x,\\n pycompat.json_loads(rawparams),\\n )\\n # json.dumps only accepts unicode strings\\n result = pycompat.rapply(\\n lambda x: encoding.unifromlocal(x) if isinstance(x, bytes) else x,\\n callconduit(ui, name, params),\\n )\\n s = json.dumps(result, sort_keys=True, indent=2, separators=(u',', u': '))\\n ui.write(b'%s\\\\n' % encoding.unitolocal(s))\\n \\n \\n def getrepophid(repo):\\n \\\"\\\"\\\"given callsign, return repository PHID or None\\\"\\\"\\\"\\n # developer config: phabricator.repophid\\n repophid = repo.ui.config(b'phabricator', b'repophid')\\n if repophid:\\n return repophid\\n callsign = repo.ui.config(b'phabricator', b'callsign')\\n if not callsign:\\n return None\\n query = callconduit(\\n repo.ui,\\n b'diffusion.repository.search',\\n {b'constraints': {b'callsigns': [callsign]}},\\n )\\n if len(query[b'data']) == 0:\\n return None\\n repophid = query[b'data'][0][b'phid']\\n repo.ui.setconfig(b'phabricator', b'repophid', repophid)\\n return repophid\\n \\n \\n _differentialrevisiontagre = re.compile(br'\\\\AD([1-9][0-9]*)\\\\Z')\\n _differentialrevisiondescre = re.compile(\\n br'^Differential Revision:\\\\s*(?P\\u003curl\\u003e(?:.*)D(?P\\u003cid\\u003e[1-9][0-9]*))$', re.M\\n )\\n \\n \\n def getoldnodedrevmap(repo, nodelist):\\n \\\"\\\"\\\"find previous nodes that has been sent to Phabricator\\n \\n return {node: (oldnode, Differential diff, Differential Revision ID)}\\n for node in nodelist with known previous sent versions, or associated\\n Differential Revision IDs. 
``oldnode`` and ``Differential diff`` could\\n be ``None``.\\n \\n Examines commit messages like \\\"Differential Revision:\\\" to get the\\n association information.\\n \\n If such commit message line is not found, examines all precursors and their\\n tags. Tags with format like \\\"D1234\\\" are considered a match and the node\\n with that tag, and the number after \\\"D\\\" (ex. 1234) will be returned.\\n \\n The ``old node``, if not None, is guaranteed to be the last diff of\\n corresponding Differential Revision, and exist in the repo.\\n \\\"\\\"\\\"\\n unfi = repo.unfiltered()\\n has_node = unfi.changelog.index.has_node\\n \\n result = {} # {node: (oldnode?, lastdiff?, drev)}\\n toconfirm = {} # {node: (force, {precnode}, drev)}\\n for node in nodelist:\\n ctx = unfi[node]\\n # For tags like \\\"D123\\\", put them into \\\"toconfirm\\\" to verify later\\n precnodes = list(obsutil.allpredecessors(unfi.obsstore, [node]))\\n for n in precnodes:\\n if has_node(n):\\n for tag in unfi.nodetags(n):\\n m = _differentialrevisiontagre.match(tag)\\n if m:\\n toconfirm[node] = (0, set(precnodes), int(m.group(1)))\\n break\\n else:\\n continue # move to next predecessor\\n break # found a tag, stop\\n else:\\n # Check commit message\\n m = _differentialrevisiondescre.search(ctx.description())\\n if m:\\n toconfirm[node] = (1, set(precnodes), int(m.group('id')))\\n \\n # Double check if tags are genuine by collecting all old nodes from\\n # Phabricator, and expect precursors overlap with it.\\n if toconfirm:\\n drevs = [drev for force, precs, drev in toconfirm.values()]\\n alldiffs = callconduit(\\n unfi.ui, b'differential.querydiffs', {b'revisionIDs': drevs}\\n )\\n getnode = lambda d: bin(getdiffmeta(d).get(b'node', b'')) or None\\n for newnode, (force, precset, drev) in toconfirm.items():\\n diffs = [\\n d for d in alldiffs.values() if int(d[b'revisionID']) == drev\\n ]\\n \\n # \\\"precursors\\\" as known by Phabricator\\n phprecset = set(getnode(d) for d in diffs)\\n 
\\n # Ignore if precursors (Phabricator and local repo) do not overlap,\\n # and force is not set (when commit message says nothing)\\n if not force and not bool(phprecset & precset):\\n tagname = b'D%d' % drev\\n tags.tag(\\n repo,\\n tagname,\\n nullid,\\n message=None,\\n user=None,\\n date=None,\\n local=True,\\n )\\n unfi.ui.warn(\\n _(\\n b'D%d: local tag removed - does not match '\\n b'Differential history\\\\n'\\n )\\n % drev\\n )\\n continue\\n \\n # Find the last node using Phabricator metadata, and make sure it\\n # exists in the repo\\n oldnode = lastdiff = None\\n if diffs:\\n lastdiff = max(diffs, key=lambda d: int(d[b'id']))\\n oldnode = getnode(lastdiff)\\n if oldnode and not has_node(oldnode):\\n oldnode = None\\n \\n result[newnode] = (oldnode, lastdiff, drev)\\n \\n return result\\n \\n \\n def getdrevmap(repo, revs):\\n \\\"\\\"\\\"Return a dict mapping each rev in `revs` to their Differential Revision\\n ID or None.\\n \\\"\\\"\\\"\\n result = {}\\n for rev in revs:\\n result[rev] = None\\n ctx = repo[rev]\\n # Check commit message\\n m = _differentialrevisiondescre.search(ctx.description())\\n if m:\\n result[rev] = int(m.group('id'))\\n continue\\n # Check tags\\n for tag in repo.nodetags(ctx.node()):\\n m = _differentialrevisiontagre.match(tag)\\n if m:\\n result[rev] = int(m.group(1))\\n break\\n \\n return result\\n \\n \\n def getdiff(ctx, diffopts):\\n \\\"\\\"\\\"plain-text diff without header (user, commit message, etc)\\\"\\\"\\\"\\n output = util.stringio()\\n for chunk, _label in patch.diffui(\\n ctx.repo(), ctx.p1().node(), ctx.node(), None, opts=diffopts\\n ):\\n output.write(chunk)\\n return output.getvalue()\\n \\n \\n class DiffChangeType(object):\\n ADD = 1\\n CHANGE = 2\\n DELETE = 3\\n MOVE_AWAY = 4\\n COPY_AWAY = 5\\n MOVE_HERE = 6\\n COPY_HERE = 7\\n MULTICOPY = 8\\n \\n \\n class DiffFileType(object):\\n TEXT = 1\\n IMAGE = 2\\n BINARY = 3\\n \\n \\n @attr.s\\n class phabhunk(dict):\\n \\\"\\\"\\\"Represents a 
Differential hunk, which is owned by a Differential change\\n \\\"\\\"\\\"\\n \\n oldOffset = attr.ib(default=0) # camelcase-required\\n oldLength = attr.ib(default=0) # camelcase-required\\n newOffset = attr.ib(default=0) # camelcase-required\\n newLength = attr.ib(default=0) # camelcase-required\\n corpus = attr.ib(default='')\\n # These get added to the phabchange's equivalents\\n addLines = attr.ib(default=0) # camelcase-required\\n delLines = attr.ib(default=0) # camelcase-required\\n \\n \\n @attr.s\\n class phabchange(object):\\n \\\"\\\"\\\"Represents a Differential change, owns Differential hunks and owned by a\\n Differential diff. Each one represents one file in a diff.\\n \\\"\\\"\\\"\\n \\n currentPath = attr.ib(default=None) # camelcase-required\\n oldPath = attr.ib(default=None) # camelcase-required\\n awayPaths = attr.ib(default=attr.Factory(list)) # camelcase-required\\n metadata = attr.ib(default=attr.Factory(dict))\\n oldProperties = attr.ib(default=attr.Factory(dict)) # camelcase-required\\n newProperties = attr.ib(default=attr.Factory(dict)) # camelcase-required\\n type = attr.ib(default=DiffChangeType.CHANGE)\\n fileType = attr.ib(default=DiffFileType.TEXT) # camelcase-required\\n commitHash = attr.ib(default=None) # camelcase-required\\n addLines = attr.ib(default=0) # camelcase-required\\n delLines = attr.ib(default=0) # camelcase-required\\n hunks = attr.ib(default=attr.Factory(list))\\n \\n def copynewmetadatatoold(self):\\n for key in list(self.metadata.keys()):\\n newkey = key.replace(b'new:', b'old:')\\n self.metadata[newkey] = self.metadata[key]\\n \\n def addoldmode(self, value):\\n self.oldProperties[b'unix:filemode'] = value\\n \\n def addnewmode(self, value):\\n self.newProperties[b'unix:filemode'] = value\\n \\n def addhunk(self, hunk):\\n if not isinstance(hunk, phabhunk):\\n raise error.Abort(b'phabchange.addhunk only takes phabhunks')\\n self.hunks.append(pycompat.byteskwargs(attr.asdict(hunk)))\\n # It's useful to include 
these stats since the Phab web UI shows them,\\n # and uses them to estimate how large a change a Revision is. Also used\\n # in email subjects for the [+++--] bit.\\n self.addLines += hunk.addLines\\n self.delLines += hunk.delLines\\n \\n \\n @attr.s\\n class phabdiff(object):\\n \\\"\\\"\\\"Represents a Differential diff, owns Differential changes. Corresponds\\n to a commit.\\n \\\"\\\"\\\"\\n \\n # Doesn't seem to be any reason to send this (output of uname -n)\\n sourceMachine = attr.ib(default=b'') # camelcase-required\\n sourcePath = attr.ib(default=b'\\/') # camelcase-required\\n sourceControlBaseRevision = attr.ib(default=b'0' * 40) # camelcase-required\\n sourceControlPath = attr.ib(default=b'\\/') # camelcase-required\\n sourceControlSystem = attr.ib(default=b'hg') # camelcase-required\\n branch = attr.ib(default=b'default')\\n bookmark = attr.ib(default=None)\\n creationMethod = attr.ib(default=b'phabsend') # camelcase-required\\n lintStatus = attr.ib(default=b'none') # camelcase-required\\n unitStatus = attr.ib(default=b'none') # camelcase-required\\n changes = attr.ib(default=attr.Factory(dict))\\n repositoryPHID = attr.ib(default=None) # camelcase-required\\n \\n def addchange(self, change):\\n if not isinstance(change, phabchange):\\n raise error.Abort(b'phabdiff.addchange only takes phabchanges')\\n self.changes[change.currentPath] = pycompat.byteskwargs(\\n attr.asdict(change)\\n )\\n \\n \\n def maketext(pchange, ctx, fname):\\n \\\"\\\"\\\"populate the phabchange for a text file\\\"\\\"\\\"\\n repo = ctx.repo()\\n fmatcher = match.exact([fname])\\n diffopts = mdiff.diffopts(git=True, context=32767)\\n _pfctx, _fctx, header, fhunks = next(\\n patch.diffhunks(repo, ctx.p1(), ctx, fmatcher, opts=diffopts)\\n )\\n \\n for fhunk in fhunks:\\n (oldOffset, oldLength, newOffset, newLength), lines = fhunk\\n corpus = b''.join(lines[1:])\\n shunk = list(header)\\n shunk.extend(lines)\\n _mf, _mt, addLines, delLines, _hb = patch.diffstatsum(\\n 
patch.diffstatdata(util.iterlines(shunk))\\n )\\n pchange.addhunk(\\n phabhunk(\\n oldOffset,\\n oldLength,\\n newOffset,\\n newLength,\\n corpus,\\n addLines,\\n delLines,\\n )\\n )\\n \\n \\n def uploadchunks(fctx, fphid):\\n \\\"\\\"\\\"upload large binary files as separate chunks.\\n Phab requests chunking over 8MiB, and splits into 4MiB chunks\\n \\\"\\\"\\\"\\n ui = fctx.repo().ui\\n chunks = callconduit(ui, b'file.querychunks', {b'filePHID': fphid})\\n with ui.makeprogress(\\n _(b'uploading file chunks'), unit=_(b'chunks'), total=len(chunks)\\n ) as progress:\\n for chunk in chunks:\\n progress.increment()\\n if chunk[b'complete']:\\n continue\\n bstart = int(chunk[b'byteStart'])\\n bend = int(chunk[b'byteEnd'])\\n callconduit(\\n ui,\\n b'file.uploadchunk',\\n {\\n b'filePHID': fphid,\\n b'byteStart': bstart,\\n b'data': base64.b64encode(fctx.data()[bstart:bend]),\\n b'dataEncoding': b'base64',\\n },\\n )\\n \\n \\n def uploadfile(fctx):\\n \\\"\\\"\\\"upload binary files to Phabricator\\\"\\\"\\\"\\n repo = fctx.repo()\\n ui = repo.ui\\n fname = fctx.path()\\n size = fctx.size()\\n fhash = pycompat.bytestr(hashlib.sha256(fctx.data()).hexdigest())\\n \\n # an allocate call is required first to see if an upload is even required\\n # (Phab might already have it) and to determine if chunking is needed\\n allocateparams = {\\n b'name': fname,\\n b'contentLength': size,\\n b'contentHash': fhash,\\n }\\n filealloc = callconduit(ui, b'file.allocate', allocateparams)\\n fphid = filealloc[b'filePHID']\\n \\n if filealloc[b'upload']:\\n ui.write(_(b'uploading %s\\\\n') % bytes(fctx))\\n if not fphid:\\n uploadparams = {\\n b'name': fname,\\n b'data_base64': base64.b64encode(fctx.data()),\\n }\\n fphid = callconduit(ui, b'file.upload', uploadparams)\\n else:\\n uploadchunks(fctx, fphid)\\n else:\\n ui.debug(b'server already has %s\\\\n' % bytes(fctx))\\n \\n if not fphid:\\n raise error.Abort(b'Upload of %s failed.' 
% bytes(fctx))\\n \\n return fphid\\n \\n \\n def addoldbinary(pchange, fctx):\\n \\\"\\\"\\\"add the metadata for the previous version of a binary file to the\\n phabchange for the new version\\n \\\"\\\"\\\"\\n oldfctx = fctx.p1()\\n if fctx.cmp(oldfctx):\\n # Files differ, add the old one\\n pchange.metadata[b'old:file:size'] = oldfctx.size()\\n mimeguess, _enc = mimetypes.guess_type(\\n encoding.unifromlocal(oldfctx.path())\\n )\\n if mimeguess:\\n pchange.metadata[b'old:file:mime-type'] = pycompat.bytestr(\\n mimeguess\\n )\\n fphid = uploadfile(oldfctx)\\n pchange.metadata[b'old:binary-phid'] = fphid\\n else:\\n # If it's left as IMAGE\\/BINARY web UI might try to display it\\n pchange.fileType = DiffFileType.TEXT\\n pchange.copynewmetadatatoold()\\n \\n \\n def makebinary(pchange, fctx):\\n \\\"\\\"\\\"populate the phabchange for a binary file\\\"\\\"\\\"\\n pchange.fileType = DiffFileType.BINARY\\n fphid = uploadfile(fctx)\\n pchange.metadata[b'new:binary-phid'] = fphid\\n pchange.metadata[b'new:file:size'] = fctx.size()\\n mimeguess, _enc = mimetypes.guess_type(encoding.unifromlocal(fctx.path()))\\n if mimeguess:\\n mimeguess = pycompat.bytestr(mimeguess)\\n pchange.metadata[b'new:file:mime-type'] = mimeguess\\n if mimeguess.startswith(b'image\\/'):\\n pchange.fileType = DiffFileType.IMAGE\\n \\n \\n # Copied from mercurial\\/patch.py\\n gitmode = {b'l': b'120000', b'x': b'100755', b'': b'100644'}\\n \\n \\n def notutf8(fctx):\\n \\\"\\\"\\\"detect non-UTF-8 text files since Phabricator requires them to be marked\\n as binary\\n \\\"\\\"\\\"\\n try:\\n fctx.data().decode('utf-8')\\n if fctx.parents():\\n fctx.p1().data().decode('utf-8')\\n return False\\n except UnicodeDecodeError:\\n fctx.repo().ui.write(\\n _(b'file %s detected as non-UTF-8, marked as binary\\\\n')\\n % fctx.path()\\n )\\n return True\\n \\n \\n def addremoved(pdiff, ctx, removed):\\n \\\"\\\"\\\"add removed files to the phabdiff. 
Shouldn't include moves\\\"\\\"\\\"\\n for fname in removed:\\n pchange = phabchange(\\n currentPath=fname, oldPath=fname, type=DiffChangeType.DELETE\\n )\\n pchange.addoldmode(gitmode[ctx.p1()[fname].flags()])\\n fctx = ctx.p1()[fname]\\n if not (fctx.isbinary() or notutf8(fctx)):\\n maketext(pchange, ctx, fname)\\n \\n pdiff.addchange(pchange)\\n \\n \\n def addmodified(pdiff, ctx, modified):\\n \\\"\\\"\\\"add modified files to the phabdiff\\\"\\\"\\\"\\n for fname in modified:\\n fctx = ctx[fname]\\n pchange = phabchange(currentPath=fname, oldPath=fname)\\n filemode = gitmode[ctx[fname].flags()]\\n originalmode = gitmode[ctx.p1()[fname].flags()]\\n if filemode != originalmode:\\n pchange.addoldmode(originalmode)\\n pchange.addnewmode(filemode)\\n \\n if fctx.isbinary() or notutf8(fctx):\\n makebinary(pchange, fctx)\\n addoldbinary(pchange, fctx)\\n else:\\n maketext(pchange, ctx, fname)\\n \\n pdiff.addchange(pchange)\\n \\n \\n def addadded(pdiff, ctx, added, removed):\\n \\\"\\\"\\\"add file adds to the phabdiff, both new files and copies\\/moves\\\"\\\"\\\"\\n # Keep track of files that've been recorded as moved\\/copied, so if there are\\n # additional copies we can mark them (moves get removed from removed)\\n copiedchanges = {}\\n movedchanges = {}\\n for fname in added:\\n fctx = ctx[fname]\\n pchange = phabchange(currentPath=fname)\\n \\n filemode = gitmode[ctx[fname].flags()]\\n renamed = fctx.renamed()\\n \\n if renamed:\\n originalfname = renamed[0]\\n originalmode = gitmode[ctx.p1()[originalfname].flags()]\\n pchange.oldPath = originalfname\\n \\n if originalfname in removed:\\n origpchange = phabchange(\\n currentPath=originalfname,\\n oldPath=originalfname,\\n type=DiffChangeType.MOVE_AWAY,\\n awayPaths=[fname],\\n )\\n movedchanges[originalfname] = origpchange\\n removed.remove(originalfname)\\n pchange.type = DiffChangeType.MOVE_HERE\\n elif originalfname in movedchanges:\\n movedchanges[originalfname].type = DiffChangeType.MULTICOPY\\n 
movedchanges[originalfname].awayPaths.append(fname)\\n pchange.type = DiffChangeType.COPY_HERE\\n else: # pure copy\\n if originalfname not in copiedchanges:\\n origpchange = phabchange(\\n currentPath=originalfname, type=DiffChangeType.COPY_AWAY\\n )\\n copiedchanges[originalfname] = origpchange\\n else:\\n origpchange = copiedchanges[originalfname]\\n origpchange.awayPaths.append(fname)\\n pchange.type = DiffChangeType.COPY_HERE\\n \\n if filemode != originalmode:\\n pchange.addoldmode(originalmode)\\n pchange.addnewmode(filemode)\\n else: # Brand-new file\\n pchange.addnewmode(gitmode[fctx.flags()])\\n pchange.type = DiffChangeType.ADD\\n \\n if fctx.isbinary() or notutf8(fctx):\\n makebinary(pchange, fctx)\\n if renamed:\\n addoldbinary(pchange, fctx)\\n else:\\n maketext(pchange, ctx, fname)\\n \\n pdiff.addchange(pchange)\\n \\n for _path, copiedchange in copiedchanges.items():\\n pdiff.addchange(copiedchange)\\n for _path, movedchange in movedchanges.items():\\n pdiff.addchange(movedchange)\\n \\n \\n def creatediff(ctx):\\n \\\"\\\"\\\"create a Differential Diff\\\"\\\"\\\"\\n repo = ctx.repo()\\n repophid = getrepophid(repo)\\n # Create a \\\"Differential Diff\\\" via \\\"differential.creatediff\\\" API\\n pdiff = phabdiff(\\n sourceControlBaseRevision=b'%s' % ctx.p1().hex(),\\n branch=b'%s' % ctx.branch(),\\n )\\n modified, added, removed, _d, _u, _i, _c = ctx.p1().status(ctx)\\n # addadded will remove moved files from removed, so addremoved won't get\\n # them\\n addadded(pdiff, ctx, added, removed)\\n addmodified(pdiff, ctx, modified)\\n addremoved(pdiff, ctx, removed)\\n if repophid:\\n pdiff.repositoryPHID = repophid\\n diff = callconduit(\\n repo.ui,\\n b'differential.creatediff',\\n pycompat.byteskwargs(attr.asdict(pdiff)),\\n )\\n if not diff:\\n raise error.Abort(_(b'cannot create diff for %s') % ctx)\\n return diff\\n \\n \\n def writediffproperties(ctx, diff):\\n \\\"\\\"\\\"write metadata to diff so patches could be applied 
losslessly\\\"\\\"\\\"\\n # creatediff returns with a diffid but query returns with an id\\n diffid = diff.get(b'diffid', diff.get(b'id'))\\n params = {\\n b'diff_id': diffid,\\n b'name': b'hg:meta',\\n b'data': templatefilters.json(\\n {\\n b'user': ctx.user(),\\n b'date': b'%d %d' % ctx.date(),\\n b'branch': ctx.branch(),\\n b'node': ctx.hex(),\\n b'parent': ctx.p1().hex(),\\n }\\n ),\\n }\\n callconduit(ctx.repo().ui, b'differential.setdiffproperty', params)\\n \\n params = {\\n b'diff_id': diffid,\\n b'name': b'local:commits',\\n b'data': templatefilters.json(\\n {\\n ctx.hex(): {\\n b'author': stringutil.person(ctx.user()),\\n b'authorEmail': stringutil.email(ctx.user()),\\n b'time': int(ctx.date()[0]),\\n b'commit': ctx.hex(),\\n b'parents': [ctx.p1().hex()],\\n b'branch': ctx.branch(),\\n },\\n }\\n ),\\n }\\n callconduit(ctx.repo().ui, b'differential.setdiffproperty', params)\\n \\n \\n def createdifferentialrevision(\\n ctx,\\n revid=None,\\n parentrevphid=None,\\n oldnode=None,\\n olddiff=None,\\n actions=None,\\n comment=None,\\n ):\\n \\\"\\\"\\\"create or update a Differential Revision\\n \\n If revid is None, create a new Differential Revision, otherwise update\\n revid. 
If parentrevphid is not None, set it as a dependency.\\n \\n If oldnode is not None, check if the patch content (without commit message\\n and metadata) has changed before creating another diff.\\n \\n If actions is not None, they will be appended to the transaction.\\n \\\"\\\"\\\"\\n repo = ctx.repo()\\n if oldnode:\\n diffopts = mdiff.diffopts(git=True, context=32767)\\n oldctx = repo.unfiltered()[oldnode]\\n neednewdiff = getdiff(ctx, diffopts) != getdiff(oldctx, diffopts)\\n else:\\n neednewdiff = True\\n \\n transactions = []\\n if neednewdiff:\\n diff = creatediff(ctx)\\n transactions.append({b'type': b'update', b'value': diff[b'phid']})\\n if comment:\\n transactions.append({b'type': b'comment', b'value': comment})\\n else:\\n # Even if we don't need to upload a new diff because the patch content\\n # does not change. We might still need to update its metadata so\\n # pushers could know the correct node metadata.\\n assert olddiff\\n diff = olddiff\\n writediffproperties(ctx, diff)\\n \\n # Set the parent Revision every time, so commit re-ordering is picked-up\\n if parentrevphid:\\n transactions.append(\\n {b'type': b'parents.set', b'value': [parentrevphid]}\\n )\\n \\n if actions:\\n transactions += actions\\n \\n # Parse commit message and update related fields.\\n desc = ctx.description()\\n info = callconduit(\\n repo.ui, b'differential.parsecommitmessage', {b'corpus': desc}\\n )\\n for k, v in info[b'fields'].items():\\n if k in [b'title', b'summary', b'testPlan']:\\n transactions.append({b'type': k, b'value': v})\\n \\n params = {b'transactions': transactions}\\n if revid is not None:\\n # Update an existing Differential Revision\\n params[b'objectIdentifier'] = revid\\n \\n revision = callconduit(repo.ui, b'differential.revision.edit', params)\\n if not revision:\\n raise error.Abort(_(b'cannot create revision for %s') % ctx)\\n \\n return revision, diff\\n \\n \\n def userphids(repo, names):\\n \\\"\\\"\\\"convert user names to PHIDs\\\"\\\"\\\"\\n 
names = [name.lower() for name in names]\\n query = {b'constraints': {b'usernames': names}}\\n result = callconduit(repo.ui, b'user.search', query)\\n # username not found is not an error of the API. So check if we have missed\\n # some names here.\\n data = result[b'data']\\n resolved = set(entry[b'fields'][b'username'].lower() for entry in data)\\n unresolved = set(names) - resolved\\n if unresolved:\\n raise error.Abort(\\n _(b'unknown username: %s') % b' '.join(sorted(unresolved))\\n )\\n return [entry[b'phid'] for entry in data]\\n \\n \\n @vcrcommand(\\n b'phabsend',\\n [\\n (b'r', b'rev', [], _(b'revisions to send'), _(b'REV')),\\n (b'', b'amend', True, _(b'update commit messages')),\\n (b'', b'reviewer', [], _(b'specify reviewers')),\\n (b'', b'blocker', [], _(b'specify blocking reviewers')),\\n (\\n b'm',\\n b'comment',\\n b'',\\n _(b'add a comment to Revisions with new\\/updated Diffs'),\\n ),\\n (b'', b'confirm', None, _(b'ask for confirmation before sending')),\\n ],\\n _(b'REV [OPTIONS]'),\\n helpcategory=command.CATEGORY_IMPORT_EXPORT,\\n )\\n def phabsend(ui, repo, *revs, **opts):\\n \\\"\\\"\\\"upload changesets to Phabricator\\n \\n If there are multiple revisions specified, they will be send as a stack\\n with a linear dependencies relationship using the order specified by the\\n revset.\\n \\n For the first time uploading changesets, local tags will be created to\\n maintain the association. After the first time, phabsend will check\\n obsstore and tags information so it can figure out whether to update an\\n existing Differential Revision, or create a new one.\\n \\n If --amend is set, update commit messages so they have the\\n ``Differential Revision`` URL, remove related tags. This is similar to what\\n arcanist will do, and is more desired in author-push workflows. Otherwise,\\n use local tags to record the ``Differential Revision`` association.\\n \\n The --confirm option lets you confirm changesets before sending them. 
You\\n can also add following to your configuration file to make it default\\n behaviour::\\n \\n [phabsend]\\n confirm = true\\n \\n phabsend will check obsstore and the above association to decide whether to\\n update an existing Differential Revision, or create a new one.\\n \\\"\\\"\\\"\\n opts = pycompat.byteskwargs(opts)\\n revs = list(revs) + opts.get(b'rev', [])\\n revs = scmutil.revrange(repo, revs)\\n revs.sort() # ascending order to preserve topological parent\\/child in phab\\n \\n if not revs:\\n raise error.Abort(_(b'phabsend requires at least one changeset'))\\n if opts.get(b'amend'):\\n cmdutil.checkunfinished(repo)\\n \\n # {newnode: (oldnode, olddiff, olddrev}\\n oldmap = getoldnodedrevmap(repo, [repo[r].node() for r in revs])\\n \\n confirm = ui.configbool(b'phabsend', b'confirm')\\n confirm |= bool(opts.get(b'confirm'))\\n if confirm:\\n confirmed = _confirmbeforesend(repo, revs, oldmap)\\n if not confirmed:\\n raise error.Abort(_(b'phabsend cancelled'))\\n \\n actions = []\\n reviewers = opts.get(b'reviewer', [])\\n blockers = opts.get(b'blocker', [])\\n phids = []\\n if reviewers:\\n phids.extend(userphids(repo, reviewers))\\n if blockers:\\n phids.extend(\\n map(lambda phid: b'blocking(%s)' % phid, userphids(repo, blockers))\\n )\\n if phids:\\n actions.append({b'type': b'reviewers.add', b'value': phids})\\n \\n drevids = [] # [int]\\n diffmap = {} # {newnode: diff}\\n \\n # Send patches one by one so we know their Differential Revision PHIDs and\\n # can provide dependency relationship\\n lastrevphid = None\\n for rev in revs:\\n ui.debug(b'sending rev %d\\\\n' % rev)\\n ctx = repo[rev]\\n \\n # Get Differential Revision ID\\n oldnode, olddiff, revid = oldmap.get(ctx.node(), (None, None, None))\\n if oldnode != ctx.node() or opts.get(b'amend'):\\n # Create or update Differential Revision\\n revision, diff = createdifferentialrevision(\\n ctx,\\n revid,\\n lastrevphid,\\n oldnode,\\n olddiff,\\n actions,\\n opts.get(b'comment'),\\n )\\n 
diffmap[ctx.node()] = diff\\n newrevid = int(revision[b'object'][b'id'])\\n newrevphid = revision[b'object'][b'phid']\\n if revid:\\n action = b'updated'\\n else:\\n action = b'created'\\n \\n # Create a local tag to note the association, if commit message\\n # does not have it already\\n m = _differentialrevisiondescre.search(ctx.description())\\n if not m or int(m.group('id')) != newrevid:\\n tagname = b'D%d' % newrevid\\n tags.tag(\\n repo,\\n tagname,\\n ctx.node(),\\n message=None,\\n user=None,\\n date=None,\\n local=True,\\n )\\n else:\\n # Nothing changed. But still set \\\"newrevphid\\\" so the next revision\\n # could depend on this one and \\\"newrevid\\\" for the summary line.\\n newrevphid = querydrev(repo, b'%d' % revid)[0][b'phid']\\n newrevid = revid\\n action = b'skipped'\\n \\n actiondesc = ui.label(\\n {\\n b'created': _(b'created'),\\n b'skipped': _(b'skipped'),\\n b'updated': _(b'updated'),\\n }[action],\\n b'phabricator.action.%s' % action,\\n )\\n drevdesc = ui.label(b'D%d' % newrevid, b'phabricator.drev')\\n nodedesc = ui.label(bytes(ctx), b'phabricator.node')\\n desc = ui.label(ctx.description().split(b'\\\\n')[0], b'phabricator.desc')\\n ui.write(\\n _(b'%s - %s - %s: %s\\\\n') % (drevdesc, actiondesc, nodedesc, desc)\\n )\\n drevids.append(newrevid)\\n lastrevphid = newrevphid\\n \\n # Update commit messages and remove tags\\n if opts.get(b'amend'):\\n unfi = repo.unfiltered()\\n drevs = callconduit(ui, b'differential.query', {b'ids': drevids})\\n with repo.wlock(), repo.lock(), repo.transaction(b'phabsend'):\\n wnode = unfi[b'.'].node()\\n mapping = {} # {oldnode: [newnode]}\\n for i, rev in enumerate(revs):\\n old = unfi[rev]\\n drevid = drevids[i]\\n drev = [d for d in drevs if int(d[b'id']) == drevid][0]\\n newdesc = getdescfromdrev(drev)\\n # Make sure commit message contain \\\"Differential Revision\\\"\\n if old.description() != newdesc:\\n if old.phase() == phases.public:\\n ui.warn(\\n _(b\\\"warning: not updating public commit 
%s\\\\n\\\")\\n % scmutil.formatchangeid(old)\\n )\\n continue\\n parents = [\\n mapping.get(old.p1().node(), (old.p1(),))[0],\\n mapping.get(old.p2().node(), (old.p2(),))[0],\\n ]\\n new = context.metadataonlyctx(\\n repo,\\n old,\\n parents=parents,\\n text=newdesc,\\n user=old.user(),\\n date=old.date(),\\n extra=old.extra(),\\n )\\n \\n newnode = new.commit()\\n \\n mapping[old.node()] = [newnode]\\n # Update diff property\\n # If it fails just warn and keep going, otherwise the DREV\\n # associations will be lost\\n try:\\n writediffproperties(unfi[newnode], diffmap[old.node()])\\n except util.urlerr.urlerror:\\n ui.warnnoi18n(\\n b'Failed to update metadata for D%d\\\\n' % drevid\\n )\\n # Remove local tags since it's no longer necessary\\n tagname = b'D%d' % drevid\\n if tagname in repo.tags():\\n tags.tag(\\n repo,\\n tagname,\\n nullid,\\n message=None,\\n user=None,\\n date=None,\\n local=True,\\n )\\n scmutil.cleanupnodes(repo, mapping, b'phabsend', fixphase=True)\\n if wnode in mapping:\\n unfi.setparents(mapping[wnode][0])\\n \\n \\n # Map from \\\"hg:meta\\\" keys to header understood by \\\"hg import\\\". 
The order is\\n # consistent with \\\"hg export\\\" output.\\n _metanamemap = util.sortdict(\\n [\\n (b'user', b'User'),\\n (b'date', b'Date'),\\n (b'branch', b'Branch'),\\n (b'node', b'Node ID'),\\n (b'parent', b'Parent '),\\n ]\\n )\\n \\n \\n def _confirmbeforesend(repo, revs, oldmap):\\n url, token = readurltoken(repo.ui)\\n ui = repo.ui\\n for rev in revs:\\n ctx = repo[rev]\\n desc = ctx.description().splitlines()[0]\\n oldnode, olddiff, drevid = oldmap.get(ctx.node(), (None, None, None))\\n if drevid:\\n drevdesc = ui.label(b'D%d' % drevid, b'phabricator.drev')\\n else:\\n drevdesc = ui.label(_(b'NEW'), b'phabricator.drev')\\n \\n ui.write(\\n _(b'%s - %s: %s\\\\n')\\n % (\\n drevdesc,\\n ui.label(bytes(ctx), b'phabricator.node'),\\n ui.label(desc, b'phabricator.desc'),\\n )\\n )\\n \\n if ui.promptchoice(\\n _(b'Send the above changes to %s (yn)?$$ &Yes $$ &No') % url\\n ):\\n return False\\n \\n return True\\n \\n \\n _knownstatusnames = {\\n b'accepted',\\n b'needsreview',\\n b'needsrevision',\\n b'closed',\\n b'abandoned',\\n b'changesplanned',\\n }\\n \\n \\n def _getstatusname(drev):\\n \\\"\\\"\\\"get normalized status name from a Differential Revision\\\"\\\"\\\"\\n return drev[b'statusName'].replace(b' ', b'').lower()\\n \\n \\n # Small language to specify differential revisions. 
Support symbols: (), :X,\\n # +, and -.\\n \\n _elements = {\\n # token-type: binding-strength, primary, prefix, infix, suffix\\n b'(': (12, None, (b'group', 1, b')'), None, None),\\n b':': (8, None, (b'ancestors', 8), None, None),\\n b'&': (5, None, None, (b'and_', 5), None),\\n b'+': (4, None, None, (b'add', 4), None),\\n b'-': (4, None, None, (b'sub', 4), None),\\n b')': (0, None, None, None, None),\\n b'symbol': (0, b'symbol', None, None, None),\\n b'end': (0, None, None, None, None),\\n }\\n \\n \\n def _tokenize(text):\\n view = memoryview(text) # zero-copy slice\\n special = b'():+-& '\\n pos = 0\\n length = len(text)\\n while pos \\u003c length:\\n symbol = b''.join(\\n itertools.takewhile(\\n lambda ch: ch not in special, pycompat.iterbytestr(view[pos:])\\n )\\n )\\n if symbol:\\n yield (b'symbol', symbol, pos)\\n pos += len(symbol)\\n else: # special char, ignore space\\n if text[pos : pos + 1] != b' ':\\n yield (text[pos : pos + 1], None, pos)\\n pos += 1\\n yield (b'end', None, pos)\\n \\n \\n def _parse(text):\\n tree, pos = parser.parser(_elements).parse(_tokenize(text))\\n if pos != len(text):\\n raise error.ParseError(b'invalid token', pos)\\n return tree\\n \\n \\n def _parsedrev(symbol):\\n \\\"\\\"\\\"str -\\u003e int or None, ex. 
'D45' -\\u003e 45; '12' -\\u003e 12; 'x' -\\u003e None\\\"\\\"\\\"\\n if symbol.startswith(b'D') and symbol[1:].isdigit():\\n return int(symbol[1:])\\n if symbol.isdigit():\\n return int(symbol)\\n \\n \\n def _prefetchdrevs(tree):\\n \\\"\\\"\\\"return ({single-drev-id}, {ancestor-drev-id}) to prefetch\\\"\\\"\\\"\\n drevs = set()\\n ancestordrevs = set()\\n op = tree[0]\\n if op == b'symbol':\\n r = _parsedrev(tree[1])\\n if r:\\n drevs.add(r)\\n elif op == b'ancestors':\\n r, a = _prefetchdrevs(tree[1])\\n drevs.update(r)\\n ancestordrevs.update(r)\\n ancestordrevs.update(a)\\n else:\\n for t in tree[1:]:\\n r, a = _prefetchdrevs(t)\\n drevs.update(r)\\n ancestordrevs.update(a)\\n return drevs, ancestordrevs\\n \\n \\n def querydrev(repo, spec):\\n \\\"\\\"\\\"return a list of \\\"Differential Revision\\\" dicts\\n \\n spec is a string using a simple query language, see docstring in phabread\\n for details.\\n \\n A \\\"Differential Revision dict\\\" looks like:\\n \\n {\\n \\\"activeDiffPHID\\\": \\\"PHID-DIFF-xoqnjkobbm6k4dk6hi72\\\",\\n \\\"authorPHID\\\": \\\"PHID-USER-tv3ohwc4v4jeu34otlye\\\",\\n \\\"auxiliary\\\": {\\n \\\"phabricator:depends-on\\\": [\\n \\\"PHID-DREV-gbapp366kutjebt7agcd\\\"\\n ]\\n \\\"phabricator:projects\\\": [],\\n },\\n \\\"branch\\\": \\\"default\\\",\\n \\\"ccs\\\": [],\\n \\\"commits\\\": [],\\n \\\"dateCreated\\\": \\\"1499181406\\\",\\n \\\"dateModified\\\": \\\"1499182103\\\",\\n \\\"diffs\\\": [\\n \\\"3\\\",\\n \\\"4\\\",\\n ],\\n \\\"hashes\\\": [],\\n \\\"id\\\": \\\"2\\\",\\n \\\"lineCount\\\": \\\"2\\\",\\n \\\"phid\\\": \\\"PHID-DREV-672qvysjcczopag46qty\\\",\\n \\\"properties\\\": {},\\n \\\"repositoryPHID\\\": \\\"PHID-REPO-hub2hx62ieuqeheznasv\\\",\\n \\\"reviewers\\\": [],\\n \\\"sourcePath\\\": null\\n \\\"status\\\": \\\"0\\\",\\n \\\"statusName\\\": \\\"Needs Review\\\",\\n \\\"summary\\\": \\\"\\\",\\n \\\"testPlan\\\": \\\"\\\",\\n \\\"title\\\": \\\"example\\\",\\n \\\"uri\\\": 
\\\"https:\\/\\/phab.example.com\\/D2\\\",\\n }\\n \\\"\\\"\\\"\\n # TODO: replace differential.query and differential.querydiffs with\\n # differential.diff.search because the former (and their output) are\\n # frozen, and planned to be deprecated and removed.\\n \\n def fetch(params):\\n \\\"\\\"\\\"params -\\u003e single drev or None\\\"\\\"\\\"\\n key = (params.get(b'ids') or params.get(b'phids') or [None])[0]\\n if key in prefetched:\\n return prefetched[key]\\n drevs = callconduit(repo.ui, b'differential.query', params)\\n # Fill prefetched with the result\\n for drev in drevs:\\n prefetched[drev[b'phid']] = drev\\n prefetched[int(drev[b'id'])] = drev\\n if key not in prefetched:\\n raise error.Abort(\\n _(b'cannot get Differential Revision %r') % params\\n )\\n return prefetched[key]\\n \\n def getstack(topdrevids):\\n \\\"\\\"\\\"given a top, get a stack from the bottom, [id] -\\u003e [id]\\\"\\\"\\\"\\n visited = set()\\n result = []\\n queue = [{b'ids': [i]} for i in topdrevids]\\n while queue:\\n params = queue.pop()\\n drev = fetch(params)\\n if drev[b'id'] in visited:\\n continue\\n visited.add(drev[b'id'])\\n result.append(int(drev[b'id']))\\n auxiliary = drev.get(b'auxiliary', {})\\n depends = auxiliary.get(b'phabricator:depends-on', [])\\n for phid in depends:\\n queue.append({b'phids': [phid]})\\n result.reverse()\\n return smartset.baseset(result)\\n \\n # Initialize prefetch cache\\n prefetched = {} # {id or phid: drev}\\n \\n tree = _parse(spec)\\n drevs, ancestordrevs = _prefetchdrevs(tree)\\n \\n # developer config: phabricator.batchsize\\n batchsize = repo.ui.configint(b'phabricator', b'batchsize')\\n \\n # Prefetch Differential Revisions in batch\\n tofetch = set(drevs)\\n for r in ancestordrevs:\\n tofetch.update(range(max(1, r - batchsize), r + 1))\\n if drevs:\\n fetch({b'ids': list(tofetch)})\\n validids = sorted(set(getstack(list(ancestordrevs))) | set(drevs))\\n \\n # Walk through the tree, return smartsets\\n def walk(tree):\\n op = 
tree[0]\\n if op == b'symbol':\\n drev = _parsedrev(tree[1])\\n if drev:\\n return smartset.baseset([drev])\\n elif tree[1] in _knownstatusnames:\\n drevs = [\\n r\\n for r in validids\\n if _getstatusname(prefetched[r]) == tree[1]\\n ]\\n return smartset.baseset(drevs)\\n else:\\n raise error.Abort(_(b'unknown symbol: %s') % tree[1])\\n elif op in {b'and_', b'add', b'sub'}:\\n assert len(tree) == 3\\n return getattr(operator, op)(walk(tree[1]), walk(tree[2]))\\n elif op == b'group':\\n return walk(tree[1])\\n elif op == b'ancestors':\\n return getstack(walk(tree[1]))\\n else:\\n raise error.ProgrammingError(b'illegal tree: %r' % tree)\\n \\n return [prefetched[r] for r in walk(tree)]\\n \\n \\n def getdescfromdrev(drev):\\n \\\"\\\"\\\"get description (commit message) from \\\"Differential Revision\\\"\\n \\n This is similar to differential.getcommitmessage API. But we only care\\n about limited fields: title, summary, test plan, and URL.\\n \\\"\\\"\\\"\\n title = drev[b'title']\\n summary = drev[b'summary'].rstrip()\\n testplan = drev[b'testPlan'].rstrip()\\n if testplan:\\n testplan = b'Test Plan:\\\\n%s' % testplan\\n uri = b'Differential Revision: %s' % drev[b'uri']\\n return b'\\\\n\\\\n'.join(filter(None, [title, summary, testplan, uri]))\\n \\n \\n def getdiffmeta(diff):\\n \\\"\\\"\\\"get commit metadata (date, node, user, p1) from a diff object\\n \\n The metadata could be \\\"hg:meta\\\", sent by phabsend, like:\\n \\n \\\"properties\\\": {\\n \\\"hg:meta\\\": {\\n \\\"branch\\\": \\\"default\\\",\\n \\\"date\\\": \\\"1499571514 25200\\\",\\n \\\"node\\\": \\\"98c08acae292b2faf60a279b4189beb6cff1414d\\\",\\n \\\"user\\\": \\\"Foo Bar \\u003cfoo@example.com\\u003e\\\",\\n \\\"parent\\\": \\\"6d0abad76b30e4724a37ab8721d630394070fe16\\\"\\n }\\n }\\n \\n Or converted from \\\"local:commits\\\", sent by \\\"arc\\\", like:\\n \\n \\\"properties\\\": {\\n \\\"local:commits\\\": {\\n \\\"98c08acae292b2faf60a279b4189beb6cff1414d\\\": {\\n \\\"author\\\": 
\\\"Foo Bar\\\",\\n \\\"authorEmail\\\": \\\"foo@example.com\\\"\\n \\\"branch\\\": \\\"default\\\",\\n \\\"commit\\\": \\\"98c08acae292b2faf60a279b4189beb6cff1414d\\\",\\n \\\"local\\\": \\\"1000\\\",\\n \\\"message\\\": \\\"...\\\",\\n \\\"parents\\\": [\\\"6d0abad76b30e4724a37ab8721d630394070fe16\\\"],\\n \\\"rev\\\": \\\"98c08acae292b2faf60a279b4189beb6cff1414d\\\",\\n \\\"summary\\\": \\\"...\\\",\\n \\\"tag\\\": \\\"\\\",\\n \\\"time\\\": 1499546314,\\n }\\n }\\n }\\n \\n Note: metadata extracted from \\\"local:commits\\\" will lose time zone\\n information.\\n \\\"\\\"\\\"\\n props = diff.get(b'properties') or {}\\n meta = props.get(b'hg:meta')\\n if not meta:\\n if props.get(b'local:commits'):\\n commit = sorted(props[b'local:commits'].values())[0]\\n meta = {}\\n if b'author' in commit and b'authorEmail' in commit:\\n meta[b'user'] = b'%s \\u003c%s\\u003e' % (\\n commit[b'author'],\\n commit[b'authorEmail'],\\n )\\n if b'time' in commit:\\n meta[b'date'] = b'%d 0' % int(commit[b'time'])\\n if b'branch' in commit:\\n meta[b'branch'] = commit[b'branch']\\n node = commit.get(b'commit', commit.get(b'rev'))\\n if node:\\n meta[b'node'] = node\\n if len(commit.get(b'parents', ())) \\u003e= 1:\\n meta[b'parent'] = commit[b'parents'][0]\\n else:\\n meta = {}\\n if b'date' not in meta and b'dateCreated' in diff:\\n meta[b'date'] = b'%s 0' % diff[b'dateCreated']\\n if b'branch' not in meta and diff.get(b'branch'):\\n meta[b'branch'] = diff[b'branch']\\n if b'parent' not in meta and diff.get(b'sourceControlBaseRevision'):\\n meta[b'parent'] = diff[b'sourceControlBaseRevision']\\n return meta\\n \\n \\n-def readpatch(repo, drevs, write):\\n+def readpatch(ui, drevs, write):\\n \\\"\\\"\\\"generate plain-text patch readable by 'hg import'\\n \\n write is usually ui.write. 
drevs is what \\\"querydrev\\\" returns, results of\\n \\\"differential.query\\\".\\n \\\"\\\"\\\"\\n # Prefetch hg:meta property for all diffs\\n diffids = sorted(set(max(int(v) for v in drev[b'diffs']) for drev in drevs))\\n- diffs = callconduit(repo.ui, b'differential.querydiffs', {b'ids': diffids})\\n+ diffs = callconduit(ui, b'differential.querydiffs', {b'ids': diffids})\\n \\n # Generate patch for each drev\\n for drev in drevs:\\n- repo.ui.note(_(b'reading D%s\\\\n') % drev[b'id'])\\n+ ui.note(_(b'reading D%s\\\\n') % drev[b'id'])\\n \\n diffid = max(int(v) for v in drev[b'diffs'])\\n- body = callconduit(\\n- repo.ui, b'differential.getrawdiff', {b'diffID': diffid}\\n- )\\n+ body = callconduit(ui, b'differential.getrawdiff', {b'diffID': diffid})\\n desc = getdescfromdrev(drev)\\n header = b'# HG changeset patch\\\\n'\\n \\n # Try to preserve metadata from hg:meta property. Write hg patch\\n # headers that can be read by the \\\"import\\\" command. See patchheadermap\\n # and extract in mercurial\\/patch.py for supported headers.\\n meta = getdiffmeta(diffs[b'%d' % diffid])\\n for k in _metanamemap.keys():\\n if k in meta:\\n header += b'# %s %s\\\\n' % (_metanamemap[k], meta[k])\\n \\n content = b'%s%s\\\\n%s' % (header, desc, body)\\n write(content)\\n \\n \\n @vcrcommand(\\n b'phabread',\\n [(b'', b'stack', False, _(b'read dependencies'))],\\n _(b'DREVSPEC [OPTIONS]'),\\n helpcategory=command.CATEGORY_IMPORT_EXPORT,\\n )\\n def phabread(ui, repo, spec, **opts):\\n \\\"\\\"\\\"print patches from Phabricator suitable for importing\\n \\n DREVSPEC could be a Differential Revision identity, like ``D123``, or just\\n the number ``123``. It could also have common operators like ``+``, ``-``,\\n ``&``, ``(``, ``)`` for complex queries. Prefix ``:`` could be used to\\n select a stack.\\n \\n ``abandoned``, ``accepted``, ``closed``, ``needsreview``, ``needsrevision``\\n could be used to filter patches by status. 
For performance reason, they\\n only represent a subset of non-status selections and cannot be used alone.\\n \\n For example, ``:D6+8-(2+D4)`` selects a stack up to D6, plus D8 and exclude\\n D2 and D4. ``:D9 & needsreview`` selects \\\"Needs Review\\\" revisions in a\\n stack up to D9.\\n \\n If --stack is given, follow dependencies information and read all patches.\\n It is equivalent to the ``:`` operator.\\n \\\"\\\"\\\"\\n opts = pycompat.byteskwargs(opts)\\n if opts.get(b'stack'):\\n spec = b':(%s)' % spec\\n drevs = querydrev(repo, spec)\\n- readpatch(repo, drevs, ui.write)\\n+ readpatch(repo.ui, drevs, ui.write)\\n \\n \\n @vcrcommand(\\n b'phabupdate',\\n [\\n (b'', b'accept', False, _(b'accept revisions')),\\n (b'', b'reject', False, _(b'reject revisions')),\\n (b'', b'abandon', False, _(b'abandon revisions')),\\n (b'', b'reclaim', False, _(b'reclaim revisions')),\\n (b'm', b'comment', b'', _(b'comment on the last revision')),\\n ],\\n _(b'DREVSPEC [OPTIONS]'),\\n helpcategory=command.CATEGORY_IMPORT_EXPORT,\\n )\\n def phabupdate(ui, repo, spec, **opts):\\n \\\"\\\"\\\"update Differential Revision in batch\\n \\n DREVSPEC selects revisions. 
See :hg:`help phabread` for its usage.\\n \\\"\\\"\\\"\\n opts = pycompat.byteskwargs(opts)\\n flags = [n for n in b'accept reject abandon reclaim'.split() if opts.get(n)]\\n if len(flags) \\u003e 1:\\n raise error.Abort(_(b'%s cannot be used together') % b', '.join(flags))\\n \\n actions = []\\n for f in flags:\\n actions.append({b'type': f, b'value': True})\\n \\n drevs = querydrev(repo, spec)\\n for i, drev in enumerate(drevs):\\n if i + 1 == len(drevs) and opts.get(b'comment'):\\n actions.append({b'type': b'comment', b'value': opts[b'comment']})\\n if actions:\\n params = {\\n b'objectIdentifier': drev[b'phid'],\\n b'transactions': actions,\\n }\\n callconduit(ui, b'differential.revision.edit', params)\\n \\n \\n @eh.templatekeyword(b'phabreview', requires={b'ctx'})\\n def template_review(context, mapping):\\n \\\"\\\"\\\":phabreview: Object describing the review for this changeset.\\n Has attributes `url` and `id`.\\n \\\"\\\"\\\"\\n ctx = context.resource(mapping, b'ctx')\\n m = _differentialrevisiondescre.search(ctx.description())\\n if m:\\n return templateutil.hybriddict(\\n {b'url': m.group('url'), b'id': b\\\"D%s\\\" % m.group('id'),}\\n )\\n else:\\n tags = ctx.repo().nodetags(ctx.node())\\n for t in tags:\\n if _differentialrevisiontagre.match(t):\\n url = ctx.repo().ui.config(b'phabricator', b'url')\\n if not url.endswith(b'\\/'):\\n url += b'\\/'\\n url += t\\n \\n return templateutil.hybriddict({b'url': url, b'id': t,})\\n return None\\n \\n \\n @eh.templatekeyword(b'phabstatus', requires={b'ctx', b'repo', b'ui'})\\n def template_status(context, mapping):\\n \\\"\\\"\\\":phabstatus: String. 
Status of Phabricator differential.\\n \\\"\\\"\\\"\\n ctx = context.resource(mapping, b'ctx')\\n repo = context.resource(mapping, b'repo')\\n ui = context.resource(mapping, b'ui')\\n \\n rev = ctx.rev()\\n try:\\n drevid = getdrevmap(repo, [rev])[rev]\\n except KeyError:\\n return None\\n drevs = callconduit(ui, b'differential.query', {b'ids': [drevid]})\\n for drev in drevs:\\n if int(drev[b'id']) == drevid:\\n return templateutil.hybriddict(\\n {b'url': drev[b'uri'], b'status': drev[b'statusName'],}\\n )\\n return None\\n \\n \\n @show.showview(b'phabstatus', csettopic=b'work')\\n def phabstatusshowview(ui, repo, displayer):\\n \\\"\\\"\\\"Phabricator differiential status\\\"\\\"\\\"\\n revs = repo.revs('sort(_underway(), topo)')\\n drevmap = getdrevmap(repo, revs)\\n unknownrevs, drevids, revsbydrevid = [], set([]), {}\\n for rev, drevid in pycompat.iteritems(drevmap):\\n if drevid is not None:\\n drevids.add(drevid)\\n revsbydrevid.setdefault(drevid, set([])).add(rev)\\n else:\\n unknownrevs.append(rev)\\n \\n drevs = callconduit(ui, b'differential.query', {b'ids': list(drevids)})\\n drevsbyrev = {}\\n for drev in drevs:\\n for rev in revsbydrevid[int(drev[b'id'])]:\\n drevsbyrev[rev] = drev\\n \\n def phabstatus(ctx):\\n drev = drevsbyrev[ctx.rev()]\\n status = ui.label(\\n b'%(statusName)s' % drev,\\n b'phabricator.status.%s' % _getstatusname(drev),\\n )\\n ui.write(b\\\"\\\\n%s %s\\\\n\\\" % (drev[b'uri'], status))\\n \\n revs -= smartset.baseset(unknownrevs)\\n revdag = graphmod.dagwalker(repo, revs)\\n \\n ui.setconfig(b'experimental', b'graphshorten', True)\\n displayer._exthook = phabstatus\\n nodelen = show.longestshortest(repo, revs)\\n logcmdutil.displaygraph(\\n ui,\\n repo,\\n revdag,\\n displayer,\\n graphmod.asciiedges,\\n props={b'nodelen': nodelen},\\n )\\n\"}]}],\"properties\":[]}},\"error_code\":null,\"error_info\":null}"
+ },
+ "headers": {
+ "x-frame-options": [
+ "Deny"
+ ],
+ "strict-transport-security": [
+ "max-age=0; includeSubdomains; preload"
+ ],
+ "transfer-encoding": [
+ "chunked"
+ ],
+ "cache-control": [
+ "no-store"
+ ],
+ "referrer-policy": [
+ "no-referrer"
+ ],
+ "x-content-type-options": [
+ "nosniff"
+ ],
+ "x-xss-protection": [
+ "1; mode=block"
+ ],
+ "server": [
+ "Apache/2.4.10 (Debian)"
+ ],
+ "date": [
+ "Wed, 04 Mar 2020 22:05:22 GMT"
+ ],
+ "content-type": [
+ "application/json"
+ ],
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
+ ]
+ },
+ "status": {
+ "message": "OK",
+ "code": 200
+ }
+ },
+ "request": {
+ "body": "params=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22ids%22%3A+%5B20441%2C+20442%2C+20443%5D%7D&output=json&__conduit__=1",
+ "uri": "https://phab.mercurial-scm.org//api/differential.querydiffs",
+ "headers": {
+ "content-length": [
+ "165"
+ ],
+ "accept": [
+ "application/mercurial-0.1"
+ ],
+ "host": [
+ "phab.mercurial-scm.org"
+ ],
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.3+33-bb58931d0c4f)"
+ ],
+ "content-type": [
+ "application/x-www-form-urlencoded"
+ ]
+ },
+ "method": "POST"
+ }
+ },
+ {
+ "response": {
+ "body": {
+ "string": "{\"result\":\"diff --git a\\/hgext\\/phabricator.py b\\/hgext\\/phabricator.py\\n--- a\\/hgext\\/phabricator.py\\n+++ b\\/hgext\\/phabricator.py\\n@@ -1609,7 +1609,7 @@\\n return meta\\n \\n \\n-def readpatch(repo, drevs, write):\\n+def readpatch(ui, drevs, write):\\n \\\"\\\"\\\"generate plain-text patch readable by 'hg import'\\n \\n write is usually ui.write. drevs is what \\\"querydrev\\\" returns, results of\\n@@ -1617,16 +1617,14 @@\\n \\\"\\\"\\\"\\n # Prefetch hg:meta property for all diffs\\n diffids = sorted(set(max(int(v) for v in drev[b'diffs']) for drev in drevs))\\n- diffs = callconduit(repo.ui, b'differential.querydiffs', {b'ids': diffids})\\n+ diffs = callconduit(ui, b'differential.querydiffs', {b'ids': diffids})\\n \\n # Generate patch for each drev\\n for drev in drevs:\\n- repo.ui.note(_(b'reading D%s\\\\n') % drev[b'id'])\\n+ ui.note(_(b'reading D%s\\\\n') % drev[b'id'])\\n \\n diffid = max(int(v) for v in drev[b'diffs'])\\n- body = callconduit(\\n- repo.ui, b'differential.getrawdiff', {b'diffID': diffid}\\n- )\\n+ body = callconduit(ui, b'differential.getrawdiff', {b'diffID': diffid})\\n desc = getdescfromdrev(drev)\\n header = b'# HG changeset patch\\\\n'\\n \\n@@ -1671,7 +1669,7 @@\\n if opts.get(b'stack'):\\n spec = b':(%s)' % spec\\n drevs = querydrev(repo, spec)\\n- readpatch(repo, drevs, ui.write)\\n+ readpatch(repo.ui, drevs, ui.write)\\n \\n \\n @vcrcommand(\\n\\n\",\"error_code\":null,\"error_info\":null}"
+ },
+ "headers": {
+ "x-frame-options": [
+ "Deny"
+ ],
+ "strict-transport-security": [
+ "max-age=0; includeSubdomains; preload"
+ ],
+ "transfer-encoding": [
+ "chunked"
+ ],
+ "cache-control": [
+ "no-store"
+ ],
+ "referrer-policy": [
+ "no-referrer"
+ ],
+ "x-content-type-options": [
+ "nosniff"
+ ],
+ "x-xss-protection": [
+ "1; mode=block"
+ ],
+ "server": [
+ "Apache/2.4.10 (Debian)"
+ ],
+ "date": [
+ "Wed, 04 Mar 2020 22:05:23 GMT"
+ ],
+ "content-type": [
+ "application/json"
+ ],
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
+ ]
+ },
+ "status": {
+ "message": "OK",
+ "code": 200
+ }
+ },
+ "request": {
+ "body": "params=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22diffID%22%3A+20441%7D&output=json&__conduit__=1",
+ "uri": "https://phab.mercurial-scm.org//api/differential.getrawdiff",
+ "headers": {
+ "content-length": [
+ "144"
+ ],
+ "accept": [
+ "application/mercurial-0.1"
+ ],
+ "host": [
+ "phab.mercurial-scm.org"
+ ],
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.3+33-bb58931d0c4f)"
+ ],
+ "content-type": [
+ "application/x-www-form-urlencoded"
+ ]
+ },
+ "method": "POST"
+ }
+ },
+ {
+ "response": {
+ "body": {
+ "string": "{\"result\":\"diff --git a\\/hgext\\/phabricator.py b\\/hgext\\/phabricator.py\\n--- a\\/hgext\\/phabricator.py\\n+++ b\\/hgext\\/phabricator.py\\n@@ -1040,11 +1040,11 @@\\n return revision, diff\\n \\n \\n-def userphids(repo, names):\\n+def userphids(ui, names):\\n \\\"\\\"\\\"convert user names to PHIDs\\\"\\\"\\\"\\n names = [name.lower() for name in names]\\n query = {b'constraints': {b'usernames': names}}\\n- result = callconduit(repo.ui, b'user.search', query)\\n+ result = callconduit(ui, b'user.search', query)\\n # username not found is not an error of the API. So check if we have missed\\n # some names here.\\n data = result[b'data']\\n@@ -1127,10 +1127,13 @@\\n blockers = opts.get(b'blocker', [])\\n phids = []\\n if reviewers:\\n- phids.extend(userphids(repo, reviewers))\\n+ phids.extend(userphids(repo.ui, reviewers))\\n if blockers:\\n phids.extend(\\n- map(lambda phid: b'blocking(%s)' % phid, userphids(repo, blockers))\\n+ map(\\n+ lambda phid: b'blocking(%s)' % phid,\\n+ userphids(repo.ui, blockers),\\n+ )\\n )\\n if phids:\\n actions.append({b'type': b'reviewers.add', b'value': phids})\\n\\n\",\"error_code\":null,\"error_info\":null}"
+ },
+ "headers": {
+ "x-frame-options": [
+ "Deny"
+ ],
+ "strict-transport-security": [
+ "max-age=0; includeSubdomains; preload"
+ ],
+ "transfer-encoding": [
+ "chunked"
+ ],
+ "cache-control": [
+ "no-store"
+ ],
+ "referrer-policy": [
+ "no-referrer"
+ ],
+ "x-content-type-options": [
+ "nosniff"
+ ],
+ "x-xss-protection": [
+ "1; mode=block"
+ ],
+ "server": [
+ "Apache/2.4.10 (Debian)"
+ ],
+ "date": [
+ "Wed, 04 Mar 2020 22:05:23 GMT"
+ ],
+ "content-type": [
+ "application/json"
+ ],
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
+ ]
+ },
+ "status": {
+ "message": "OK",
+ "code": 200
+ }
+ },
+ "request": {
+ "body": "params=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22diffID%22%3A+20443%7D&output=json&__conduit__=1",
+ "uri": "https://phab.mercurial-scm.org//api/differential.getrawdiff",
+ "headers": {
+ "content-length": [
+ "144"
+ ],
+ "accept": [
+ "application/mercurial-0.1"
+ ],
+ "host": [
+ "phab.mercurial-scm.org"
+ ],
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.3+33-bb58931d0c4f)"
+ ],
+ "content-type": [
+ "application/x-www-form-urlencoded"
+ ]
+ },
+ "method": "POST"
+ }
+ },
+ {
+ "response": {
+ "body": {
+ "string": "{\"result\":\"diff --git a\\/hgext\\/phabricator.py b\\/hgext\\/phabricator.py\\n--- a\\/hgext\\/phabricator.py\\n+++ b\\/hgext\\/phabricator.py\\n@@ -1183,7 +1183,7 @@\\n else:\\n # Nothing changed. But still set \\\"newrevphid\\\" so the next revision\\n # could depend on this one and \\\"newrevid\\\" for the summary line.\\n- newrevphid = querydrev(repo, b'%d' % revid)[0][b'phid']\\n+ newrevphid = querydrev(repo.ui, b'%d' % revid)[0][b'phid']\\n newrevid = revid\\n action = b'skipped'\\n \\n@@ -1398,7 +1398,7 @@\\n return drevs, ancestordrevs\\n \\n \\n-def querydrev(repo, spec):\\n+def querydrev(ui, spec):\\n \\\"\\\"\\\"return a list of \\\"Differential Revision\\\" dicts\\n \\n spec is a string using a simple query language, see docstring in phabread\\n@@ -1449,7 +1449,7 @@\\n key = (params.get(b'ids') or params.get(b'phids') or [None])[0]\\n if key in prefetched:\\n return prefetched[key]\\n- drevs = callconduit(repo.ui, b'differential.query', params)\\n+ drevs = callconduit(ui, b'differential.query', params)\\n # Fill prefetched with the result\\n for drev in drevs:\\n prefetched[drev[b'phid']] = drev\\n@@ -1486,7 +1486,7 @@\\n drevs, ancestordrevs = _prefetchdrevs(tree)\\n \\n # developer config: phabricator.batchsize\\n- batchsize = repo.ui.configint(b'phabricator', b'batchsize')\\n+ batchsize = ui.configint(b'phabricator', b'batchsize')\\n \\n # Prefetch Differential Revisions in batch\\n tofetch = set(drevs)\\n@@ -1668,7 +1668,7 @@\\n opts = pycompat.byteskwargs(opts)\\n if opts.get(b'stack'):\\n spec = b':(%s)' % spec\\n- drevs = querydrev(repo, spec)\\n+ drevs = querydrev(repo.ui, spec)\\n readpatch(repo.ui, drevs, ui.write)\\n \\n \\n@@ -1698,7 +1698,7 @@\\n for f in flags:\\n actions.append({b'type': f, b'value': True})\\n \\n- drevs = querydrev(repo, spec)\\n+ drevs = querydrev(repo.ui, spec)\\n for i, drev in enumerate(drevs):\\n if i + 1 == len(drevs) and opts.get(b'comment'):\\n actions.append({b'type': b'comment', b'value': 
opts[b'comment']})\\n\\n\",\"error_code\":null,\"error_info\":null}"
+ },
+ "headers": {
+ "x-frame-options": [
+ "Deny"
+ ],
+ "strict-transport-security": [
+ "max-age=0; includeSubdomains; preload"
+ ],
+ "transfer-encoding": [
+ "chunked"
+ ],
+ "cache-control": [
+ "no-store"
+ ],
+ "referrer-policy": [
+ "no-referrer"
+ ],
+ "x-content-type-options": [
+ "nosniff"
+ ],
+ "x-xss-protection": [
+ "1; mode=block"
+ ],
+ "server": [
+ "Apache/2.4.10 (Debian)"
+ ],
+ "date": [
+ "Wed, 04 Mar 2020 22:05:23 GMT"
+ ],
+ "content-type": [
+ "application/json"
+ ],
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
+ ]
+ },
+ "status": {
+ "message": "OK",
+ "code": 200
+ }
+ },
+ "request": {
+ "body": "params=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22diffID%22%3A+20442%7D&output=json&__conduit__=1",
+ "uri": "https://phab.mercurial-scm.org//api/differential.getrawdiff",
+ "headers": {
+ "content-length": [
+ "144"
+ ],
+ "accept": [
+ "application/mercurial-0.1"
+ ],
+ "host": [
+ "phab.mercurial-scm.org"
+ ],
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.3+33-bb58931d0c4f)"
+ ],
+ "content-type": [
+ "application/x-www-form-urlencoded"
+ ]
+ },
+ "method": "POST"
+ }
+ }
+ ]
+}
\ No newline at end of file
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/phabricator/phabsend-add-parent-setup.json Thu Apr 16 22:51:09 2020 +0530
@@ -0,0 +1,617 @@
+{
+ "version": 1,
+ "interactions": [
+ {
+ "response": {
+ "headers": {
+ "referrer-policy": [
+ "no-referrer"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "date": [
+ "Wed, 15 Apr 2020 23:43:39 GMT"
+ ],
+ "server": [
+ "Apache/2.4.10 (Debian)"
+ ],
+ "content-type": [
+ "application/json"
+ ],
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
+ ],
+ "transfer-encoding": [
+ "chunked"
+ ],
+ "x-xss-protection": [
+ "1; mode=block"
+ ],
+ "strict-transport-security": [
+ "max-age=0; includeSubdomains; preload"
+ ],
+ "cache-control": [
+ "no-store"
+ ],
+ "x-content-type-options": [
+ "nosniff"
+ ]
+ },
+ "status": {
+ "code": 200,
+ "message": "OK"
+ },
+ "body": {
+ "string": "{\"result\":{\"data\":[{\"id\":2,\"type\":\"REPO\",\"phid\":\"PHID-REPO-bvunnehri4u2isyr7bc3\",\"fields\":{\"name\":\"Mercurial\",\"vcs\":\"hg\",\"callsign\":\"HG\",\"shortName\":null,\"status\":\"active\",\"isImporting\":false,\"almanacServicePHID\":null,\"refRules\":{\"fetchRules\":[],\"trackRules\":[],\"permanentRefRules\":[]},\"spacePHID\":null,\"dateCreated\":1498761653,\"dateModified\":1500403184,\"policy\":{\"view\":\"public\",\"edit\":\"admin\",\"diffusion.push\":\"users\"}},\"attachments\":{}}],\"maps\":{},\"query\":{\"queryKey\":null},\"cursor\":{\"limit\":100,\"after\":null,\"before\":null,\"order\":null}},\"error_code\":null,\"error_info\":null}"
+ }
+ },
+ "request": {
+ "headers": {
+ "content-length": [
+ "183"
+ ],
+ "content-type": [
+ "application/x-www-form-urlencoded"
+ ],
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.3.2+497-112d1ad30f88)"
+ ],
+ "accept": [
+ "application/mercurial-0.1"
+ ],
+ "host": [
+ "phab.mercurial-scm.org"
+ ]
+ },
+ "body": "output=json&__conduit__=1¶ms=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22constraints%22%3A+%7B%22callsigns%22%3A+%5B%22HG%22%5D%7D%7D",
+ "uri": "https://phab.mercurial-scm.org//api/diffusion.repository.search",
+ "method": "POST"
+ }
+ },
+ {
+ "response": {
+ "headers": {
+ "referrer-policy": [
+ "no-referrer"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "date": [
+ "Wed, 15 Apr 2020 23:43:39 GMT"
+ ],
+ "server": [
+ "Apache/2.4.10 (Debian)"
+ ],
+ "content-type": [
+ "application/json"
+ ],
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
+ ],
+ "transfer-encoding": [
+ "chunked"
+ ],
+ "x-xss-protection": [
+ "1; mode=block"
+ ],
+ "strict-transport-security": [
+ "max-age=0; includeSubdomains; preload"
+ ],
+ "cache-control": [
+ "no-store"
+ ],
+ "x-content-type-options": [
+ "nosniff"
+ ]
+ },
+ "status": {
+ "code": 200,
+ "message": "OK"
+ },
+ "body": {
+ "string": "{\"result\":{\"diffid\":21110,\"phid\":\"PHID-DIFF-g25jdc5b5khduwpp3p3b\",\"uri\":\"https:\\/\\/phab.mercurial-scm.org\\/differential\\/diff\\/21110\\/\"},\"error_code\":null,\"error_info\":null}"
+ }
+ },
+ "request": {
+ "headers": {
+ "content-length": [
+ "1162"
+ ],
+ "content-type": [
+ "application/x-www-form-urlencoded"
+ ],
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.3.2+497-112d1ad30f88)"
+ ],
+ "accept": [
+ "application/mercurial-0.1"
+ ],
+ "host": [
+ "phab.mercurial-scm.org"
+ ]
+ },
+ "body": "output=json&__conduit__=1¶ms=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22bookmark%22%3A+null%2C+%22branch%22%3A+%22default%22%2C+%22changes%22%3A+%7B%22file1.txt%22%3A+%7B%22addLines%22%3A+1%2C+%22awayPaths%22%3A+%5B%5D%2C+%22commitHash%22%3A+null%2C+%22currentPath%22%3A+%22file1.txt%22%2C+%22delLines%22%3A+1%2C+%22fileType%22%3A+1%2C+%22hunks%22%3A+%5B%7B%22addLines%22%3A+1%2C+%22corpus%22%3A+%22-mod1%5Cn%2Bmod2%5Cn%22%2C+%22delLines%22%3A+1%2C+%22newLength%22%3A+1%2C+%22newOffset%22%3A+1%2C+%22oldLength%22%3A+1%2C+%22oldOffset%22%3A+1%7D%5D%2C+%22metadata%22%3A+%7B%7D%2C+%22newProperties%22%3A+%7B%7D%2C+%22oldPath%22%3A+%22file1.txt%22%2C+%22oldProperties%22%3A+%7B%7D%2C+%22type%22%3A+2%7D%7D%2C+%22creationMethod%22%3A+%22phabsend%22%2C+%22lintStatus%22%3A+%22none%22%2C+%22repositoryPHID%22%3A+%22PHID-REPO-bvunnehri4u2isyr7bc3%22%2C+%22sourceControlBaseRevision%22%3A+%22d549263bcb2db54042adf048047b368f1ed246df%22%2C+%22sourceControlPath%22%3A+%22%2F%22%2C+%22sourceControlSystem%22%3A+%22hg%22%2C+%22sourceMachine%22%3A+%22%22%2C+%22sourcePath%22%3A+%22%2F%22%2C+%22unitStatus%22%3A+%22none%22%7D",
+ "uri": "https://phab.mercurial-scm.org//api/differential.creatediff",
+ "method": "POST"
+ }
+ },
+ {
+ "response": {
+ "headers": {
+ "referrer-policy": [
+ "no-referrer"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "date": [
+ "Wed, 15 Apr 2020 23:43:39 GMT"
+ ],
+ "server": [
+ "Apache/2.4.10 (Debian)"
+ ],
+ "content-type": [
+ "application/json"
+ ],
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
+ ],
+ "transfer-encoding": [
+ "chunked"
+ ],
+ "x-xss-protection": [
+ "1; mode=block"
+ ],
+ "strict-transport-security": [
+ "max-age=0; includeSubdomains; preload"
+ ],
+ "cache-control": [
+ "no-store"
+ ],
+ "x-content-type-options": [
+ "nosniff"
+ ]
+ },
+ "status": {
+ "code": 200,
+ "message": "OK"
+ },
+ "body": {
+ "string": "{\"result\":null,\"error_code\":null,\"error_info\":null}"
+ }
+ },
+ "request": {
+ "headers": {
+ "content-length": [
+ "482"
+ ],
+ "content-type": [
+ "application/x-www-form-urlencoded"
+ ],
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.3.2+497-112d1ad30f88)"
+ ],
+ "accept": [
+ "application/mercurial-0.1"
+ ],
+ "host": [
+ "phab.mercurial-scm.org"
+ ]
+ },
+ "body": "output=json&__conduit__=1¶ms=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22data%22%3A+%22%7B%5C%22branch%5C%22%3A+%5C%22default%5C%22%2C+%5C%22date%5C%22%3A+%5C%220+0%5C%22%2C+%5C%22node%5C%22%3A+%5C%225d3959e20d1d71d9e906e562f891582b4072570f%5C%22%2C+%5C%22parent%5C%22%3A+%5C%22d549263bcb2db54042adf048047b368f1ed246df%5C%22%2C+%5C%22user%5C%22%3A+%5C%22test%5C%22%7D%22%2C+%22diff_id%22%3A+21110%2C+%22name%22%3A+%22hg%3Ameta%22%7D",
+ "uri": "https://phab.mercurial-scm.org//api/differential.setdiffproperty",
+ "method": "POST"
+ }
+ },
+ {
+ "response": {
+ "headers": {
+ "referrer-policy": [
+ "no-referrer"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "date": [
+ "Wed, 15 Apr 2020 23:43:40 GMT"
+ ],
+ "server": [
+ "Apache/2.4.10 (Debian)"
+ ],
+ "content-type": [
+ "application/json"
+ ],
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
+ ],
+ "transfer-encoding": [
+ "chunked"
+ ],
+ "x-xss-protection": [
+ "1; mode=block"
+ ],
+ "strict-transport-security": [
+ "max-age=0; includeSubdomains; preload"
+ ],
+ "cache-control": [
+ "no-store"
+ ],
+ "x-content-type-options": [
+ "nosniff"
+ ]
+ },
+ "status": {
+ "code": 200,
+ "message": "OK"
+ },
+ "body": {
+ "string": "{\"result\":null,\"error_code\":null,\"error_info\":null}"
+ }
+ },
+ "request": {
+ "headers": {
+ "content-length": [
+ "594"
+ ],
+ "content-type": [
+ "application/x-www-form-urlencoded"
+ ],
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.3.2+497-112d1ad30f88)"
+ ],
+ "accept": [
+ "application/mercurial-0.1"
+ ],
+ "host": [
+ "phab.mercurial-scm.org"
+ ]
+ },
+ "body": "output=json&__conduit__=1¶ms=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22data%22%3A+%22%7B%5C%225d3959e20d1d71d9e906e562f891582b4072570f%5C%22%3A+%7B%5C%22author%5C%22%3A+%5C%22test%5C%22%2C+%5C%22authorEmail%5C%22%3A+%5C%22test%5C%22%2C+%5C%22branch%5C%22%3A+%5C%22default%5C%22%2C+%5C%22commit%5C%22%3A+%5C%225d3959e20d1d71d9e906e562f891582b4072570f%5C%22%2C+%5C%22parents%5C%22%3A+%5B%5C%22d549263bcb2db54042adf048047b368f1ed246df%5C%22%5D%2C+%5C%22time%5C%22%3A+0%7D%7D%22%2C+%22diff_id%22%3A+21110%2C+%22name%22%3A+%22local%3Acommits%22%7D",
+ "uri": "https://phab.mercurial-scm.org//api/differential.setdiffproperty",
+ "method": "POST"
+ }
+ },
+ {
+ "response": {
+ "headers": {
+ "referrer-policy": [
+ "no-referrer"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "date": [
+ "Wed, 15 Apr 2020 23:43:40 GMT"
+ ],
+ "server": [
+ "Apache/2.4.10 (Debian)"
+ ],
+ "content-type": [
+ "application/json"
+ ],
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
+ ],
+ "transfer-encoding": [
+ "chunked"
+ ],
+ "x-xss-protection": [
+ "1; mode=block"
+ ],
+ "strict-transport-security": [
+ "max-age=0; includeSubdomains; preload"
+ ],
+ "cache-control": [
+ "no-store"
+ ],
+ "x-content-type-options": [
+ "nosniff"
+ ]
+ },
+ "status": {
+ "code": 200,
+ "message": "OK"
+ },
+ "body": {
+ "string": "{\"result\":{\"errors\":[],\"fields\":{\"title\":\"modified 2\"},\"revisionIDFieldInfo\":{\"value\":null,\"validDomain\":\"https:\\/\\/phab.mercurial-scm.org\"},\"transactions\":[{\"type\":\"title\",\"value\":\"modified 2\"}]},\"error_code\":null,\"error_info\":null}"
+ }
+ },
+ "request": {
+ "headers": {
+ "content-length": [
+ "155"
+ ],
+ "content-type": [
+ "application/x-www-form-urlencoded"
+ ],
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.3.2+497-112d1ad30f88)"
+ ],
+ "accept": [
+ "application/mercurial-0.1"
+ ],
+ "host": [
+ "phab.mercurial-scm.org"
+ ]
+ },
+ "body": "output=json&__conduit__=1¶ms=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22corpus%22%3A+%22modified+2%22%7D",
+ "uri": "https://phab.mercurial-scm.org//api/differential.parsecommitmessage",
+ "method": "POST"
+ }
+ },
+ {
+ "response": {
+ "headers": {
+ "referrer-policy": [
+ "no-referrer"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "date": [
+ "Wed, 15 Apr 2020 23:43:41 GMT"
+ ],
+ "server": [
+ "Apache/2.4.10 (Debian)"
+ ],
+ "content-type": [
+ "application/json"
+ ],
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
+ ],
+ "transfer-encoding": [
+ "chunked"
+ ],
+ "x-xss-protection": [
+ "1; mode=block"
+ ],
+ "strict-transport-security": [
+ "max-age=0; includeSubdomains; preload"
+ ],
+ "cache-control": [
+ "no-store"
+ ],
+ "x-content-type-options": [
+ "nosniff"
+ ]
+ },
+ "status": {
+ "code": 200,
+ "message": "OK"
+ },
+ "body": {
+ "string": "{\"result\":{\"object\":{\"id\":8433,\"phid\":\"PHID-DREV-kpkwhtylyxrzikfspl5r\"},\"transactions\":[{\"phid\":\"PHID-XACT-DREV-txmvtexe4kw2pjn\"},{\"phid\":\"PHID-XACT-DREV-l2em7ff7nlcb2fu\"},{\"phid\":\"PHID-XACT-DREV-ttcilh74gwwixze\"},{\"phid\":\"PHID-XACT-DREV-ytvu3llz6pqwg46\"},{\"phid\":\"PHID-XACT-DREV-jeqenvlffsbdv5y\"}]},\"error_code\":null,\"error_info\":null}"
+ }
+ },
+ "request": {
+ "headers": {
+ "content-length": [
+ "308"
+ ],
+ "content-type": [
+ "application/x-www-form-urlencoded"
+ ],
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.3.2+497-112d1ad30f88)"
+ ],
+ "accept": [
+ "application/mercurial-0.1"
+ ],
+ "host": [
+ "phab.mercurial-scm.org"
+ ]
+ },
+ "body": "output=json&__conduit__=1¶ms=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22transactions%22%3A+%5B%7B%22type%22%3A+%22update%22%2C+%22value%22%3A+%22PHID-DIFF-g25jdc5b5khduwpp3p3b%22%7D%2C+%7B%22type%22%3A+%22title%22%2C+%22value%22%3A+%22modified+2%22%7D%5D%7D",
+ "uri": "https://phab.mercurial-scm.org//api/differential.revision.edit",
+ "method": "POST"
+ }
+ },
+ {
+ "response": {
+ "headers": {
+ "referrer-policy": [
+ "no-referrer"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "date": [
+ "Wed, 15 Apr 2020 23:43:41 GMT"
+ ],
+ "server": [
+ "Apache/2.4.10 (Debian)"
+ ],
+ "content-type": [
+ "application/json"
+ ],
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
+ ],
+ "transfer-encoding": [
+ "chunked"
+ ],
+ "x-xss-protection": [
+ "1; mode=block"
+ ],
+ "strict-transport-security": [
+ "max-age=0; includeSubdomains; preload"
+ ],
+ "cache-control": [
+ "no-store"
+ ],
+ "x-content-type-options": [
+ "nosniff"
+ ]
+ },
+ "status": {
+ "code": 200,
+ "message": "OK"
+ },
+ "body": {
+ "string": "{\"result\":[{\"id\":\"8433\",\"phid\":\"PHID-DREV-kpkwhtylyxrzikfspl5r\",\"title\":\"modified 2\",\"uri\":\"https:\\/\\/phab.mercurial-scm.org\\/D8433\",\"dateCreated\":\"1586994221\",\"dateModified\":\"1586994221\",\"authorPHID\":\"PHID-USER-tzhaient733lwrlbcag5\",\"status\":\"0\",\"statusName\":\"Needs Review\",\"properties\":{\"draft.broadcast\":true,\"lines.added\":1,\"lines.removed\":1},\"branch\":\"default\",\"summary\":\"\",\"testPlan\":\"\",\"lineCount\":\"2\",\"activeDiffPHID\":\"PHID-DIFF-g25jdc5b5khduwpp3p3b\",\"diffs\":[\"21110\"],\"commits\":[],\"reviewers\":{\"PHID-PROJ-3dvcxzznrjru2xmmses3\":\"PHID-PROJ-3dvcxzznrjru2xmmses3\"},\"ccs\":[\"PHID-USER-q42dn7cc3donqriafhjx\"],\"hashes\":[[\"hgcm\",\"\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\"]],\"auxiliary\":{\"phabricator:projects\":[],\"phabricator:depends-on\":[]},\"repositoryPHID\":\"PHID-REPO-bvunnehri4u2isyr7bc3\",\"sourcePath\":\"\\/\"}],\"error_code\":null,\"error_info\":null}"
+ }
+ },
+ "request": {
+ "headers": {
+ "content-length": [
+ "146"
+ ],
+ "content-type": [
+ "application/x-www-form-urlencoded"
+ ],
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.3.2+497-112d1ad30f88)"
+ ],
+ "accept": [
+ "application/mercurial-0.1"
+ ],
+ "host": [
+ "phab.mercurial-scm.org"
+ ]
+ },
+ "body": "output=json&__conduit__=1¶ms=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22ids%22%3A+%5B8433%5D%7D",
+ "uri": "https://phab.mercurial-scm.org//api/differential.query",
+ "method": "POST"
+ }
+ },
+ {
+ "response": {
+ "headers": {
+ "referrer-policy": [
+ "no-referrer"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "date": [
+ "Wed, 15 Apr 2020 23:43:42 GMT"
+ ],
+ "server": [
+ "Apache/2.4.10 (Debian)"
+ ],
+ "content-type": [
+ "application/json"
+ ],
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
+ ],
+ "transfer-encoding": [
+ "chunked"
+ ],
+ "x-xss-protection": [
+ "1; mode=block"
+ ],
+ "strict-transport-security": [
+ "max-age=0; includeSubdomains; preload"
+ ],
+ "cache-control": [
+ "no-store"
+ ],
+ "x-content-type-options": [
+ "nosniff"
+ ]
+ },
+ "status": {
+ "code": 200,
+ "message": "OK"
+ },
+ "body": {
+ "string": "{\"result\":null,\"error_code\":null,\"error_info\":null}"
+ }
+ },
+ "request": {
+ "headers": {
+ "content-length": [
+ "482"
+ ],
+ "content-type": [
+ "application/x-www-form-urlencoded"
+ ],
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.3.2+497-112d1ad30f88)"
+ ],
+ "accept": [
+ "application/mercurial-0.1"
+ ],
+ "host": [
+ "phab.mercurial-scm.org"
+ ]
+ },
+ "body": "output=json&__conduit__=1¶ms=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22data%22%3A+%22%7B%5C%22branch%5C%22%3A+%5C%22default%5C%22%2C+%5C%22date%5C%22%3A+%5C%220+0%5C%22%2C+%5C%22node%5C%22%3A+%5C%222b4aa8a88d617ca3ba0afd40e0e840b30588f126%5C%22%2C+%5C%22parent%5C%22%3A+%5C%22d549263bcb2db54042adf048047b368f1ed246df%5C%22%2C+%5C%22user%5C%22%3A+%5C%22test%5C%22%7D%22%2C+%22diff_id%22%3A+21110%2C+%22name%22%3A+%22hg%3Ameta%22%7D",
+ "uri": "https://phab.mercurial-scm.org//api/differential.setdiffproperty",
+ "method": "POST"
+ }
+ },
+ {
+ "response": {
+ "headers": {
+ "referrer-policy": [
+ "no-referrer"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "date": [
+ "Wed, 15 Apr 2020 23:43:42 GMT"
+ ],
+ "server": [
+ "Apache/2.4.10 (Debian)"
+ ],
+ "content-type": [
+ "application/json"
+ ],
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
+ ],
+ "transfer-encoding": [
+ "chunked"
+ ],
+ "x-xss-protection": [
+ "1; mode=block"
+ ],
+ "strict-transport-security": [
+ "max-age=0; includeSubdomains; preload"
+ ],
+ "cache-control": [
+ "no-store"
+ ],
+ "x-content-type-options": [
+ "nosniff"
+ ]
+ },
+ "status": {
+ "code": 200,
+ "message": "OK"
+ },
+ "body": {
+ "string": "{\"result\":null,\"error_code\":null,\"error_info\":null}"
+ }
+ },
+ "request": {
+ "headers": {
+ "content-length": [
+ "594"
+ ],
+ "content-type": [
+ "application/x-www-form-urlencoded"
+ ],
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.3.2+497-112d1ad30f88)"
+ ],
+ "accept": [
+ "application/mercurial-0.1"
+ ],
+ "host": [
+ "phab.mercurial-scm.org"
+ ]
+ },
+ "body": "output=json&__conduit__=1¶ms=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22data%22%3A+%22%7B%5C%222b4aa8a88d617ca3ba0afd40e0e840b30588f126%5C%22%3A+%7B%5C%22author%5C%22%3A+%5C%22test%5C%22%2C+%5C%22authorEmail%5C%22%3A+%5C%22test%5C%22%2C+%5C%22branch%5C%22%3A+%5C%22default%5C%22%2C+%5C%22commit%5C%22%3A+%5C%222b4aa8a88d617ca3ba0afd40e0e840b30588f126%5C%22%2C+%5C%22parents%5C%22%3A+%5B%5C%22d549263bcb2db54042adf048047b368f1ed246df%5C%22%5D%2C+%5C%22time%5C%22%3A+0%7D%7D%22%2C+%22diff_id%22%3A+21110%2C+%22name%22%3A+%22local%3Acommits%22%7D",
+ "uri": "https://phab.mercurial-scm.org//api/differential.setdiffproperty",
+ "method": "POST"
+ }
+ }
+ ]
+}
\ No newline at end of file
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/phabricator/phabsend-add-parent.json Thu Apr 16 22:51:09 2020 +0530
@@ -0,0 +1,1093 @@
+{
+ "version": 1,
+ "interactions": [
+ {
+ "response": {
+ "headers": {
+ "referrer-policy": [
+ "no-referrer"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "date": [
+ "Wed, 15 Apr 2020 23:43:44 GMT"
+ ],
+ "server": [
+ "Apache/2.4.10 (Debian)"
+ ],
+ "content-type": [
+ "application/json"
+ ],
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
+ ],
+ "transfer-encoding": [
+ "chunked"
+ ],
+ "x-xss-protection": [
+ "1; mode=block"
+ ],
+ "strict-transport-security": [
+ "max-age=0; includeSubdomains; preload"
+ ],
+ "cache-control": [
+ "no-store"
+ ],
+ "x-content-type-options": [
+ "nosniff"
+ ]
+ },
+ "status": {
+ "code": 200,
+ "message": "OK"
+ },
+ "body": {
+ "string": "{\"result\":{\"21110\":{\"id\":\"21110\",\"revisionID\":\"8433\",\"dateCreated\":\"1586994219\",\"dateModified\":\"1586994221\",\"sourceControlBaseRevision\":\"d549263bcb2db54042adf048047b368f1ed246df\",\"sourceControlPath\":\"\\/\",\"sourceControlSystem\":\"hg\",\"branch\":\"default\",\"bookmark\":null,\"creationMethod\":\"phabsend\",\"description\":null,\"unitStatus\":\"0\",\"lintStatus\":\"0\",\"changes\":[{\"id\":\"57078\",\"metadata\":{\"line:first\":1,\"hash.effect\":\"ftEQkHimiyJo\"},\"oldPath\":\"file1.txt\",\"currentPath\":\"file1.txt\",\"awayPaths\":[],\"oldProperties\":[],\"newProperties\":[],\"type\":\"2\",\"fileType\":\"1\",\"commitHash\":null,\"addLines\":\"1\",\"delLines\":\"1\",\"hunks\":[{\"oldOffset\":\"1\",\"newOffset\":\"1\",\"oldLength\":\"1\",\"newLength\":\"1\",\"addLines\":null,\"delLines\":null,\"isMissingOldNewline\":null,\"isMissingNewNewline\":null,\"corpus\":\"-mod1\\n+mod2\\n\"}]}],\"properties\":{\"hg:meta\":{\"branch\":\"default\",\"date\":\"0 0\",\"node\":\"2b4aa8a88d617ca3ba0afd40e0e840b30588f126\",\"parent\":\"d549263bcb2db54042adf048047b368f1ed246df\",\"user\":\"test\"},\"local:commits\":{\"2b4aa8a88d617ca3ba0afd40e0e840b30588f126\":{\"author\":\"test\",\"authorEmail\":\"test\",\"branch\":\"default\",\"commit\":\"2b4aa8a88d617ca3ba0afd40e0e840b30588f126\",\"parents\":[\"d549263bcb2db54042adf048047b368f1ed246df\"],\"time\":0}}},\"authorName\":\"test\",\"authorEmail\":\"test\"}},\"error_code\":null,\"error_info\":null}"
+ }
+ },
+ "request": {
+ "headers": {
+ "content-length": [
+ "154"
+ ],
+ "content-type": [
+ "application/x-www-form-urlencoded"
+ ],
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.3.2+497-112d1ad30f88)"
+ ],
+ "accept": [
+ "application/mercurial-0.1"
+ ],
+ "host": [
+ "phab.mercurial-scm.org"
+ ]
+ },
+ "body": "output=json&__conduit__=1¶ms=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22revisionIDs%22%3A+%5B8433%5D%7D",
+ "uri": "https://phab.mercurial-scm.org//api/differential.querydiffs",
+ "method": "POST"
+ }
+ },
+ {
+ "response": {
+ "headers": {
+ "referrer-policy": [
+ "no-referrer"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "date": [
+ "Wed, 15 Apr 2020 23:43:45 GMT"
+ ],
+ "server": [
+ "Apache/2.4.10 (Debian)"
+ ],
+ "content-type": [
+ "application/json"
+ ],
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
+ ],
+ "transfer-encoding": [
+ "chunked"
+ ],
+ "x-xss-protection": [
+ "1; mode=block"
+ ],
+ "strict-transport-security": [
+ "max-age=0; includeSubdomains; preload"
+ ],
+ "cache-control": [
+ "no-store"
+ ],
+ "x-content-type-options": [
+ "nosniff"
+ ]
+ },
+ "status": {
+ "code": 200,
+ "message": "OK"
+ },
+ "body": {
+ "string": "{\"result\":{\"data\":[{\"id\":2,\"type\":\"REPO\",\"phid\":\"PHID-REPO-bvunnehri4u2isyr7bc3\",\"fields\":{\"name\":\"Mercurial\",\"vcs\":\"hg\",\"callsign\":\"HG\",\"shortName\":null,\"status\":\"active\",\"isImporting\":false,\"almanacServicePHID\":null,\"refRules\":{\"fetchRules\":[],\"trackRules\":[],\"permanentRefRules\":[]},\"spacePHID\":null,\"dateCreated\":1498761653,\"dateModified\":1500403184,\"policy\":{\"view\":\"public\",\"edit\":\"admin\",\"diffusion.push\":\"users\"}},\"attachments\":{}}],\"maps\":{},\"query\":{\"queryKey\":null},\"cursor\":{\"limit\":100,\"after\":null,\"before\":null,\"order\":null}},\"error_code\":null,\"error_info\":null}"
+ }
+ },
+ "request": {
+ "headers": {
+ "content-length": [
+ "183"
+ ],
+ "content-type": [
+ "application/x-www-form-urlencoded"
+ ],
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.3.2+497-112d1ad30f88)"
+ ],
+ "accept": [
+ "application/mercurial-0.1"
+ ],
+ "host": [
+ "phab.mercurial-scm.org"
+ ]
+ },
+ "body": "output=json&__conduit__=1¶ms=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22constraints%22%3A+%7B%22callsigns%22%3A+%5B%22HG%22%5D%7D%7D",
+ "uri": "https://phab.mercurial-scm.org//api/diffusion.repository.search",
+ "method": "POST"
+ }
+ },
+ {
+ "response": {
+ "headers": {
+ "referrer-policy": [
+ "no-referrer"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "date": [
+ "Wed, 15 Apr 2020 23:43:45 GMT"
+ ],
+ "server": [
+ "Apache/2.4.10 (Debian)"
+ ],
+ "content-type": [
+ "application/json"
+ ],
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
+ ],
+ "transfer-encoding": [
+ "chunked"
+ ],
+ "x-xss-protection": [
+ "1; mode=block"
+ ],
+ "strict-transport-security": [
+ "max-age=0; includeSubdomains; preload"
+ ],
+ "cache-control": [
+ "no-store"
+ ],
+ "x-content-type-options": [
+ "nosniff"
+ ]
+ },
+ "status": {
+ "code": 200,
+ "message": "OK"
+ },
+ "body": {
+ "string": "{\"result\":{\"diffid\":21111,\"phid\":\"PHID-DIFF-qat4sqpqqvytzhf7rpti\",\"uri\":\"https:\\/\\/phab.mercurial-scm.org\\/differential\\/diff\\/21111\\/\"},\"error_code\":null,\"error_info\":null}"
+ }
+ },
+ "request": {
+ "headers": {
+ "content-length": [
+ "1161"
+ ],
+ "content-type": [
+ "application/x-www-form-urlencoded"
+ ],
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.3.2+497-112d1ad30f88)"
+ ],
+ "accept": [
+ "application/mercurial-0.1"
+ ],
+ "host": [
+ "phab.mercurial-scm.org"
+ ]
+ },
+ "body": "output=json&__conduit__=1¶ms=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22bookmark%22%3A+null%2C+%22branch%22%3A+%22default%22%2C+%22changes%22%3A+%7B%22file1.txt%22%3A+%7B%22addLines%22%3A+1%2C+%22awayPaths%22%3A+%5B%5D%2C+%22commitHash%22%3A+null%2C+%22currentPath%22%3A+%22file1.txt%22%2C+%22delLines%22%3A+1%2C+%22fileType%22%3A+1%2C+%22hunks%22%3A+%5B%7B%22addLines%22%3A+1%2C+%22corpus%22%3A+%22-add%5Cn%2Bmod1%5Cn%22%2C+%22delLines%22%3A+1%2C+%22newLength%22%3A+1%2C+%22newOffset%22%3A+1%2C+%22oldLength%22%3A+1%2C+%22oldOffset%22%3A+1%7D%5D%2C+%22metadata%22%3A+%7B%7D%2C+%22newProperties%22%3A+%7B%7D%2C+%22oldPath%22%3A+%22file1.txt%22%2C+%22oldProperties%22%3A+%7B%7D%2C+%22type%22%3A+2%7D%7D%2C+%22creationMethod%22%3A+%22phabsend%22%2C+%22lintStatus%22%3A+%22none%22%2C+%22repositoryPHID%22%3A+%22PHID-REPO-bvunnehri4u2isyr7bc3%22%2C+%22sourceControlBaseRevision%22%3A+%225cbade24e0fae40d67c568e86a978a2a946b9aed%22%2C+%22sourceControlPath%22%3A+%22%2F%22%2C+%22sourceControlSystem%22%3A+%22hg%22%2C+%22sourceMachine%22%3A+%22%22%2C+%22sourcePath%22%3A+%22%2F%22%2C+%22unitStatus%22%3A+%22none%22%7D",
+ "uri": "https://phab.mercurial-scm.org//api/differential.creatediff",
+ "method": "POST"
+ }
+ },
+ {
+ "response": {
+ "headers": {
+ "referrer-policy": [
+ "no-referrer"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "date": [
+ "Wed, 15 Apr 2020 23:43:46 GMT"
+ ],
+ "server": [
+ "Apache/2.4.10 (Debian)"
+ ],
+ "content-type": [
+ "application/json"
+ ],
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
+ ],
+ "transfer-encoding": [
+ "chunked"
+ ],
+ "x-xss-protection": [
+ "1; mode=block"
+ ],
+ "strict-transport-security": [
+ "max-age=0; includeSubdomains; preload"
+ ],
+ "cache-control": [
+ "no-store"
+ ],
+ "x-content-type-options": [
+ "nosniff"
+ ]
+ },
+ "status": {
+ "code": 200,
+ "message": "OK"
+ },
+ "body": {
+ "string": "{\"result\":null,\"error_code\":null,\"error_info\":null}"
+ }
+ },
+ "request": {
+ "headers": {
+ "content-length": [
+ "482"
+ ],
+ "content-type": [
+ "application/x-www-form-urlencoded"
+ ],
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.3.2+497-112d1ad30f88)"
+ ],
+ "accept": [
+ "application/mercurial-0.1"
+ ],
+ "host": [
+ "phab.mercurial-scm.org"
+ ]
+ },
+ "body": "output=json&__conduit__=1¶ms=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22data%22%3A+%22%7B%5C%22branch%5C%22%3A+%5C%22default%5C%22%2C+%5C%22date%5C%22%3A+%5C%220+0%5C%22%2C+%5C%22node%5C%22%3A+%5C%22d549263bcb2db54042adf048047b368f1ed246df%5C%22%2C+%5C%22parent%5C%22%3A+%5C%225cbade24e0fae40d67c568e86a978a2a946b9aed%5C%22%2C+%5C%22user%5C%22%3A+%5C%22test%5C%22%7D%22%2C+%22diff_id%22%3A+21111%2C+%22name%22%3A+%22hg%3Ameta%22%7D",
+ "uri": "https://phab.mercurial-scm.org//api/differential.setdiffproperty",
+ "method": "POST"
+ }
+ },
+ {
+ "response": {
+ "headers": {
+ "referrer-policy": [
+ "no-referrer"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "date": [
+ "Wed, 15 Apr 2020 23:43:46 GMT"
+ ],
+ "server": [
+ "Apache/2.4.10 (Debian)"
+ ],
+ "content-type": [
+ "application/json"
+ ],
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
+ ],
+ "transfer-encoding": [
+ "chunked"
+ ],
+ "x-xss-protection": [
+ "1; mode=block"
+ ],
+ "strict-transport-security": [
+ "max-age=0; includeSubdomains; preload"
+ ],
+ "cache-control": [
+ "no-store"
+ ],
+ "x-content-type-options": [
+ "nosniff"
+ ]
+ },
+ "status": {
+ "code": 200,
+ "message": "OK"
+ },
+ "body": {
+ "string": "{\"result\":null,\"error_code\":null,\"error_info\":null}"
+ }
+ },
+ "request": {
+ "headers": {
+ "content-length": [
+ "594"
+ ],
+ "content-type": [
+ "application/x-www-form-urlencoded"
+ ],
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.3.2+497-112d1ad30f88)"
+ ],
+ "accept": [
+ "application/mercurial-0.1"
+ ],
+ "host": [
+ "phab.mercurial-scm.org"
+ ]
+ },
+ "body": "output=json&__conduit__=1¶ms=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22data%22%3A+%22%7B%5C%22d549263bcb2db54042adf048047b368f1ed246df%5C%22%3A+%7B%5C%22author%5C%22%3A+%5C%22test%5C%22%2C+%5C%22authorEmail%5C%22%3A+%5C%22test%5C%22%2C+%5C%22branch%5C%22%3A+%5C%22default%5C%22%2C+%5C%22commit%5C%22%3A+%5C%22d549263bcb2db54042adf048047b368f1ed246df%5C%22%2C+%5C%22parents%5C%22%3A+%5B%5C%225cbade24e0fae40d67c568e86a978a2a946b9aed%5C%22%5D%2C+%5C%22time%5C%22%3A+0%7D%7D%22%2C+%22diff_id%22%3A+21111%2C+%22name%22%3A+%22local%3Acommits%22%7D",
+ "uri": "https://phab.mercurial-scm.org//api/differential.setdiffproperty",
+ "method": "POST"
+ }
+ },
+ {
+ "response": {
+ "headers": {
+ "referrer-policy": [
+ "no-referrer"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "date": [
+ "Wed, 15 Apr 2020 23:43:47 GMT"
+ ],
+ "server": [
+ "Apache/2.4.10 (Debian)"
+ ],
+ "content-type": [
+ "application/json"
+ ],
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
+ ],
+ "transfer-encoding": [
+ "chunked"
+ ],
+ "x-xss-protection": [
+ "1; mode=block"
+ ],
+ "strict-transport-security": [
+ "max-age=0; includeSubdomains; preload"
+ ],
+ "cache-control": [
+ "no-store"
+ ],
+ "x-content-type-options": [
+ "nosniff"
+ ]
+ },
+ "status": {
+ "code": 200,
+ "message": "OK"
+ },
+ "body": {
+ "string": "{\"result\":{\"errors\":[],\"fields\":{\"title\":\"modified 1\"},\"revisionIDFieldInfo\":{\"value\":null,\"validDomain\":\"https:\\/\\/phab.mercurial-scm.org\"},\"transactions\":[{\"type\":\"title\",\"value\":\"modified 1\"}]},\"error_code\":null,\"error_info\":null}"
+ }
+ },
+ "request": {
+ "headers": {
+ "content-length": [
+ "155"
+ ],
+ "content-type": [
+ "application/x-www-form-urlencoded"
+ ],
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.3.2+497-112d1ad30f88)"
+ ],
+ "accept": [
+ "application/mercurial-0.1"
+ ],
+ "host": [
+ "phab.mercurial-scm.org"
+ ]
+ },
+ "body": "output=json&__conduit__=1¶ms=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22corpus%22%3A+%22modified+1%22%7D",
+ "uri": "https://phab.mercurial-scm.org//api/differential.parsecommitmessage",
+ "method": "POST"
+ }
+ },
+ {
+ "response": {
+ "headers": {
+ "referrer-policy": [
+ "no-referrer"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "date": [
+ "Wed, 15 Apr 2020 23:43:47 GMT"
+ ],
+ "server": [
+ "Apache/2.4.10 (Debian)"
+ ],
+ "content-type": [
+ "application/json"
+ ],
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
+ ],
+ "transfer-encoding": [
+ "chunked"
+ ],
+ "x-xss-protection": [
+ "1; mode=block"
+ ],
+ "strict-transport-security": [
+ "max-age=0; includeSubdomains; preload"
+ ],
+ "cache-control": [
+ "no-store"
+ ],
+ "x-content-type-options": [
+ "nosniff"
+ ]
+ },
+ "status": {
+ "code": 200,
+ "message": "OK"
+ },
+ "body": {
+ "string": "{\"result\":{\"object\":{\"id\":8434,\"phid\":\"PHID-DREV-l5ocnglddqa4hwbdzcky\"},\"transactions\":[{\"phid\":\"PHID-XACT-DREV-ov2izjdfoe74m42\"},{\"phid\":\"PHID-XACT-DREV-yqeh2if4ea4pio4\"},{\"phid\":\"PHID-XACT-DREV-a74civfucvpjkpl\"},{\"phid\":\"PHID-XACT-DREV-s3ikmvejpazw2cd\"},{\"phid\":\"PHID-XACT-DREV-2wojx4fyghzgkgw\"}]},\"error_code\":null,\"error_info\":null}"
+ }
+ },
+ "request": {
+ "headers": {
+ "content-length": [
+ "308"
+ ],
+ "content-type": [
+ "application/x-www-form-urlencoded"
+ ],
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.3.2+497-112d1ad30f88)"
+ ],
+ "accept": [
+ "application/mercurial-0.1"
+ ],
+ "host": [
+ "phab.mercurial-scm.org"
+ ]
+ },
+ "body": "output=json&__conduit__=1¶ms=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22transactions%22%3A+%5B%7B%22type%22%3A+%22update%22%2C+%22value%22%3A+%22PHID-DIFF-qat4sqpqqvytzhf7rpti%22%7D%2C+%7B%22type%22%3A+%22title%22%2C+%22value%22%3A+%22modified+1%22%7D%5D%7D",
+ "uri": "https://phab.mercurial-scm.org//api/differential.revision.edit",
+ "method": "POST"
+ }
+ },
+ {
+ "response": {
+ "headers": {
+ "referrer-policy": [
+ "no-referrer"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "date": [
+ "Wed, 15 Apr 2020 23:43:48 GMT"
+ ],
+ "server": [
+ "Apache/2.4.10 (Debian)"
+ ],
+ "content-type": [
+ "application/json"
+ ],
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
+ ],
+ "transfer-encoding": [
+ "chunked"
+ ],
+ "x-xss-protection": [
+ "1; mode=block"
+ ],
+ "strict-transport-security": [
+ "max-age=0; includeSubdomains; preload"
+ ],
+ "cache-control": [
+ "no-store"
+ ],
+ "x-content-type-options": [
+ "nosniff"
+ ]
+ },
+ "status": {
+ "code": 200,
+ "message": "OK"
+ },
+ "body": {
+ "string": "{\"result\":null,\"error_code\":null,\"error_info\":null}"
+ }
+ },
+ "request": {
+ "headers": {
+ "content-length": [
+ "488"
+ ],
+ "content-type": [
+ "application/x-www-form-urlencoded"
+ ],
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.3.2+497-112d1ad30f88)"
+ ],
+ "accept": [
+ "application/mercurial-0.1"
+ ],
+ "host": [
+ "phab.mercurial-scm.org"
+ ]
+ },
+ "body": "output=json&__conduit__=1¶ms=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22data%22%3A+%22%7B%5C%22branch%5C%22%3A+%5C%22default%5C%22%2C+%5C%22date%5C%22%3A+%5C%220+0%5C%22%2C+%5C%22node%5C%22%3A+%5C%222b4aa8a88d617ca3ba0afd40e0e840b30588f126%5C%22%2C+%5C%22parent%5C%22%3A+%5C%22d549263bcb2db54042adf048047b368f1ed246df%5C%22%2C+%5C%22user%5C%22%3A+%5C%22test%5C%22%7D%22%2C+%22diff_id%22%3A+%2221110%22%2C+%22name%22%3A+%22hg%3Ameta%22%7D",
+ "uri": "https://phab.mercurial-scm.org//api/differential.setdiffproperty",
+ "method": "POST"
+ }
+ },
+ {
+ "response": {
+ "headers": {
+ "referrer-policy": [
+ "no-referrer"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "date": [
+ "Wed, 15 Apr 2020 23:43:48 GMT"
+ ],
+ "server": [
+ "Apache/2.4.10 (Debian)"
+ ],
+ "content-type": [
+ "application/json"
+ ],
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
+ ],
+ "transfer-encoding": [
+ "chunked"
+ ],
+ "x-xss-protection": [
+ "1; mode=block"
+ ],
+ "strict-transport-security": [
+ "max-age=0; includeSubdomains; preload"
+ ],
+ "cache-control": [
+ "no-store"
+ ],
+ "x-content-type-options": [
+ "nosniff"
+ ]
+ },
+ "status": {
+ "code": 200,
+ "message": "OK"
+ },
+ "body": {
+ "string": "{\"result\":null,\"error_code\":null,\"error_info\":null}"
+ }
+ },
+ "request": {
+ "headers": {
+ "content-length": [
+ "600"
+ ],
+ "content-type": [
+ "application/x-www-form-urlencoded"
+ ],
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.3.2+497-112d1ad30f88)"
+ ],
+ "accept": [
+ "application/mercurial-0.1"
+ ],
+ "host": [
+ "phab.mercurial-scm.org"
+ ]
+ },
+ "body": "output=json&__conduit__=1¶ms=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22data%22%3A+%22%7B%5C%222b4aa8a88d617ca3ba0afd40e0e840b30588f126%5C%22%3A+%7B%5C%22author%5C%22%3A+%5C%22test%5C%22%2C+%5C%22authorEmail%5C%22%3A+%5C%22test%5C%22%2C+%5C%22branch%5C%22%3A+%5C%22default%5C%22%2C+%5C%22commit%5C%22%3A+%5C%222b4aa8a88d617ca3ba0afd40e0e840b30588f126%5C%22%2C+%5C%22parents%5C%22%3A+%5B%5C%22d549263bcb2db54042adf048047b368f1ed246df%5C%22%5D%2C+%5C%22time%5C%22%3A+0%7D%7D%22%2C+%22diff_id%22%3A+%2221110%22%2C+%22name%22%3A+%22local%3Acommits%22%7D",
+ "uri": "https://phab.mercurial-scm.org//api/differential.setdiffproperty",
+ "method": "POST"
+ }
+ },
+ {
+ "response": {
+ "headers": {
+ "referrer-policy": [
+ "no-referrer"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "date": [
+ "Wed, 15 Apr 2020 23:43:49 GMT"
+ ],
+ "server": [
+ "Apache/2.4.10 (Debian)"
+ ],
+ "content-type": [
+ "application/json"
+ ],
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
+ ],
+ "transfer-encoding": [
+ "chunked"
+ ],
+ "x-xss-protection": [
+ "1; mode=block"
+ ],
+ "strict-transport-security": [
+ "max-age=0; includeSubdomains; preload"
+ ],
+ "cache-control": [
+ "no-store"
+ ],
+ "x-content-type-options": [
+ "nosniff"
+ ]
+ },
+ "status": {
+ "code": 200,
+ "message": "OK"
+ },
+ "body": {
+ "string": "{\"result\":{\"errors\":[],\"fields\":{\"title\":\"modified 2\",\"revisionID\":8433},\"revisionIDFieldInfo\":{\"value\":8433,\"validDomain\":\"https:\\/\\/phab.mercurial-scm.org\"},\"transactions\":[{\"type\":\"title\",\"value\":\"modified 2\"}]},\"error_code\":null,\"error_info\":null}"
+ }
+ },
+ "request": {
+ "headers": {
+ "content-length": [
+ "232"
+ ],
+ "content-type": [
+ "application/x-www-form-urlencoded"
+ ],
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.3.2+497-112d1ad30f88)"
+ ],
+ "accept": [
+ "application/mercurial-0.1"
+ ],
+ "host": [
+ "phab.mercurial-scm.org"
+ ]
+ },
+ "body": "output=json&__conduit__=1¶ms=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22corpus%22%3A+%22modified+2%5Cn%5CnDifferential+Revision%3A+https%3A%2F%2Fphab.mercurial-scm.org%2FD8433%22%7D",
+ "uri": "https://phab.mercurial-scm.org//api/differential.parsecommitmessage",
+ "method": "POST"
+ }
+ },
+ {
+ "response": {
+ "headers": {
+ "referrer-policy": [
+ "no-referrer"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "date": [
+ "Wed, 15 Apr 2020 23:43:49 GMT"
+ ],
+ "server": [
+ "Apache/2.4.10 (Debian)"
+ ],
+ "content-type": [
+ "application/json"
+ ],
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
+ ],
+ "transfer-encoding": [
+ "chunked"
+ ],
+ "x-xss-protection": [
+ "1; mode=block"
+ ],
+ "strict-transport-security": [
+ "max-age=0; includeSubdomains; preload"
+ ],
+ "cache-control": [
+ "no-store"
+ ],
+ "x-content-type-options": [
+ "nosniff"
+ ]
+ },
+ "status": {
+ "code": 200,
+ "message": "OK"
+ },
+ "body": {
+ "string": "{\"result\":{\"object\":{\"id\":8433,\"phid\":\"PHID-DREV-kpkwhtylyxrzikfspl5r\"},\"transactions\":[{\"phid\":\"PHID-XACT-DREV-vhgtprwan3k3k6l\"}]},\"error_code\":null,\"error_info\":null}"
+ }
+ },
+ "request": {
+ "headers": {
+ "content-length": [
+ "353"
+ ],
+ "content-type": [
+ "application/x-www-form-urlencoded"
+ ],
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.3.2+497-112d1ad30f88)"
+ ],
+ "accept": [
+ "application/mercurial-0.1"
+ ],
+ "host": [
+ "phab.mercurial-scm.org"
+ ]
+ },
+ "body": "output=json&__conduit__=1¶ms=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22objectIdentifier%22%3A+8433%2C+%22transactions%22%3A+%5B%7B%22type%22%3A+%22parents.set%22%2C+%22value%22%3A+%5B%22PHID-DREV-l5ocnglddqa4hwbdzcky%22%5D%7D%2C+%7B%22type%22%3A+%22title%22%2C+%22value%22%3A+%22modified+2%22%7D%5D%7D",
+ "uri": "https://phab.mercurial-scm.org//api/differential.revision.edit",
+ "method": "POST"
+ }
+ },
+ {
+ "response": {
+ "headers": {
+ "referrer-policy": [
+ "no-referrer"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "date": [
+ "Wed, 15 Apr 2020 23:43:50 GMT"
+ ],
+ "server": [
+ "Apache/2.4.10 (Debian)"
+ ],
+ "content-type": [
+ "application/json"
+ ],
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
+ ],
+ "transfer-encoding": [
+ "chunked"
+ ],
+ "x-xss-protection": [
+ "1; mode=block"
+ ],
+ "strict-transport-security": [
+ "max-age=0; includeSubdomains; preload"
+ ],
+ "cache-control": [
+ "no-store"
+ ],
+ "x-content-type-options": [
+ "nosniff"
+ ]
+ },
+ "status": {
+ "code": 200,
+ "message": "OK"
+ },
+ "body": {
+ "string": "{\"result\":[{\"id\":\"8434\",\"phid\":\"PHID-DREV-l5ocnglddqa4hwbdzcky\",\"title\":\"modified 1\",\"uri\":\"https:\\/\\/phab.mercurial-scm.org\\/D8434\",\"dateCreated\":\"1586994227\",\"dateModified\":\"1586994229\",\"authorPHID\":\"PHID-USER-tzhaient733lwrlbcag5\",\"status\":\"0\",\"statusName\":\"Needs Review\",\"properties\":{\"draft.broadcast\":true,\"lines.added\":1,\"lines.removed\":1},\"branch\":\"default\",\"summary\":\"\",\"testPlan\":\"\",\"lineCount\":\"2\",\"activeDiffPHID\":\"PHID-DIFF-qat4sqpqqvytzhf7rpti\",\"diffs\":[\"21111\"],\"commits\":[],\"reviewers\":{\"PHID-PROJ-3dvcxzznrjru2xmmses3\":\"PHID-PROJ-3dvcxzznrjru2xmmses3\"},\"ccs\":[\"PHID-USER-q42dn7cc3donqriafhjx\"],\"hashes\":[[\"hgcm\",\"\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\"]],\"auxiliary\":{\"phabricator:projects\":[],\"phabricator:depends-on\":[]},\"repositoryPHID\":\"PHID-REPO-bvunnehri4u2isyr7bc3\",\"sourcePath\":\"\\/\"},{\"id\":\"8433\",\"phid\":\"PHID-DREV-kpkwhtylyxrzikfspl5r\",\"title\":\"modified 2\",\"uri\":\"https:\\/\\/phab.mercurial-scm.org\\/D8433\",\"dateCreated\":\"1586994221\",\"dateModified\":\"1586994229\",\"authorPHID\":\"PHID-USER-tzhaient733lwrlbcag5\",\"status\":\"0\",\"statusName\":\"Needs 
Review\",\"properties\":{\"draft.broadcast\":true,\"lines.added\":1,\"lines.removed\":1},\"branch\":\"default\",\"summary\":\"\",\"testPlan\":\"\",\"lineCount\":\"2\",\"activeDiffPHID\":\"PHID-DIFF-g25jdc5b5khduwpp3p3b\",\"diffs\":[\"21110\"],\"commits\":[],\"reviewers\":{\"PHID-PROJ-3dvcxzznrjru2xmmses3\":\"PHID-PROJ-3dvcxzznrjru2xmmses3\"},\"ccs\":[\"PHID-USER-q42dn7cc3donqriafhjx\"],\"hashes\":[[\"hgcm\",\"\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\"]],\"auxiliary\":{\"phabricator:projects\":[],\"phabricator:depends-on\":[\"PHID-DREV-l5ocnglddqa4hwbdzcky\"]},\"repositoryPHID\":\"PHID-REPO-bvunnehri4u2isyr7bc3\",\"sourcePath\":\"\\/\"}],\"error_code\":null,\"error_info\":null}"
+ }
+ },
+ "request": {
+ "headers": {
+ "content-length": [
+ "154"
+ ],
+ "content-type": [
+ "application/x-www-form-urlencoded"
+ ],
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.3.2+497-112d1ad30f88)"
+ ],
+ "accept": [
+ "application/mercurial-0.1"
+ ],
+ "host": [
+ "phab.mercurial-scm.org"
+ ]
+ },
+ "body": "output=json&__conduit__=1¶ms=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22ids%22%3A+%5B8434%2C+8433%5D%7D",
+ "uri": "https://phab.mercurial-scm.org//api/differential.query",
+ "method": "POST"
+ }
+ },
+ {
+ "response": {
+ "headers": {
+ "referrer-policy": [
+ "no-referrer"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "date": [
+ "Wed, 15 Apr 2020 23:43:50 GMT"
+ ],
+ "server": [
+ "Apache/2.4.10 (Debian)"
+ ],
+ "content-type": [
+ "application/json"
+ ],
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
+ ],
+ "transfer-encoding": [
+ "chunked"
+ ],
+ "x-xss-protection": [
+ "1; mode=block"
+ ],
+ "strict-transport-security": [
+ "max-age=0; includeSubdomains; preload"
+ ],
+ "cache-control": [
+ "no-store"
+ ],
+ "x-content-type-options": [
+ "nosniff"
+ ]
+ },
+ "status": {
+ "code": 200,
+ "message": "OK"
+ },
+ "body": {
+ "string": "{\"result\":null,\"error_code\":null,\"error_info\":null}"
+ }
+ },
+ "request": {
+ "headers": {
+ "content-length": [
+ "482"
+ ],
+ "content-type": [
+ "application/x-www-form-urlencoded"
+ ],
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.3.2+497-112d1ad30f88)"
+ ],
+ "accept": [
+ "application/mercurial-0.1"
+ ],
+ "host": [
+ "phab.mercurial-scm.org"
+ ]
+ },
+ "body": "output=json&__conduit__=1¶ms=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22data%22%3A+%22%7B%5C%22branch%5C%22%3A+%5C%22default%5C%22%2C+%5C%22date%5C%22%3A+%5C%220+0%5C%22%2C+%5C%22node%5C%22%3A+%5C%22876a60d024de493e35a1c6f963f2604056cdc0b9%5C%22%2C+%5C%22parent%5C%22%3A+%5C%225cbade24e0fae40d67c568e86a978a2a946b9aed%5C%22%2C+%5C%22user%5C%22%3A+%5C%22test%5C%22%7D%22%2C+%22diff_id%22%3A+21111%2C+%22name%22%3A+%22hg%3Ameta%22%7D",
+ "uri": "https://phab.mercurial-scm.org//api/differential.setdiffproperty",
+ "method": "POST"
+ }
+ },
+ {
+ "response": {
+ "headers": {
+ "referrer-policy": [
+ "no-referrer"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "date": [
+ "Wed, 15 Apr 2020 23:43:51 GMT"
+ ],
+ "server": [
+ "Apache/2.4.10 (Debian)"
+ ],
+ "content-type": [
+ "application/json"
+ ],
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
+ ],
+ "transfer-encoding": [
+ "chunked"
+ ],
+ "x-xss-protection": [
+ "1; mode=block"
+ ],
+ "strict-transport-security": [
+ "max-age=0; includeSubdomains; preload"
+ ],
+ "cache-control": [
+ "no-store"
+ ],
+ "x-content-type-options": [
+ "nosniff"
+ ]
+ },
+ "status": {
+ "code": 200,
+ "message": "OK"
+ },
+ "body": {
+ "string": "{\"result\":null,\"error_code\":null,\"error_info\":null}"
+ }
+ },
+ "request": {
+ "headers": {
+ "content-length": [
+ "594"
+ ],
+ "content-type": [
+ "application/x-www-form-urlencoded"
+ ],
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.3.2+497-112d1ad30f88)"
+ ],
+ "accept": [
+ "application/mercurial-0.1"
+ ],
+ "host": [
+ "phab.mercurial-scm.org"
+ ]
+ },
+ "body": "output=json&__conduit__=1¶ms=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22data%22%3A+%22%7B%5C%22876a60d024de493e35a1c6f963f2604056cdc0b9%5C%22%3A+%7B%5C%22author%5C%22%3A+%5C%22test%5C%22%2C+%5C%22authorEmail%5C%22%3A+%5C%22test%5C%22%2C+%5C%22branch%5C%22%3A+%5C%22default%5C%22%2C+%5C%22commit%5C%22%3A+%5C%22876a60d024de493e35a1c6f963f2604056cdc0b9%5C%22%2C+%5C%22parents%5C%22%3A+%5B%5C%225cbade24e0fae40d67c568e86a978a2a946b9aed%5C%22%5D%2C+%5C%22time%5C%22%3A+0%7D%7D%22%2C+%22diff_id%22%3A+21111%2C+%22name%22%3A+%22local%3Acommits%22%7D",
+ "uri": "https://phab.mercurial-scm.org//api/differential.setdiffproperty",
+ "method": "POST"
+ }
+ },
+ {
+ "response": {
+ "headers": {
+ "referrer-policy": [
+ "no-referrer"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "date": [
+ "Wed, 15 Apr 2020 23:43:51 GMT"
+ ],
+ "server": [
+ "Apache/2.4.10 (Debian)"
+ ],
+ "content-type": [
+ "application/json"
+ ],
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
+ ],
+ "transfer-encoding": [
+ "chunked"
+ ],
+ "x-xss-protection": [
+ "1; mode=block"
+ ],
+ "strict-transport-security": [
+ "max-age=0; includeSubdomains; preload"
+ ],
+ "cache-control": [
+ "no-store"
+ ],
+ "x-content-type-options": [
+ "nosniff"
+ ]
+ },
+ "status": {
+ "code": 200,
+ "message": "OK"
+ },
+ "body": {
+ "string": "{\"result\":null,\"error_code\":null,\"error_info\":null}"
+ }
+ },
+ "request": {
+ "headers": {
+ "content-length": [
+ "488"
+ ],
+ "content-type": [
+ "application/x-www-form-urlencoded"
+ ],
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.3.2+497-112d1ad30f88)"
+ ],
+ "accept": [
+ "application/mercurial-0.1"
+ ],
+ "host": [
+ "phab.mercurial-scm.org"
+ ]
+ },
+ "body": "output=json&__conduit__=1¶ms=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22data%22%3A+%22%7B%5C%22branch%5C%22%3A+%5C%22default%5C%22%2C+%5C%22date%5C%22%3A+%5C%220+0%5C%22%2C+%5C%22node%5C%22%3A+%5C%220c6523cb1d0f560a958bcc0f4f938c91cb1141dc%5C%22%2C+%5C%22parent%5C%22%3A+%5C%22876a60d024de493e35a1c6f963f2604056cdc0b9%5C%22%2C+%5C%22user%5C%22%3A+%5C%22test%5C%22%7D%22%2C+%22diff_id%22%3A+%2221110%22%2C+%22name%22%3A+%22hg%3Ameta%22%7D",
+ "uri": "https://phab.mercurial-scm.org//api/differential.setdiffproperty",
+ "method": "POST"
+ }
+ },
+ {
+ "response": {
+ "headers": {
+ "referrer-policy": [
+ "no-referrer"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "date": [
+ "Wed, 15 Apr 2020 23:43:52 GMT"
+ ],
+ "server": [
+ "Apache/2.4.10 (Debian)"
+ ],
+ "content-type": [
+ "application/json"
+ ],
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
+ ],
+ "transfer-encoding": [
+ "chunked"
+ ],
+ "x-xss-protection": [
+ "1; mode=block"
+ ],
+ "strict-transport-security": [
+ "max-age=0; includeSubdomains; preload"
+ ],
+ "cache-control": [
+ "no-store"
+ ],
+ "x-content-type-options": [
+ "nosniff"
+ ]
+ },
+ "status": {
+ "code": 200,
+ "message": "OK"
+ },
+ "body": {
+ "string": "{\"result\":null,\"error_code\":null,\"error_info\":null}"
+ }
+ },
+ "request": {
+ "headers": {
+ "content-length": [
+ "600"
+ ],
+ "content-type": [
+ "application/x-www-form-urlencoded"
+ ],
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.3.2+497-112d1ad30f88)"
+ ],
+ "accept": [
+ "application/mercurial-0.1"
+ ],
+ "host": [
+ "phab.mercurial-scm.org"
+ ]
+ },
+ "body": "output=json&__conduit__=1¶ms=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22data%22%3A+%22%7B%5C%220c6523cb1d0f560a958bcc0f4f938c91cb1141dc%5C%22%3A+%7B%5C%22author%5C%22%3A+%5C%22test%5C%22%2C+%5C%22authorEmail%5C%22%3A+%5C%22test%5C%22%2C+%5C%22branch%5C%22%3A+%5C%22default%5C%22%2C+%5C%22commit%5C%22%3A+%5C%220c6523cb1d0f560a958bcc0f4f938c91cb1141dc%5C%22%2C+%5C%22parents%5C%22%3A+%5B%5C%22876a60d024de493e35a1c6f963f2604056cdc0b9%5C%22%5D%2C+%5C%22time%5C%22%3A+0%7D%7D%22%2C+%22diff_id%22%3A+%2221110%22%2C+%22name%22%3A+%22local%3Acommits%22%7D",
+ "uri": "https://phab.mercurial-scm.org//api/differential.setdiffproperty",
+ "method": "POST"
+ }
+ }
+ ]
+}
\ No newline at end of file
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/phabricator/phabsend-fold-extend-end.json Thu Apr 16 22:51:09 2020 +0530
@@ -0,0 +1,889 @@
+{
+ "interactions": [
+ {
+ "request": {
+ "headers": {
+ "content-length": [
+ "170"
+ ],
+ "accept": [
+ "application/mercurial-0.1"
+ ],
+ "content-type": [
+ "application/x-www-form-urlencoded"
+ ],
+ "host": [
+ "phab.mercurial-scm.org"
+ ],
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.3.2+463-d35fbae9dd33+20200408)"
+ ]
+ },
+ "body": "__conduit__=1¶ms=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22revisionIDs%22%3A+%5B8387%2C+8387%2C+8387%5D%7D&output=json",
+ "uri": "https://phab.mercurial-scm.org//api/differential.querydiffs",
+ "method": "POST"
+ },
+ "response": {
+ "headers": {
+ "x-xss-protection": [
+ "1; mode=block"
+ ],
+ "strict-transport-security": [
+ "max-age=0; includeSubdomains; preload"
+ ],
+ "transfer-encoding": [
+ "chunked"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
+ ],
+ "content-type": [
+ "application/json"
+ ],
+ "referrer-policy": [
+ "no-referrer"
+ ],
+ "server": [
+ "Apache/2.4.10 (Debian)"
+ ],
+ "cache-control": [
+ "no-store"
+ ],
+ "x-content-type-options": [
+ "nosniff"
+ ],
+ "date": [
+ "Wed, 08 Apr 2020 21:13:13 GMT"
+ ]
+ },
+ "body": {
+ "string": "{\"result\":{\"21008\":{\"id\":\"21008\",\"revisionID\":\"8387\",\"dateCreated\":\"1586380389\",\"dateModified\":\"1586380391\",\"sourceControlBaseRevision\":\"98d480e0d494906d9758c44c622951b429afd862\",\"sourceControlPath\":\"\\/\",\"sourceControlSystem\":\"hg\",\"branch\":\"default\",\"bookmark\":null,\"creationMethod\":\"phabsend\",\"description\":null,\"unitStatus\":\"0\",\"lintStatus\":\"0\",\"changes\":[{\"id\":\"56827\",\"metadata\":{\"line:first\":1,\"hash.effect\":\"yXvxVg6uibwd\"},\"oldPath\":null,\"currentPath\":\"file2.txt\",\"awayPaths\":[],\"oldProperties\":[],\"newProperties\":{\"unix:filemode\":\"100644\"},\"type\":\"1\",\"fileType\":\"1\",\"commitHash\":null,\"addLines\":\"1\",\"delLines\":\"0\",\"hunks\":[{\"oldOffset\":\"0\",\"newOffset\":\"1\",\"oldLength\":\"0\",\"newLength\":\"1\",\"addLines\":null,\"delLines\":null,\"isMissingOldNewline\":null,\"isMissingNewNewline\":null,\"corpus\":\"+modified\\n\"}]},{\"id\":\"56826\",\"metadata\":{\"line:first\":1,\"hash.effect\":\"cAKuVP31KNrx\"},\"oldPath\":\"file.txt\",\"currentPath\":\"file.txt\",\"awayPaths\":[],\"oldProperties\":[],\"newProperties\":[],\"type\":\"2\",\"fileType\":\"1\",\"commitHash\":null,\"addLines\":\"1\",\"delLines\":\"1\",\"hunks\":[{\"oldOffset\":\"1\",\"newOffset\":\"1\",\"oldLength\":\"1\",\"newLength\":\"1\",\"addLines\":null,\"delLines\":null,\"isMissingOldNewline\":null,\"isMissingNewNewline\":null,\"corpus\":\"-added\\n+mod3\\n\"}]}],\"properties\":{\"hg:meta\":{\"branch\":\"default\",\"date\":\"0 
0\",\"node\":\"e4edb1fe3565eaaecc1787ada4fcc2b7e9018fac\",\"parent\":\"98d480e0d494906d9758c44c622951b429afd862\",\"user\":\"test\"},\"local:commits\":{\"0124e5474c880e4fb40c8326ad2b75ae3e57ee5f\":{\"author\":\"test\",\"authorEmail\":\"test\",\"branch\":\"default\",\"commit\":\"0124e5474c880e4fb40c8326ad2b75ae3e57ee5f\",\"parents\":[\"602c4e7382436988e46c0d56fb3883968302b3f6\"],\"time\":0},\"602c4e7382436988e46c0d56fb3883968302b3f6\":{\"author\":\"test\",\"authorEmail\":\"test\",\"branch\":\"default\",\"commit\":\"602c4e7382436988e46c0d56fb3883968302b3f6\",\"parents\":[\"98d480e0d494906d9758c44c622951b429afd862\"],\"time\":0},\"e4edb1fe3565eaaecc1787ada4fcc2b7e9018fac\":{\"author\":\"test\",\"authorEmail\":\"test\",\"branch\":\"default\",\"commit\":\"e4edb1fe3565eaaecc1787ada4fcc2b7e9018fac\",\"parents\":[\"0124e5474c880e4fb40c8326ad2b75ae3e57ee5f\"],\"time\":0}}},\"authorName\":\"test\",\"authorEmail\":\"test\"},\"21007\":{\"id\":\"21007\",\"revisionID\":\"8387\",\"dateCreated\":\"1586380382\",\"dateModified\":\"1586380385\",\"sourceControlBaseRevision\":\"98d480e0d494906d9758c44c622951b429afd862\",\"sourceControlPath\":\"\\/\",\"sourceControlSystem\":\"hg\",\"branch\":\"default\",\"bookmark\":null,\"creationMethod\":\"phabsend\",\"description\":null,\"unitStatus\":\"0\",\"lintStatus\":\"0\",\"changes\":[{\"id\":\"56825\",\"metadata\":{\"line:first\":1,\"hash.effect\":\"yXvxVg6uibwd\"},\"oldPath\":null,\"currentPath\":\"file2.txt\",\"awayPaths\":[],\"oldProperties\":[],\"newProperties\":{\"unix:filemode\":\"100644\"},\"type\":\"1\",\"fileType\":\"1\",\"commitHash\":null,\"addLines\":\"1\",\"delLines\":\"0\",\"hunks\":[{\"oldOffset\":\"0\",\"newOffset\":\"1\",\"oldLength\":\"0\",\"newLength\":\"1\",\"addLines\":null,\"delLines\":null,\"isMissingOldNewline\":null,\"isMissingNewNewline\":null,\"corpus\":\"+modified\\n\"}]},{\"id\":\"56824\",\"metadata\":{\"line:first\":1,\"hash.effect\":\"cAKuVP31KNrx\"},\"oldPath\":\"file.txt\",\"currentPath\":\"file.txt\",\"awayPath
s\":[],\"oldProperties\":[],\"newProperties\":[],\"type\":\"2\",\"fileType\":\"1\",\"commitHash\":null,\"addLines\":\"1\",\"delLines\":\"1\",\"hunks\":[{\"oldOffset\":\"1\",\"newOffset\":\"1\",\"oldLength\":\"1\",\"newLength\":\"1\",\"addLines\":null,\"delLines\":null,\"isMissingOldNewline\":null,\"isMissingNewNewline\":null,\"corpus\":\"-added\\n+mod3\\n\"}]}],\"properties\":{\"hg:meta\":{\"branch\":\"default\",\"date\":\"0 0\",\"node\":\"e4edb1fe3565eaaecc1787ada4fcc2b7e9018fac\",\"parent\":\"98d480e0d494906d9758c44c622951b429afd862\",\"user\":\"test\"},\"local:commits\":{\"0124e5474c880e4fb40c8326ad2b75ae3e57ee5f\":{\"author\":\"test\",\"authorEmail\":\"test\",\"branch\":\"default\",\"commit\":\"0124e5474c880e4fb40c8326ad2b75ae3e57ee5f\",\"parents\":[\"602c4e7382436988e46c0d56fb3883968302b3f6\"],\"time\":0},\"602c4e7382436988e46c0d56fb3883968302b3f6\":{\"author\":\"test\",\"authorEmail\":\"test\",\"branch\":\"default\",\"commit\":\"602c4e7382436988e46c0d56fb3883968302b3f6\",\"parents\":[\"98d480e0d494906d9758c44c622951b429afd862\"],\"time\":0},\"e4edb1fe3565eaaecc1787ada4fcc2b7e9018fac\":{\"author\":\"test\",\"authorEmail\":\"test\",\"branch\":\"default\",\"commit\":\"e4edb1fe3565eaaecc1787ada4fcc2b7e9018fac\",\"parents\":[\"0124e5474c880e4fb40c8326ad2b75ae3e57ee5f\"],\"time\":0}}},\"authorName\":\"test\",\"authorEmail\":\"test\"},\"21006\":{\"id\":\"21006\",\"revisionID\":\"8387\",\"dateCreated\":\"1586380375\",\"dateModified\":\"1586380377\",\"sourceControlBaseRevision\":\"98d480e0d494906d9758c44c622951b429afd862\",\"sourceControlPath\":\"\\/\",\"sourceControlSystem\":\"hg\",\"branch\":\"default\",\"bookmark\":null,\"creationMethod\":\"phabsend\",\"description\":null,\"unitStatus\":\"0\",\"lintStatus\":\"0\",\"changes\":[{\"id\":\"56823\",\"metadata\":{\"line:first\":1,\"hash.effect\":\"cAKuVP31KNrx\"},\"oldPath\":\"file.txt\",\"currentPath\":\"file.txt\",\"awayPaths\":[],\"oldProperties\":[],\"newProperties\":[],\"type\":\"2\",\"fileType\":\"1\",\"commitHash\"
:null,\"addLines\":\"1\",\"delLines\":\"1\",\"hunks\":[{\"oldOffset\":\"1\",\"newOffset\":\"1\",\"oldLength\":\"1\",\"newLength\":\"1\",\"addLines\":null,\"delLines\":null,\"isMissingOldNewline\":null,\"isMissingNewNewline\":null,\"corpus\":\"-added\\n+mod3\\n\"}]}],\"properties\":{\"hg:meta\":{\"branch\":\"default\",\"date\":\"0 0\",\"node\":\"921f8265efbd92e92bfa5d7a0e047908de9844a5\",\"parent\":\"98d480e0d494906d9758c44c622951b429afd862\",\"user\":\"test\"},\"local:commits\":{\"602c4e7382436988e46c0d56fb3883968302b3f6\":{\"author\":\"test\",\"authorEmail\":\"test\",\"branch\":\"default\",\"commit\":\"602c4e7382436988e46c0d56fb3883968302b3f6\",\"parents\":[\"98d480e0d494906d9758c44c622951b429afd862\"],\"time\":0},\"832553266fe8c3330d968e6987df4ae793483b2b\":{\"author\":\"test\",\"authorEmail\":\"test\",\"branch\":\"default\",\"commit\":\"832553266fe8c3330d968e6987df4ae793483b2b\",\"parents\":[\"602c4e7382436988e46c0d56fb3883968302b3f6\"],\"time\":0},\"921f8265efbd92e92bfa5d7a0e047908de9844a5\":{\"author\":\"test\",\"authorEmail\":\"test\",\"branch\":\"default\",\"commit\":\"921f8265efbd92e92bfa5d7a0e047908de9844a5\",\"parents\":[\"832553266fe8c3330d968e6987df4ae793483b2b\"],\"time\":0}}},\"authorName\":\"test\",\"authorEmail\":\"test\"}},\"error_code\":null,\"error_info\":null}"
+ },
+ "status": {
+ "code": 200,
+ "message": "OK"
+ }
+ }
+ },
+ {
+ "request": {
+ "headers": {
+ "content-length": [
+ "183"
+ ],
+ "accept": [
+ "application/mercurial-0.1"
+ ],
+ "content-type": [
+ "application/x-www-form-urlencoded"
+ ],
+ "host": [
+ "phab.mercurial-scm.org"
+ ],
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.3.2+463-d35fbae9dd33+20200408)"
+ ]
+ },
+ "body": "__conduit__=1¶ms=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22constraints%22%3A+%7B%22callsigns%22%3A+%5B%22HG%22%5D%7D%7D&output=json",
+ "uri": "https://phab.mercurial-scm.org//api/diffusion.repository.search",
+ "method": "POST"
+ },
+ "response": {
+ "headers": {
+ "x-xss-protection": [
+ "1; mode=block"
+ ],
+ "strict-transport-security": [
+ "max-age=0; includeSubdomains; preload"
+ ],
+ "transfer-encoding": [
+ "chunked"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
+ ],
+ "content-type": [
+ "application/json"
+ ],
+ "referrer-policy": [
+ "no-referrer"
+ ],
+ "server": [
+ "Apache/2.4.10 (Debian)"
+ ],
+ "cache-control": [
+ "no-store"
+ ],
+ "x-content-type-options": [
+ "nosniff"
+ ],
+ "date": [
+ "Wed, 08 Apr 2020 21:13:14 GMT"
+ ]
+ },
+ "body": {
+ "string": "{\"result\":{\"data\":[{\"id\":2,\"type\":\"REPO\",\"phid\":\"PHID-REPO-bvunnehri4u2isyr7bc3\",\"fields\":{\"name\":\"Mercurial\",\"vcs\":\"hg\",\"callsign\":\"HG\",\"shortName\":null,\"status\":\"active\",\"isImporting\":false,\"almanacServicePHID\":null,\"refRules\":{\"fetchRules\":[],\"trackRules\":[],\"permanentRefRules\":[]},\"spacePHID\":null,\"dateCreated\":1498761653,\"dateModified\":1500403184,\"policy\":{\"view\":\"public\",\"edit\":\"admin\",\"diffusion.push\":\"users\"}},\"attachments\":{}}],\"maps\":{},\"query\":{\"queryKey\":null},\"cursor\":{\"limit\":100,\"after\":null,\"before\":null,\"order\":null}},\"error_code\":null,\"error_info\":null}"
+ },
+ "status": {
+ "code": 200,
+ "message": "OK"
+ }
+ }
+ },
+ {
+ "request": {
+ "headers": {
+ "content-length": [
+ "1739"
+ ],
+ "accept": [
+ "application/mercurial-0.1"
+ ],
+ "content-type": [
+ "application/x-www-form-urlencoded"
+ ],
+ "host": [
+ "phab.mercurial-scm.org"
+ ],
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.3.2+463-d35fbae9dd33+20200408)"
+ ]
+ },
+ "body": "__conduit__=1¶ms=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22bookmark%22%3A+null%2C+%22branch%22%3A+%22default%22%2C+%22changes%22%3A+%7B%22file.txt%22%3A+%7B%22addLines%22%3A+1%2C+%22awayPaths%22%3A+%5B%5D%2C+%22commitHash%22%3A+null%2C+%22currentPath%22%3A+%22file.txt%22%2C+%22delLines%22%3A+1%2C+%22fileType%22%3A+1%2C+%22hunks%22%3A+%5B%7B%22addLines%22%3A+1%2C+%22corpus%22%3A+%22-added%5Cn%2Bmod3%5Cn%22%2C+%22delLines%22%3A+1%2C+%22newLength%22%3A+1%2C+%22newOffset%22%3A+1%2C+%22oldLength%22%3A+1%2C+%22oldOffset%22%3A+1%7D%5D%2C+%22metadata%22%3A+%7B%7D%2C+%22newProperties%22%3A+%7B%7D%2C+%22oldPath%22%3A+%22file.txt%22%2C+%22oldProperties%22%3A+%7B%7D%2C+%22type%22%3A+2%7D%2C+%22file2.txt%22%3A+%7B%22addLines%22%3A+1%2C+%22awayPaths%22%3A+%5B%5D%2C+%22commitHash%22%3A+null%2C+%22currentPath%22%3A+%22file2.txt%22%2C+%22delLines%22%3A+0%2C+%22fileType%22%3A+1%2C+%22hunks%22%3A+%5B%7B%22addLines%22%3A+1%2C+%22corpus%22%3A+%22%2Banother+mod%5Cn%22%2C+%22delLines%22%3A+0%2C+%22newLength%22%3A+1%2C+%22newOffset%22%3A+1%2C+%22oldLength%22%3A+0%2C+%22oldOffset%22%3A+0%7D%5D%2C+%22metadata%22%3A+%7B%7D%2C+%22newProperties%22%3A+%7B%22unix%3Afilemode%22%3A+%22100644%22%7D%2C+%22oldPath%22%3A+null%2C+%22oldProperties%22%3A+%7B%7D%2C+%22type%22%3A+1%7D%7D%2C+%22creationMethod%22%3A+%22phabsend%22%2C+%22lintStatus%22%3A+%22none%22%2C+%22repositoryPHID%22%3A+%22PHID-REPO-bvunnehri4u2isyr7bc3%22%2C+%22sourceControlBaseRevision%22%3A+%2298d480e0d494906d9758c44c622951b429afd862%22%2C+%22sourceControlPath%22%3A+%22%2F%22%2C+%22sourceControlSystem%22%3A+%22hg%22%2C+%22sourceMachine%22%3A+%22%22%2C+%22sourcePath%22%3A+%22%2F%22%2C+%22unitStatus%22%3A+%22none%22%7D&output=json",
+ "uri": "https://phab.mercurial-scm.org//api/differential.creatediff",
+ "method": "POST"
+ },
+ "response": {
+ "headers": {
+ "x-xss-protection": [
+ "1; mode=block"
+ ],
+ "strict-transport-security": [
+ "max-age=0; includeSubdomains; preload"
+ ],
+ "transfer-encoding": [
+ "chunked"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
+ ],
+ "content-type": [
+ "application/json"
+ ],
+ "referrer-policy": [
+ "no-referrer"
+ ],
+ "server": [
+ "Apache/2.4.10 (Debian)"
+ ],
+ "cache-control": [
+ "no-store"
+ ],
+ "x-content-type-options": [
+ "nosniff"
+ ],
+ "date": [
+ "Wed, 08 Apr 2020 21:13:14 GMT"
+ ]
+ },
+ "body": {
+ "string": "{\"result\":{\"diffid\":21009,\"phid\":\"PHID-DIFF-kvumllarf5ym6ewayp2l\",\"uri\":\"https:\\/\\/phab.mercurial-scm.org\\/differential\\/diff\\/21009\\/\"},\"error_code\":null,\"error_info\":null}"
+ },
+ "status": {
+ "code": 200,
+ "message": "OK"
+ }
+ }
+ },
+ {
+ "request": {
+ "headers": {
+ "content-length": [
+ "482"
+ ],
+ "accept": [
+ "application/mercurial-0.1"
+ ],
+ "content-type": [
+ "application/x-www-form-urlencoded"
+ ],
+ "host": [
+ "phab.mercurial-scm.org"
+ ],
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.3.2+463-d35fbae9dd33+20200408)"
+ ]
+ },
+ "body": "__conduit__=1¶ms=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22data%22%3A+%22%7B%5C%22branch%5C%22%3A+%5C%22default%5C%22%2C+%5C%22date%5C%22%3A+%5C%220+0%5C%22%2C+%5C%22node%5C%22%3A+%5C%2294aaae213b2397d1546801be89f317b63f0a6b93%5C%22%2C+%5C%22parent%5C%22%3A+%5C%2298d480e0d494906d9758c44c622951b429afd862%5C%22%2C+%5C%22user%5C%22%3A+%5C%22test%5C%22%7D%22%2C+%22diff_id%22%3A+21009%2C+%22name%22%3A+%22hg%3Ameta%22%7D&output=json",
+ "uri": "https://phab.mercurial-scm.org//api/differential.setdiffproperty",
+ "method": "POST"
+ },
+ "response": {
+ "headers": {
+ "x-xss-protection": [
+ "1; mode=block"
+ ],
+ "strict-transport-security": [
+ "max-age=0; includeSubdomains; preload"
+ ],
+ "transfer-encoding": [
+ "chunked"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
+ ],
+ "content-type": [
+ "application/json"
+ ],
+ "referrer-policy": [
+ "no-referrer"
+ ],
+ "server": [
+ "Apache/2.4.10 (Debian)"
+ ],
+ "cache-control": [
+ "no-store"
+ ],
+ "x-content-type-options": [
+ "nosniff"
+ ],
+ "date": [
+ "Wed, 08 Apr 2020 21:13:15 GMT"
+ ]
+ },
+ "body": {
+ "string": "{\"result\":null,\"error_code\":null,\"error_info\":null}"
+ },
+ "status": {
+ "code": 200,
+ "message": "OK"
+ }
+ }
+ },
+ {
+ "request": {
+ "headers": {
+ "content-length": [
+ "1746"
+ ],
+ "accept": [
+ "application/mercurial-0.1"
+ ],
+ "content-type": [
+ "application/x-www-form-urlencoded"
+ ],
+ "host": [
+ "phab.mercurial-scm.org"
+ ],
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.3.2+463-d35fbae9dd33+20200408)"
+ ]
+ },
+ "body": "__conduit__=1¶ms=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22data%22%3A+%22%7B%5C%220124e5474c880e4fb40c8326ad2b75ae3e57ee5f%5C%22%3A+%7B%5C%22author%5C%22%3A+%5C%22test%5C%22%2C+%5C%22authorEmail%5C%22%3A+%5C%22test%5C%22%2C+%5C%22branch%5C%22%3A+%5C%22default%5C%22%2C+%5C%22commit%5C%22%3A+%5C%220124e5474c880e4fb40c8326ad2b75ae3e57ee5f%5C%22%2C+%5C%22parents%5C%22%3A+%5B%5C%22602c4e7382436988e46c0d56fb3883968302b3f6%5C%22%5D%2C+%5C%22time%5C%22%3A+0%7D%2C+%5C%22602c4e7382436988e46c0d56fb3883968302b3f6%5C%22%3A+%7B%5C%22author%5C%22%3A+%5C%22test%5C%22%2C+%5C%22authorEmail%5C%22%3A+%5C%22test%5C%22%2C+%5C%22branch%5C%22%3A+%5C%22default%5C%22%2C+%5C%22commit%5C%22%3A+%5C%22602c4e7382436988e46c0d56fb3883968302b3f6%5C%22%2C+%5C%22parents%5C%22%3A+%5B%5C%2298d480e0d494906d9758c44c622951b429afd862%5C%22%5D%2C+%5C%22time%5C%22%3A+0%7D%2C+%5C%2294aaae213b2397d1546801be89f317b63f0a6b93%5C%22%3A+%7B%5C%22author%5C%22%3A+%5C%22test%5C%22%2C+%5C%22authorEmail%5C%22%3A+%5C%22test%5C%22%2C+%5C%22branch%5C%22%3A+%5C%22default%5C%22%2C+%5C%22commit%5C%22%3A+%5C%2294aaae213b2397d1546801be89f317b63f0a6b93%5C%22%2C+%5C%22parents%5C%22%3A+%5B%5C%22e4edb1fe3565eaaecc1787ada4fcc2b7e9018fac%5C%22%5D%2C+%5C%22time%5C%22%3A+0%7D%2C+%5C%22e4edb1fe3565eaaecc1787ada4fcc2b7e9018fac%5C%22%3A+%7B%5C%22author%5C%22%3A+%5C%22test%5C%22%2C+%5C%22authorEmail%5C%22%3A+%5C%22test%5C%22%2C+%5C%22branch%5C%22%3A+%5C%22default%5C%22%2C+%5C%22commit%5C%22%3A+%5C%22e4edb1fe3565eaaecc1787ada4fcc2b7e9018fac%5C%22%2C+%5C%22parents%5C%22%3A+%5B%5C%220124e5474c880e4fb40c8326ad2b75ae3e57ee5f%5C%22%5D%2C+%5C%22time%5C%22%3A+0%7D%7D%22%2C+%22diff_id%22%3A+21009%2C+%22name%22%3A+%22local%3Acommits%22%7D&output=json",
+ "uri": "https://phab.mercurial-scm.org//api/differential.setdiffproperty",
+ "method": "POST"
+ },
+ "response": {
+ "headers": {
+ "x-xss-protection": [
+ "1; mode=block"
+ ],
+ "strict-transport-security": [
+ "max-age=0; includeSubdomains; preload"
+ ],
+ "transfer-encoding": [
+ "chunked"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
+ ],
+ "content-type": [
+ "application/json"
+ ],
+ "referrer-policy": [
+ "no-referrer"
+ ],
+ "server": [
+ "Apache/2.4.10 (Debian)"
+ ],
+ "cache-control": [
+ "no-store"
+ ],
+ "x-content-type-options": [
+ "nosniff"
+ ],
+ "date": [
+ "Wed, 08 Apr 2020 21:13:15 GMT"
+ ]
+ },
+ "body": {
+ "string": "{\"result\":null,\"error_code\":null,\"error_info\":null}"
+ },
+ "status": {
+ "code": 200,
+ "message": "OK"
+ }
+ }
+ },
+ {
+ "request": {
+ "headers": {
+ "content-length": [
+ "364"
+ ],
+ "accept": [
+ "application/mercurial-0.1"
+ ],
+ "content-type": [
+ "application/x-www-form-urlencoded"
+ ],
+ "host": [
+ "phab.mercurial-scm.org"
+ ],
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.3.2+463-d35fbae9dd33+20200408)"
+ ]
+ },
+ "body": "__conduit__=1¶ms=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22corpus%22%3A+%22one%3A+first+commit+to+review%5Cn%5CnThis+file+was+modified+with+%27mod1%27+as+its+contents.%5Cn%5CnTest+Plan%3A%5CnLOL%21++What+testing%3F%21%5Cn%5CnDifferential+Revision%3A+https%3A%2F%2Fphab.mercurial-scm.org%2FD8387%22%7D&output=json",
+ "uri": "https://phab.mercurial-scm.org//api/differential.parsecommitmessage",
+ "method": "POST"
+ },
+ "response": {
+ "headers": {
+ "x-xss-protection": [
+ "1; mode=block"
+ ],
+ "strict-transport-security": [
+ "max-age=0; includeSubdomains; preload"
+ ],
+ "transfer-encoding": [
+ "chunked"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
+ ],
+ "content-type": [
+ "application/json"
+ ],
+ "referrer-policy": [
+ "no-referrer"
+ ],
+ "server": [
+ "Apache/2.4.10 (Debian)"
+ ],
+ "cache-control": [
+ "no-store"
+ ],
+ "x-content-type-options": [
+ "nosniff"
+ ],
+ "date": [
+ "Wed, 08 Apr 2020 21:13:16 GMT"
+ ]
+ },
+ "body": {
+ "string": "{\"result\":{\"errors\":[],\"fields\":{\"title\":\"one: first commit to review\",\"testPlan\":\"LOL! What testing?!\",\"revisionID\":8387,\"summary\":\"This file was modified with 'mod1' as its contents.\"},\"revisionIDFieldInfo\":{\"value\":8387,\"validDomain\":\"https:\\/\\/phab.mercurial-scm.org\"},\"transactions\":[{\"type\":\"title\",\"value\":\"one: first commit to review\"},{\"type\":\"testPlan\",\"value\":\"LOL! What testing?!\"},{\"type\":\"summary\",\"value\":\"This file was modified with 'mod1' as its contents.\"}]},\"error_code\":null,\"error_info\":null}"
+ },
+ "status": {
+ "code": 200,
+ "message": "OK"
+ }
+ }
+ },
+ {
+ "request": {
+ "headers": {
+ "content-length": [
+ "361"
+ ],
+ "accept": [
+ "application/mercurial-0.1"
+ ],
+ "content-type": [
+ "application/x-www-form-urlencoded"
+ ],
+ "host": [
+ "phab.mercurial-scm.org"
+ ],
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.3.2+463-d35fbae9dd33+20200408)"
+ ]
+ },
+ "body": "__conduit__=1¶ms=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22corpus%22%3A+%22two%3A+second+commit+to+review%5Cn%5CnThis+file+was+modified+with+%27mod2%27+as+its+contents.%5Cn%5CnTest+Plan%3A%5CnHaha%21+yeah%2C+right.%5Cn%5CnDifferential+Revision%3A+https%3A%2F%2Fphab.mercurial-scm.org%2FD8387%22%7D&output=json",
+ "uri": "https://phab.mercurial-scm.org//api/differential.parsecommitmessage",
+ "method": "POST"
+ },
+ "response": {
+ "headers": {
+ "x-xss-protection": [
+ "1; mode=block"
+ ],
+ "strict-transport-security": [
+ "max-age=0; includeSubdomains; preload"
+ ],
+ "transfer-encoding": [
+ "chunked"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
+ ],
+ "content-type": [
+ "application/json"
+ ],
+ "referrer-policy": [
+ "no-referrer"
+ ],
+ "server": [
+ "Apache/2.4.10 (Debian)"
+ ],
+ "cache-control": [
+ "no-store"
+ ],
+ "x-content-type-options": [
+ "nosniff"
+ ],
+ "date": [
+ "Wed, 08 Apr 2020 21:13:16 GMT"
+ ]
+ },
+ "body": {
+ "string": "{\"result\":{\"errors\":[],\"fields\":{\"title\":\"two: second commit to review\",\"testPlan\":\"Haha! yeah, right.\",\"revisionID\":8387,\"summary\":\"This file was modified with 'mod2' as its contents.\"},\"revisionIDFieldInfo\":{\"value\":8387,\"validDomain\":\"https:\\/\\/phab.mercurial-scm.org\"},\"transactions\":[{\"type\":\"title\",\"value\":\"two: second commit to review\"},{\"type\":\"testPlan\",\"value\":\"Haha! yeah, right.\"},{\"type\":\"summary\",\"value\":\"This file was modified with 'mod2' as its contents.\"}]},\"error_code\":null,\"error_info\":null}"
+ },
+ "status": {
+ "code": 200,
+ "message": "OK"
+ }
+ }
+ },
+ {
+ "request": {
+ "headers": {
+ "content-length": [
+ "260"
+ ],
+ "accept": [
+ "application/mercurial-0.1"
+ ],
+ "content-type": [
+ "application/x-www-form-urlencoded"
+ ],
+ "host": [
+ "phab.mercurial-scm.org"
+ ],
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.3.2+463-d35fbae9dd33+20200408)"
+ ]
+ },
+ "body": "__conduit__=1¶ms=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22corpus%22%3A+%223%3A+a+commit+with+no+detailed+message%5Cn%5CnDifferential+Revision%3A+https%3A%2F%2Fphab.mercurial-scm.org%2FD8387%22%7D&output=json",
+ "uri": "https://phab.mercurial-scm.org//api/differential.parsecommitmessage",
+ "method": "POST"
+ },
+ "response": {
+ "headers": {
+ "x-xss-protection": [
+ "1; mode=block"
+ ],
+ "strict-transport-security": [
+ "max-age=0; includeSubdomains; preload"
+ ],
+ "transfer-encoding": [
+ "chunked"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
+ ],
+ "content-type": [
+ "application/json"
+ ],
+ "referrer-policy": [
+ "no-referrer"
+ ],
+ "server": [
+ "Apache/2.4.10 (Debian)"
+ ],
+ "cache-control": [
+ "no-store"
+ ],
+ "x-content-type-options": [
+ "nosniff"
+ ],
+ "date": [
+ "Wed, 08 Apr 2020 21:13:16 GMT"
+ ]
+ },
+ "body": {
+ "string": "{\"result\":{\"errors\":[],\"fields\":{\"title\":\"3: a commit with no detailed message\",\"revisionID\":8387},\"revisionIDFieldInfo\":{\"value\":8387,\"validDomain\":\"https:\\/\\/phab.mercurial-scm.org\"},\"transactions\":[{\"type\":\"title\",\"value\":\"3: a commit with no detailed message\"}]},\"error_code\":null,\"error_info\":null}"
+ },
+ "status": {
+ "code": 200,
+ "message": "OK"
+ }
+ }
+ },
+ {
+ "request": {
+ "headers": {
+ "content-length": [
+ "174"
+ ],
+ "accept": [
+ "application/mercurial-0.1"
+ ],
+ "content-type": [
+ "application/x-www-form-urlencoded"
+ ],
+ "host": [
+ "phab.mercurial-scm.org"
+ ],
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.3.2+463-d35fbae9dd33+20200408)"
+ ]
+ },
+ "body": "__conduit__=1¶ms=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22corpus%22%3A+%22four%3A+extend+the+fold+range%22%7D&output=json",
+ "uri": "https://phab.mercurial-scm.org//api/differential.parsecommitmessage",
+ "method": "POST"
+ },
+ "response": {
+ "headers": {
+ "x-xss-protection": [
+ "1; mode=block"
+ ],
+ "strict-transport-security": [
+ "max-age=0; includeSubdomains; preload"
+ ],
+ "transfer-encoding": [
+ "chunked"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
+ ],
+ "content-type": [
+ "application/json"
+ ],
+ "referrer-policy": [
+ "no-referrer"
+ ],
+ "server": [
+ "Apache/2.4.10 (Debian)"
+ ],
+ "cache-control": [
+ "no-store"
+ ],
+ "x-content-type-options": [
+ "nosniff"
+ ],
+ "date": [
+ "Wed, 08 Apr 2020 21:13:17 GMT"
+ ]
+ },
+ "body": {
+ "string": "{\"result\":{\"errors\":[],\"fields\":{\"title\":\"four: extend the fold range\"},\"revisionIDFieldInfo\":{\"value\":null,\"validDomain\":\"https:\\/\\/phab.mercurial-scm.org\"},\"transactions\":[{\"type\":\"title\",\"value\":\"four: extend the fold range\"}]},\"error_code\":null,\"error_info\":null}"
+ },
+ "status": {
+ "code": 200,
+ "message": "OK"
+ }
+ }
+ },
+ {
+ "request": {
+ "headers": {
+ "content-length": [
+ "781"
+ ],
+ "accept": [
+ "application/mercurial-0.1"
+ ],
+ "content-type": [
+ "application/x-www-form-urlencoded"
+ ],
+ "host": [
+ "phab.mercurial-scm.org"
+ ],
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.3.2+463-d35fbae9dd33+20200408)"
+ ]
+ },
+ "body": "__conduit__=1¶ms=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22objectIdentifier%22%3A+8387%2C+%22transactions%22%3A+%5B%7B%22type%22%3A+%22update%22%2C+%22value%22%3A+%22PHID-DIFF-kvumllarf5ym6ewayp2l%22%7D%2C+%7B%22type%22%3A+%22title%22%2C+%22value%22%3A+%22one%3A+first+commit+to+review%22%7D%2C+%7B%22type%22%3A+%22testPlan%22%2C+%22value%22%3A+%22LOL%21++What+testing%3F%21%5Cn%5CnHaha%21+yeah%2C+right.%22%7D%2C+%7B%22type%22%3A+%22summary%22%2C+%22value%22%3A+%22This+file+was+modified+with+%27mod1%27+as+its+contents.%5Cn%5Cntwo%3A+second+commit+to+review%5Cn%5CnThis+file+was+modified+with+%27mod2%27+as+its+contents.%5Cn%5Cn3%3A+a+commit+with+no+detailed+message%5Cn%5Cnfour%3A+extend+the+fold+range%22%7D%5D%7D&output=json",
+ "uri": "https://phab.mercurial-scm.org//api/differential.revision.edit",
+ "method": "POST"
+ },
+ "response": {
+ "headers": {
+ "x-xss-protection": [
+ "1; mode=block"
+ ],
+ "strict-transport-security": [
+ "max-age=0; includeSubdomains; preload"
+ ],
+ "transfer-encoding": [
+ "chunked"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
+ ],
+ "content-type": [
+ "application/json"
+ ],
+ "referrer-policy": [
+ "no-referrer"
+ ],
+ "server": [
+ "Apache/2.4.10 (Debian)"
+ ],
+ "cache-control": [
+ "no-store"
+ ],
+ "x-content-type-options": [
+ "nosniff"
+ ],
+ "date": [
+ "Wed, 08 Apr 2020 21:13:17 GMT"
+ ]
+ },
+ "body": {
+ "string": "{\"result\":{\"object\":{\"id\":8387,\"phid\":\"PHID-DREV-6ov3pvjim4txejzekw2t\"},\"transactions\":[{\"phid\":\"PHID-XACT-DREV-fr3vwmmeqsxaick\"},{\"phid\":\"PHID-XACT-DREV-csghurweoz3dkpp\"}]},\"error_code\":null,\"error_info\":null}"
+ },
+ "status": {
+ "code": 200,
+ "message": "OK"
+ }
+ }
+ },
+ {
+ "request": {
+ "headers": {
+ "content-length": [
+ "146"
+ ],
+ "accept": [
+ "application/mercurial-0.1"
+ ],
+ "content-type": [
+ "application/x-www-form-urlencoded"
+ ],
+ "host": [
+ "phab.mercurial-scm.org"
+ ],
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.3.2+463-d35fbae9dd33+20200408)"
+ ]
+ },
+ "body": "__conduit__=1¶ms=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22ids%22%3A+%5B8387%5D%7D&output=json",
+ "uri": "https://phab.mercurial-scm.org//api/differential.query",
+ "method": "POST"
+ },
+ "response": {
+ "headers": {
+ "x-xss-protection": [
+ "1; mode=block"
+ ],
+ "strict-transport-security": [
+ "max-age=0; includeSubdomains; preload"
+ ],
+ "transfer-encoding": [
+ "chunked"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
+ ],
+ "content-type": [
+ "application/json"
+ ],
+ "referrer-policy": [
+ "no-referrer"
+ ],
+ "server": [
+ "Apache/2.4.10 (Debian)"
+ ],
+ "cache-control": [
+ "no-store"
+ ],
+ "x-content-type-options": [
+ "nosniff"
+ ],
+ "date": [
+ "Wed, 08 Apr 2020 21:13:18 GMT"
+ ]
+ },
+ "body": {
+ "string": "{\"result\":[{\"id\":\"8387\",\"phid\":\"PHID-DREV-6ov3pvjim4txejzekw2t\",\"title\":\"one: first commit to review\",\"uri\":\"https:\\/\\/phab.mercurial-scm.org\\/D8387\",\"dateCreated\":\"1586380377\",\"dateModified\":\"1586380397\",\"authorPHID\":\"PHID-USER-tzhaient733lwrlbcag5\",\"status\":\"0\",\"statusName\":\"Needs Review\",\"properties\":{\"draft.broadcast\":true,\"lines.added\":2,\"lines.removed\":1},\"branch\":\"default\",\"summary\":\"This file was modified with 'mod1' as its contents.\\n\\ntwo: second commit to review\\n\\nThis file was modified with 'mod2' as its contents.\\n\\n3: a commit with no detailed message\\n\\nfour: extend the fold range\",\"testPlan\":\"LOL! What testing?!\\n\\nHaha! yeah, right.\",\"lineCount\":\"3\",\"activeDiffPHID\":\"PHID-DIFF-kvumllarf5ym6ewayp2l\",\"diffs\":[\"21009\",\"21008\",\"21007\",\"21006\"],\"commits\":[],\"reviewers\":{\"PHID-PROJ-3dvcxzznrjru2xmmses3\":\"PHID-PROJ-3dvcxzznrjru2xmmses3\"},\"ccs\":[\"PHID-USER-q42dn7cc3donqriafhjx\"],\"hashes\":[[\"hgcm\",\"\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\"],[\"hgcm\",\"\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\"],[\"hgcm\",\"\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\"],[\"hgcm\",\"\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\
u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\"]],\"auxiliary\":{\"phabricator:projects\":[],\"phabricator:depends-on\":[]},\"repositoryPHID\":\"PHID-REPO-bvunnehri4u2isyr7bc3\",\"sourcePath\":\"\\/\"}],\"error_code\":null,\"error_info\":null}"
+ },
+ "status": {
+ "code": 200,
+ "message": "OK"
+ }
+ }
+ },
+ {
+ "request": {
+ "headers": {
+ "content-length": [
+ "482"
+ ],
+ "accept": [
+ "application/mercurial-0.1"
+ ],
+ "content-type": [
+ "application/x-www-form-urlencoded"
+ ],
+ "host": [
+ "phab.mercurial-scm.org"
+ ],
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.3.2+463-d35fbae9dd33+20200408)"
+ ]
+ },
+ "body": "__conduit__=1¶ms=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22data%22%3A+%22%7B%5C%22branch%5C%22%3A+%5C%22default%5C%22%2C+%5C%22date%5C%22%3A+%5C%220+0%5C%22%2C+%5C%22node%5C%22%3A+%5C%2251a04fea8707151b9891822fb3c5fc5f11d45f59%5C%22%2C+%5C%22parent%5C%22%3A+%5C%2298d480e0d494906d9758c44c622951b429afd862%5C%22%2C+%5C%22user%5C%22%3A+%5C%22test%5C%22%7D%22%2C+%22diff_id%22%3A+21009%2C+%22name%22%3A+%22hg%3Ameta%22%7D&output=json",
+ "uri": "https://phab.mercurial-scm.org//api/differential.setdiffproperty",
+ "method": "POST"
+ },
+ "response": {
+ "headers": {
+ "x-xss-protection": [
+ "1; mode=block"
+ ],
+ "strict-transport-security": [
+ "max-age=0; includeSubdomains; preload"
+ ],
+ "transfer-encoding": [
+ "chunked"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
+ ],
+ "content-type": [
+ "application/json"
+ ],
+ "referrer-policy": [
+ "no-referrer"
+ ],
+ "server": [
+ "Apache/2.4.10 (Debian)"
+ ],
+ "cache-control": [
+ "no-store"
+ ],
+ "x-content-type-options": [
+ "nosniff"
+ ],
+ "date": [
+ "Wed, 08 Apr 2020 21:13:18 GMT"
+ ]
+ },
+ "body": {
+ "string": "{\"result\":null,\"error_code\":null,\"error_info\":null}"
+ },
+ "status": {
+ "code": 200,
+ "message": "OK"
+ }
+ }
+ },
+ {
+ "request": {
+ "headers": {
+ "content-length": [
+ "1746"
+ ],
+ "accept": [
+ "application/mercurial-0.1"
+ ],
+ "content-type": [
+ "application/x-www-form-urlencoded"
+ ],
+ "host": [
+ "phab.mercurial-scm.org"
+ ],
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.3.2+463-d35fbae9dd33+20200408)"
+ ]
+ },
+ "body": "__conduit__=1¶ms=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22data%22%3A+%22%7B%5C%220124e5474c880e4fb40c8326ad2b75ae3e57ee5f%5C%22%3A+%7B%5C%22author%5C%22%3A+%5C%22test%5C%22%2C+%5C%22authorEmail%5C%22%3A+%5C%22test%5C%22%2C+%5C%22branch%5C%22%3A+%5C%22default%5C%22%2C+%5C%22commit%5C%22%3A+%5C%220124e5474c880e4fb40c8326ad2b75ae3e57ee5f%5C%22%2C+%5C%22parents%5C%22%3A+%5B%5C%22602c4e7382436988e46c0d56fb3883968302b3f6%5C%22%5D%2C+%5C%22time%5C%22%3A+0%7D%2C+%5C%2251a04fea8707151b9891822fb3c5fc5f11d45f59%5C%22%3A+%7B%5C%22author%5C%22%3A+%5C%22test%5C%22%2C+%5C%22authorEmail%5C%22%3A+%5C%22test%5C%22%2C+%5C%22branch%5C%22%3A+%5C%22default%5C%22%2C+%5C%22commit%5C%22%3A+%5C%2251a04fea8707151b9891822fb3c5fc5f11d45f59%5C%22%2C+%5C%22parents%5C%22%3A+%5B%5C%22e4edb1fe3565eaaecc1787ada4fcc2b7e9018fac%5C%22%5D%2C+%5C%22time%5C%22%3A+0%7D%2C+%5C%22602c4e7382436988e46c0d56fb3883968302b3f6%5C%22%3A+%7B%5C%22author%5C%22%3A+%5C%22test%5C%22%2C+%5C%22authorEmail%5C%22%3A+%5C%22test%5C%22%2C+%5C%22branch%5C%22%3A+%5C%22default%5C%22%2C+%5C%22commit%5C%22%3A+%5C%22602c4e7382436988e46c0d56fb3883968302b3f6%5C%22%2C+%5C%22parents%5C%22%3A+%5B%5C%2298d480e0d494906d9758c44c622951b429afd862%5C%22%5D%2C+%5C%22time%5C%22%3A+0%7D%2C+%5C%22e4edb1fe3565eaaecc1787ada4fcc2b7e9018fac%5C%22%3A+%7B%5C%22author%5C%22%3A+%5C%22test%5C%22%2C+%5C%22authorEmail%5C%22%3A+%5C%22test%5C%22%2C+%5C%22branch%5C%22%3A+%5C%22default%5C%22%2C+%5C%22commit%5C%22%3A+%5C%22e4edb1fe3565eaaecc1787ada4fcc2b7e9018fac%5C%22%2C+%5C%22parents%5C%22%3A+%5B%5C%220124e5474c880e4fb40c8326ad2b75ae3e57ee5f%5C%22%5D%2C+%5C%22time%5C%22%3A+0%7D%7D%22%2C+%22diff_id%22%3A+21009%2C+%22name%22%3A+%22local%3Acommits%22%7D&output=json",
+ "uri": "https://phab.mercurial-scm.org//api/differential.setdiffproperty",
+ "method": "POST"
+ },
+ "response": {
+ "headers": {
+ "x-xss-protection": [
+ "1; mode=block"
+ ],
+ "strict-transport-security": [
+ "max-age=0; includeSubdomains; preload"
+ ],
+ "transfer-encoding": [
+ "chunked"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
+ ],
+ "content-type": [
+ "application/json"
+ ],
+ "referrer-policy": [
+ "no-referrer"
+ ],
+ "server": [
+ "Apache/2.4.10 (Debian)"
+ ],
+ "cache-control": [
+ "no-store"
+ ],
+ "x-content-type-options": [
+ "nosniff"
+ ],
+ "date": [
+ "Wed, 08 Apr 2020 21:13:19 GMT"
+ ]
+ },
+ "body": {
+ "string": "{\"result\":null,\"error_code\":null,\"error_info\":null}"
+ },
+ "status": {
+ "code": 200,
+ "message": "OK"
+ }
+ }
+ }
+ ],
+ "version": 1
+}
\ No newline at end of file
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/phabricator/phabsend-fold-extend-front.json Thu Apr 16 22:51:09 2020 +0530
@@ -0,0 +1,960 @@
+{
+ "interactions": [
+ {
+ "request": {
+ "headers": {
+ "content-length": [
+ "178"
+ ],
+ "accept": [
+ "application/mercurial-0.1"
+ ],
+ "content-type": [
+ "application/x-www-form-urlencoded"
+ ],
+ "host": [
+ "phab.mercurial-scm.org"
+ ],
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.3.2+463-d35fbae9dd33+20200408)"
+ ]
+ },
+ "body": "__conduit__=1¶ms=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22revisionIDs%22%3A+%5B8387%2C+8387%2C+8387%2C+8387%5D%7D&output=json",
+ "uri": "https://phab.mercurial-scm.org//api/differential.querydiffs",
+ "method": "POST"
+ },
+ "response": {
+ "headers": {
+ "x-xss-protection": [
+ "1; mode=block"
+ ],
+ "strict-transport-security": [
+ "max-age=0; includeSubdomains; preload"
+ ],
+ "transfer-encoding": [
+ "chunked"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
+ ],
+ "content-type": [
+ "application/json"
+ ],
+ "referrer-policy": [
+ "no-referrer"
+ ],
+ "server": [
+ "Apache/2.4.10 (Debian)"
+ ],
+ "cache-control": [
+ "no-store"
+ ],
+ "x-content-type-options": [
+ "nosniff"
+ ],
+ "date": [
+ "Wed, 08 Apr 2020 21:13:20 GMT"
+ ]
+ },
+ "body": {
+ "string": "{\"result\":{\"21009\":{\"id\":\"21009\",\"revisionID\":\"8387\",\"dateCreated\":\"1586380394\",\"dateModified\":\"1586380397\",\"sourceControlBaseRevision\":\"98d480e0d494906d9758c44c622951b429afd862\",\"sourceControlPath\":\"\\/\",\"sourceControlSystem\":\"hg\",\"branch\":\"default\",\"bookmark\":null,\"creationMethod\":\"phabsend\",\"description\":null,\"unitStatus\":\"0\",\"lintStatus\":\"0\",\"changes\":[{\"id\":\"56829\",\"metadata\":{\"line:first\":1,\"hash.effect\":\"r1N4i770De9n\"},\"oldPath\":null,\"currentPath\":\"file2.txt\",\"awayPaths\":[],\"oldProperties\":[],\"newProperties\":{\"unix:filemode\":\"100644\"},\"type\":\"1\",\"fileType\":\"1\",\"commitHash\":null,\"addLines\":\"1\",\"delLines\":\"0\",\"hunks\":[{\"oldOffset\":\"0\",\"newOffset\":\"1\",\"oldLength\":\"0\",\"newLength\":\"1\",\"addLines\":null,\"delLines\":null,\"isMissingOldNewline\":null,\"isMissingNewNewline\":null,\"corpus\":\"+another mod\\n\"}]},{\"id\":\"56828\",\"metadata\":{\"line:first\":1,\"hash.effect\":\"cAKuVP31KNrx\"},\"oldPath\":\"file.txt\",\"currentPath\":\"file.txt\",\"awayPaths\":[],\"oldProperties\":[],\"newProperties\":[],\"type\":\"2\",\"fileType\":\"1\",\"commitHash\":null,\"addLines\":\"1\",\"delLines\":\"1\",\"hunks\":[{\"oldOffset\":\"1\",\"newOffset\":\"1\",\"oldLength\":\"1\",\"newLength\":\"1\",\"addLines\":null,\"delLines\":null,\"isMissingOldNewline\":null,\"isMissingNewNewline\":null,\"corpus\":\"-added\\n+mod3\\n\"}]}],\"properties\":{\"hg:meta\":{\"branch\":\"default\",\"date\":\"0 
0\",\"node\":\"51a04fea8707151b9891822fb3c5fc5f11d45f59\",\"parent\":\"98d480e0d494906d9758c44c622951b429afd862\",\"user\":\"test\"},\"local:commits\":{\"0124e5474c880e4fb40c8326ad2b75ae3e57ee5f\":{\"author\":\"test\",\"authorEmail\":\"test\",\"branch\":\"default\",\"commit\":\"0124e5474c880e4fb40c8326ad2b75ae3e57ee5f\",\"parents\":[\"602c4e7382436988e46c0d56fb3883968302b3f6\"],\"time\":0},\"51a04fea8707151b9891822fb3c5fc5f11d45f59\":{\"author\":\"test\",\"authorEmail\":\"test\",\"branch\":\"default\",\"commit\":\"51a04fea8707151b9891822fb3c5fc5f11d45f59\",\"parents\":[\"e4edb1fe3565eaaecc1787ada4fcc2b7e9018fac\"],\"time\":0},\"602c4e7382436988e46c0d56fb3883968302b3f6\":{\"author\":\"test\",\"authorEmail\":\"test\",\"branch\":\"default\",\"commit\":\"602c4e7382436988e46c0d56fb3883968302b3f6\",\"parents\":[\"98d480e0d494906d9758c44c622951b429afd862\"],\"time\":0},\"e4edb1fe3565eaaecc1787ada4fcc2b7e9018fac\":{\"author\":\"test\",\"authorEmail\":\"test\",\"branch\":\"default\",\"commit\":\"e4edb1fe3565eaaecc1787ada4fcc2b7e9018fac\",\"parents\":[\"0124e5474c880e4fb40c8326ad2b75ae3e57ee5f\"],\"time\":0}}},\"authorName\":\"test\",\"authorEmail\":\"test\"},\"21008\":{\"id\":\"21008\",\"revisionID\":\"8387\",\"dateCreated\":\"1586380389\",\"dateModified\":\"1586380391\",\"sourceControlBaseRevision\":\"98d480e0d494906d9758c44c622951b429afd862\",\"sourceControlPath\":\"\\/\",\"sourceControlSystem\":\"hg\",\"branch\":\"default\",\"bookmark\":null,\"creationMethod\":\"phabsend\",\"description\":null,\"unitStatus\":\"0\",\"lintStatus\":\"0\",\"changes\":[{\"id\":\"56827\",\"metadata\":{\"line:first\":1,\"hash.effect\":\"yXvxVg6uibwd\"},\"oldPath\":null,\"currentPath\":\"file2.txt\",\"awayPaths\":[],\"oldProperties\":[],\"newProperties\":{\"unix:filemode\":\"100644\"},\"type\":\"1\",\"fileType\":\"1\",\"commitHash\":null,\"addLines\":\"1\",\"delLines\":\"0\",\"hunks\":[{\"oldOffset\":\"0\",\"newOffset\":\"1\",\"oldLength\":\"0\",\"newLength\":\"1\",\"addLines\":null,\"delLines\":
null,\"isMissingOldNewline\":null,\"isMissingNewNewline\":null,\"corpus\":\"+modified\\n\"}]},{\"id\":\"56826\",\"metadata\":{\"line:first\":1,\"hash.effect\":\"cAKuVP31KNrx\"},\"oldPath\":\"file.txt\",\"currentPath\":\"file.txt\",\"awayPaths\":[],\"oldProperties\":[],\"newProperties\":[],\"type\":\"2\",\"fileType\":\"1\",\"commitHash\":null,\"addLines\":\"1\",\"delLines\":\"1\",\"hunks\":[{\"oldOffset\":\"1\",\"newOffset\":\"1\",\"oldLength\":\"1\",\"newLength\":\"1\",\"addLines\":null,\"delLines\":null,\"isMissingOldNewline\":null,\"isMissingNewNewline\":null,\"corpus\":\"-added\\n+mod3\\n\"}]}],\"properties\":{\"hg:meta\":{\"branch\":\"default\",\"date\":\"0 0\",\"node\":\"e4edb1fe3565eaaecc1787ada4fcc2b7e9018fac\",\"parent\":\"98d480e0d494906d9758c44c622951b429afd862\",\"user\":\"test\"},\"local:commits\":{\"0124e5474c880e4fb40c8326ad2b75ae3e57ee5f\":{\"author\":\"test\",\"authorEmail\":\"test\",\"branch\":\"default\",\"commit\":\"0124e5474c880e4fb40c8326ad2b75ae3e57ee5f\",\"parents\":[\"602c4e7382436988e46c0d56fb3883968302b3f6\"],\"time\":0},\"602c4e7382436988e46c0d56fb3883968302b3f6\":{\"author\":\"test\",\"authorEmail\":\"test\",\"branch\":\"default\",\"commit\":\"602c4e7382436988e46c0d56fb3883968302b3f6\",\"parents\":[\"98d480e0d494906d9758c44c622951b429afd862\"],\"time\":0},\"e4edb1fe3565eaaecc1787ada4fcc2b7e9018fac\":{\"author\":\"test\",\"authorEmail\":\"test\",\"branch\":\"default\",\"commit\":\"e4edb1fe3565eaaecc1787ada4fcc2b7e9018fac\",\"parents\":[\"0124e5474c880e4fb40c8326ad2b75ae3e57ee5f\"],\"time\":0}}},\"authorName\":\"test\",\"authorEmail\":\"test\"},\"21007\":{\"id\":\"21007\",\"revisionID\":\"8387\",\"dateCreated\":\"1586380382\",\"dateModified\":\"1586380385\",\"sourceControlBaseRevision\":\"98d480e0d494906d9758c44c622951b429afd862\",\"sourceControlPath\":\"\\/\",\"sourceControlSystem\":\"hg\",\"branch\":\"default\",\"bookmark\":null,\"creationMethod\":\"phabsend\",\"description\":null,\"unitStatus\":\"0\",\"lintStatus\":\"0\",\"changes\":[{\"
id\":\"56825\",\"metadata\":{\"line:first\":1,\"hash.effect\":\"yXvxVg6uibwd\"},\"oldPath\":null,\"currentPath\":\"file2.txt\",\"awayPaths\":[],\"oldProperties\":[],\"newProperties\":{\"unix:filemode\":\"100644\"},\"type\":\"1\",\"fileType\":\"1\",\"commitHash\":null,\"addLines\":\"1\",\"delLines\":\"0\",\"hunks\":[{\"oldOffset\":\"0\",\"newOffset\":\"1\",\"oldLength\":\"0\",\"newLength\":\"1\",\"addLines\":null,\"delLines\":null,\"isMissingOldNewline\":null,\"isMissingNewNewline\":null,\"corpus\":\"+modified\\n\"}]},{\"id\":\"56824\",\"metadata\":{\"line:first\":1,\"hash.effect\":\"cAKuVP31KNrx\"},\"oldPath\":\"file.txt\",\"currentPath\":\"file.txt\",\"awayPaths\":[],\"oldProperties\":[],\"newProperties\":[],\"type\":\"2\",\"fileType\":\"1\",\"commitHash\":null,\"addLines\":\"1\",\"delLines\":\"1\",\"hunks\":[{\"oldOffset\":\"1\",\"newOffset\":\"1\",\"oldLength\":\"1\",\"newLength\":\"1\",\"addLines\":null,\"delLines\":null,\"isMissingOldNewline\":null,\"isMissingNewNewline\":null,\"corpus\":\"-added\\n+mod3\\n\"}]}],\"properties\":{\"hg:meta\":{\"branch\":\"default\",\"date\":\"0 
0\",\"node\":\"e4edb1fe3565eaaecc1787ada4fcc2b7e9018fac\",\"parent\":\"98d480e0d494906d9758c44c622951b429afd862\",\"user\":\"test\"},\"local:commits\":{\"0124e5474c880e4fb40c8326ad2b75ae3e57ee5f\":{\"author\":\"test\",\"authorEmail\":\"test\",\"branch\":\"default\",\"commit\":\"0124e5474c880e4fb40c8326ad2b75ae3e57ee5f\",\"parents\":[\"602c4e7382436988e46c0d56fb3883968302b3f6\"],\"time\":0},\"602c4e7382436988e46c0d56fb3883968302b3f6\":{\"author\":\"test\",\"authorEmail\":\"test\",\"branch\":\"default\",\"commit\":\"602c4e7382436988e46c0d56fb3883968302b3f6\",\"parents\":[\"98d480e0d494906d9758c44c622951b429afd862\"],\"time\":0},\"e4edb1fe3565eaaecc1787ada4fcc2b7e9018fac\":{\"author\":\"test\",\"authorEmail\":\"test\",\"branch\":\"default\",\"commit\":\"e4edb1fe3565eaaecc1787ada4fcc2b7e9018fac\",\"parents\":[\"0124e5474c880e4fb40c8326ad2b75ae3e57ee5f\"],\"time\":0}}},\"authorName\":\"test\",\"authorEmail\":\"test\"},\"21006\":{\"id\":\"21006\",\"revisionID\":\"8387\",\"dateCreated\":\"1586380375\",\"dateModified\":\"1586380377\",\"sourceControlBaseRevision\":\"98d480e0d494906d9758c44c622951b429afd862\",\"sourceControlPath\":\"\\/\",\"sourceControlSystem\":\"hg\",\"branch\":\"default\",\"bookmark\":null,\"creationMethod\":\"phabsend\",\"description\":null,\"unitStatus\":\"0\",\"lintStatus\":\"0\",\"changes\":[{\"id\":\"56823\",\"metadata\":{\"line:first\":1,\"hash.effect\":\"cAKuVP31KNrx\"},\"oldPath\":\"file.txt\",\"currentPath\":\"file.txt\",\"awayPaths\":[],\"oldProperties\":[],\"newProperties\":[],\"type\":\"2\",\"fileType\":\"1\",\"commitHash\":null,\"addLines\":\"1\",\"delLines\":\"1\",\"hunks\":[{\"oldOffset\":\"1\",\"newOffset\":\"1\",\"oldLength\":\"1\",\"newLength\":\"1\",\"addLines\":null,\"delLines\":null,\"isMissingOldNewline\":null,\"isMissingNewNewline\":null,\"corpus\":\"-added\\n+mod3\\n\"}]}],\"properties\":{\"hg:meta\":{\"branch\":\"default\",\"date\":\"0 
0\",\"node\":\"921f8265efbd92e92bfa5d7a0e047908de9844a5\",\"parent\":\"98d480e0d494906d9758c44c622951b429afd862\",\"user\":\"test\"},\"local:commits\":{\"602c4e7382436988e46c0d56fb3883968302b3f6\":{\"author\":\"test\",\"authorEmail\":\"test\",\"branch\":\"default\",\"commit\":\"602c4e7382436988e46c0d56fb3883968302b3f6\",\"parents\":[\"98d480e0d494906d9758c44c622951b429afd862\"],\"time\":0},\"832553266fe8c3330d968e6987df4ae793483b2b\":{\"author\":\"test\",\"authorEmail\":\"test\",\"branch\":\"default\",\"commit\":\"832553266fe8c3330d968e6987df4ae793483b2b\",\"parents\":[\"602c4e7382436988e46c0d56fb3883968302b3f6\"],\"time\":0},\"921f8265efbd92e92bfa5d7a0e047908de9844a5\":{\"author\":\"test\",\"authorEmail\":\"test\",\"branch\":\"default\",\"commit\":\"921f8265efbd92e92bfa5d7a0e047908de9844a5\",\"parents\":[\"832553266fe8c3330d968e6987df4ae793483b2b\"],\"time\":0}}},\"authorName\":\"test\",\"authorEmail\":\"test\"}},\"error_code\":null,\"error_info\":null}"
+ },
+ "status": {
+ "code": 200,
+ "message": "OK"
+ }
+ }
+ },
+ {
+ "request": {
+ "headers": {
+ "content-length": [
+ "183"
+ ],
+ "accept": [
+ "application/mercurial-0.1"
+ ],
+ "content-type": [
+ "application/x-www-form-urlencoded"
+ ],
+ "host": [
+ "phab.mercurial-scm.org"
+ ],
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.3.2+463-d35fbae9dd33+20200408)"
+ ]
+ },
+ "body": "__conduit__=1¶ms=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22constraints%22%3A+%7B%22callsigns%22%3A+%5B%22HG%22%5D%7D%7D&output=json",
+ "uri": "https://phab.mercurial-scm.org//api/diffusion.repository.search",
+ "method": "POST"
+ },
+ "response": {
+ "headers": {
+ "x-xss-protection": [
+ "1; mode=block"
+ ],
+ "strict-transport-security": [
+ "max-age=0; includeSubdomains; preload"
+ ],
+ "transfer-encoding": [
+ "chunked"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
+ ],
+ "content-type": [
+ "application/json"
+ ],
+ "referrer-policy": [
+ "no-referrer"
+ ],
+ "server": [
+ "Apache/2.4.10 (Debian)"
+ ],
+ "cache-control": [
+ "no-store"
+ ],
+ "x-content-type-options": [
+ "nosniff"
+ ],
+ "date": [
+ "Wed, 08 Apr 2020 21:13:21 GMT"
+ ]
+ },
+ "body": {
+ "string": "{\"result\":{\"data\":[{\"id\":2,\"type\":\"REPO\",\"phid\":\"PHID-REPO-bvunnehri4u2isyr7bc3\",\"fields\":{\"name\":\"Mercurial\",\"vcs\":\"hg\",\"callsign\":\"HG\",\"shortName\":null,\"status\":\"active\",\"isImporting\":false,\"almanacServicePHID\":null,\"refRules\":{\"fetchRules\":[],\"trackRules\":[],\"permanentRefRules\":[]},\"spacePHID\":null,\"dateCreated\":1498761653,\"dateModified\":1500403184,\"policy\":{\"view\":\"public\",\"edit\":\"admin\",\"diffusion.push\":\"users\"}},\"attachments\":{}}],\"maps\":{},\"query\":{\"queryKey\":null},\"cursor\":{\"limit\":100,\"after\":null,\"before\":null,\"order\":null}},\"error_code\":null,\"error_info\":null}"
+ },
+ "status": {
+ "code": 200,
+ "message": "OK"
+ }
+ }
+ },
+ {
+ "request": {
+ "headers": {
+ "content-length": [
+ "1756"
+ ],
+ "accept": [
+ "application/mercurial-0.1"
+ ],
+ "content-type": [
+ "application/x-www-form-urlencoded"
+ ],
+ "host": [
+ "phab.mercurial-scm.org"
+ ],
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.3.2+463-d35fbae9dd33+20200408)"
+ ]
+ },
+ "body": "__conduit__=1¶ms=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22bookmark%22%3A+null%2C+%22branch%22%3A+%22default%22%2C+%22changes%22%3A+%7B%22file.txt%22%3A+%7B%22addLines%22%3A+1%2C+%22awayPaths%22%3A+%5B%5D%2C+%22commitHash%22%3A+null%2C+%22currentPath%22%3A+%22file.txt%22%2C+%22delLines%22%3A+0%2C+%22fileType%22%3A+1%2C+%22hunks%22%3A+%5B%7B%22addLines%22%3A+1%2C+%22corpus%22%3A+%22%2Bmod3%5Cn%22%2C+%22delLines%22%3A+0%2C+%22newLength%22%3A+1%2C+%22newOffset%22%3A+1%2C+%22oldLength%22%3A+0%2C+%22oldOffset%22%3A+0%7D%5D%2C+%22metadata%22%3A+%7B%7D%2C+%22newProperties%22%3A+%7B%22unix%3Afilemode%22%3A+%22100644%22%7D%2C+%22oldPath%22%3A+null%2C+%22oldProperties%22%3A+%7B%7D%2C+%22type%22%3A+1%7D%2C+%22file2.txt%22%3A+%7B%22addLines%22%3A+1%2C+%22awayPaths%22%3A+%5B%5D%2C+%22commitHash%22%3A+null%2C+%22currentPath%22%3A+%22file2.txt%22%2C+%22delLines%22%3A+0%2C+%22fileType%22%3A+1%2C+%22hunks%22%3A+%5B%7B%22addLines%22%3A+1%2C+%22corpus%22%3A+%22%2Banother+mod%5Cn%22%2C+%22delLines%22%3A+0%2C+%22newLength%22%3A+1%2C+%22newOffset%22%3A+1%2C+%22oldLength%22%3A+0%2C+%22oldOffset%22%3A+0%7D%5D%2C+%22metadata%22%3A+%7B%7D%2C+%22newProperties%22%3A+%7B%22unix%3Afilemode%22%3A+%22100644%22%7D%2C+%22oldPath%22%3A+null%2C+%22oldProperties%22%3A+%7B%7D%2C+%22type%22%3A+1%7D%7D%2C+%22creationMethod%22%3A+%22phabsend%22%2C+%22lintStatus%22%3A+%22none%22%2C+%22repositoryPHID%22%3A+%22PHID-REPO-bvunnehri4u2isyr7bc3%22%2C+%22sourceControlBaseRevision%22%3A+%220000000000000000000000000000000000000000%22%2C+%22sourceControlPath%22%3A+%22%2F%22%2C+%22sourceControlSystem%22%3A+%22hg%22%2C+%22sourceMachine%22%3A+%22%22%2C+%22sourcePath%22%3A+%22%2F%22%2C+%22unitStatus%22%3A+%22none%22%7D&output=json",
+ "uri": "https://phab.mercurial-scm.org//api/differential.creatediff",
+ "method": "POST"
+ },
+ "response": {
+ "headers": {
+ "x-xss-protection": [
+ "1; mode=block"
+ ],
+ "strict-transport-security": [
+ "max-age=0; includeSubdomains; preload"
+ ],
+ "transfer-encoding": [
+ "chunked"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
+ ],
+ "content-type": [
+ "application/json"
+ ],
+ "referrer-policy": [
+ "no-referrer"
+ ],
+ "server": [
+ "Apache/2.4.10 (Debian)"
+ ],
+ "cache-control": [
+ "no-store"
+ ],
+ "x-content-type-options": [
+ "nosniff"
+ ],
+ "date": [
+ "Wed, 08 Apr 2020 21:13:21 GMT"
+ ]
+ },
+ "body": {
+ "string": "{\"result\":{\"diffid\":21010,\"phid\":\"PHID-DIFF-rgriot6pr2ef72gyyljl\",\"uri\":\"https:\\/\\/phab.mercurial-scm.org\\/differential\\/diff\\/21010\\/\"},\"error_code\":null,\"error_info\":null}"
+ },
+ "status": {
+ "code": 200,
+ "message": "OK"
+ }
+ }
+ },
+ {
+ "request": {
+ "headers": {
+ "content-length": [
+ "482"
+ ],
+ "accept": [
+ "application/mercurial-0.1"
+ ],
+ "content-type": [
+ "application/x-www-form-urlencoded"
+ ],
+ "host": [
+ "phab.mercurial-scm.org"
+ ],
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.3.2+463-d35fbae9dd33+20200408)"
+ ]
+ },
+ "body": "__conduit__=1¶ms=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22data%22%3A+%22%7B%5C%22branch%5C%22%3A+%5C%22default%5C%22%2C+%5C%22date%5C%22%3A+%5C%220+0%5C%22%2C+%5C%22node%5C%22%3A+%5C%2251a04fea8707151b9891822fb3c5fc5f11d45f59%5C%22%2C+%5C%22parent%5C%22%3A+%5C%220000000000000000000000000000000000000000%5C%22%2C+%5C%22user%5C%22%3A+%5C%22test%5C%22%7D%22%2C+%22diff_id%22%3A+21010%2C+%22name%22%3A+%22hg%3Ameta%22%7D&output=json",
+ "uri": "https://phab.mercurial-scm.org//api/differential.setdiffproperty",
+ "method": "POST"
+ },
+ "response": {
+ "headers": {
+ "x-xss-protection": [
+ "1; mode=block"
+ ],
+ "strict-transport-security": [
+ "max-age=0; includeSubdomains; preload"
+ ],
+ "transfer-encoding": [
+ "chunked"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
+ ],
+ "content-type": [
+ "application/json"
+ ],
+ "referrer-policy": [
+ "no-referrer"
+ ],
+ "server": [
+ "Apache/2.4.10 (Debian)"
+ ],
+ "cache-control": [
+ "no-store"
+ ],
+ "x-content-type-options": [
+ "nosniff"
+ ],
+ "date": [
+ "Wed, 08 Apr 2020 21:13:22 GMT"
+ ]
+ },
+ "body": {
+ "string": "{\"result\":null,\"error_code\":null,\"error_info\":null}"
+ },
+ "status": {
+ "code": 200,
+ "message": "OK"
+ }
+ }
+ },
+ {
+ "request": {
+ "headers": {
+ "content-length": [
+ "2130"
+ ],
+ "accept": [
+ "application/mercurial-0.1"
+ ],
+ "content-type": [
+ "application/x-www-form-urlencoded"
+ ],
+ "host": [
+ "phab.mercurial-scm.org"
+ ],
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.3.2+463-d35fbae9dd33+20200408)"
+ ]
+ },
+ "body": "__conduit__=1¶ms=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22data%22%3A+%22%7B%5C%220124e5474c880e4fb40c8326ad2b75ae3e57ee5f%5C%22%3A+%7B%5C%22author%5C%22%3A+%5C%22test%5C%22%2C+%5C%22authorEmail%5C%22%3A+%5C%22test%5C%22%2C+%5C%22branch%5C%22%3A+%5C%22default%5C%22%2C+%5C%22commit%5C%22%3A+%5C%220124e5474c880e4fb40c8326ad2b75ae3e57ee5f%5C%22%2C+%5C%22parents%5C%22%3A+%5B%5C%22602c4e7382436988e46c0d56fb3883968302b3f6%5C%22%5D%2C+%5C%22time%5C%22%3A+0%7D%2C+%5C%2251a04fea8707151b9891822fb3c5fc5f11d45f59%5C%22%3A+%7B%5C%22author%5C%22%3A+%5C%22test%5C%22%2C+%5C%22authorEmail%5C%22%3A+%5C%22test%5C%22%2C+%5C%22branch%5C%22%3A+%5C%22default%5C%22%2C+%5C%22commit%5C%22%3A+%5C%2251a04fea8707151b9891822fb3c5fc5f11d45f59%5C%22%2C+%5C%22parents%5C%22%3A+%5B%5C%22e4edb1fe3565eaaecc1787ada4fcc2b7e9018fac%5C%22%5D%2C+%5C%22time%5C%22%3A+0%7D%2C+%5C%22602c4e7382436988e46c0d56fb3883968302b3f6%5C%22%3A+%7B%5C%22author%5C%22%3A+%5C%22test%5C%22%2C+%5C%22authorEmail%5C%22%3A+%5C%22test%5C%22%2C+%5C%22branch%5C%22%3A+%5C%22default%5C%22%2C+%5C%22commit%5C%22%3A+%5C%22602c4e7382436988e46c0d56fb3883968302b3f6%5C%22%2C+%5C%22parents%5C%22%3A+%5B%5C%2298d480e0d494906d9758c44c622951b429afd862%5C%22%5D%2C+%5C%22time%5C%22%3A+0%7D%2C+%5C%2298d480e0d494906d9758c44c622951b429afd862%5C%22%3A+%7B%5C%22author%5C%22%3A+%5C%22test%5C%22%2C+%5C%22authorEmail%5C%22%3A+%5C%22test%5C%22%2C+%5C%22branch%5C%22%3A+%5C%22default%5C%22%2C+%5C%22commit%5C%22%3A+%5C%2298d480e0d494906d9758c44c622951b429afd862%5C%22%2C+%5C%22parents%5C%22%3A+%5B%5C%220000000000000000000000000000000000000000%5C%22%5D%2C+%5C%22time%5C%22%3A+0%7D%2C+%5C%22e4edb1fe3565eaaecc1787ada4fcc2b7e9018fac%5C%22%3A+%7B%5C%22author%5C%22%3A+%5C%22test%5C%22%2C+%5C%22authorEmail%5C%22%3A+%5C%22test%5C%22%2C+%5C%22branch%5C%22%3A+%5C%22default%5C%22%2C+%5C%22commit%5C%22%3A+%5C%22e4edb1fe3565eaaecc1787ada4fcc2b7e9018fac%5C%22%2C+%5C%22parents%5C%22%3A+%5B%5C%220124e5474c880e4fb40c8326ad2b75ae3e57ee5f%5C
%22%5D%2C+%5C%22time%5C%22%3A+0%7D%7D%22%2C+%22diff_id%22%3A+21010%2C+%22name%22%3A+%22local%3Acommits%22%7D&output=json",
+ "uri": "https://phab.mercurial-scm.org//api/differential.setdiffproperty",
+ "method": "POST"
+ },
+ "response": {
+ "headers": {
+ "x-xss-protection": [
+ "1; mode=block"
+ ],
+ "strict-transport-security": [
+ "max-age=0; includeSubdomains; preload"
+ ],
+ "transfer-encoding": [
+ "chunked"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
+ ],
+ "content-type": [
+ "application/json"
+ ],
+ "referrer-policy": [
+ "no-referrer"
+ ],
+ "server": [
+ "Apache/2.4.10 (Debian)"
+ ],
+ "cache-control": [
+ "no-store"
+ ],
+ "x-content-type-options": [
+ "nosniff"
+ ],
+ "date": [
+ "Wed, 08 Apr 2020 21:13:22 GMT"
+ ]
+ },
+ "body": {
+ "string": "{\"result\":null,\"error_code\":null,\"error_info\":null}"
+ },
+ "status": {
+ "code": 200,
+ "message": "OK"
+ }
+ }
+ },
+ {
+ "request": {
+ "headers": {
+ "content-length": [
+ "155"
+ ],
+ "accept": [
+ "application/mercurial-0.1"
+ ],
+ "content-type": [
+ "application/x-www-form-urlencoded"
+ ],
+ "host": [
+ "phab.mercurial-scm.org"
+ ],
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.3.2+463-d35fbae9dd33+20200408)"
+ ]
+ },
+ "body": "__conduit__=1¶ms=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22corpus%22%3A+%22added+file%22%7D&output=json",
+ "uri": "https://phab.mercurial-scm.org//api/differential.parsecommitmessage",
+ "method": "POST"
+ },
+ "response": {
+ "headers": {
+ "x-xss-protection": [
+ "1; mode=block"
+ ],
+ "strict-transport-security": [
+ "max-age=0; includeSubdomains; preload"
+ ],
+ "transfer-encoding": [
+ "chunked"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
+ ],
+ "content-type": [
+ "application/json"
+ ],
+ "referrer-policy": [
+ "no-referrer"
+ ],
+ "server": [
+ "Apache/2.4.10 (Debian)"
+ ],
+ "cache-control": [
+ "no-store"
+ ],
+ "x-content-type-options": [
+ "nosniff"
+ ],
+ "date": [
+ "Wed, 08 Apr 2020 21:13:22 GMT"
+ ]
+ },
+ "body": {
+ "string": "{\"result\":{\"errors\":[],\"fields\":{\"title\":\"added file\"},\"revisionIDFieldInfo\":{\"value\":null,\"validDomain\":\"https:\\/\\/phab.mercurial-scm.org\"},\"transactions\":[{\"type\":\"title\",\"value\":\"added file\"}]},\"error_code\":null,\"error_info\":null}"
+ },
+ "status": {
+ "code": 200,
+ "message": "OK"
+ }
+ }
+ },
+ {
+ "request": {
+ "headers": {
+ "content-length": [
+ "364"
+ ],
+ "accept": [
+ "application/mercurial-0.1"
+ ],
+ "content-type": [
+ "application/x-www-form-urlencoded"
+ ],
+ "host": [
+ "phab.mercurial-scm.org"
+ ],
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.3.2+463-d35fbae9dd33+20200408)"
+ ]
+ },
+ "body": "__conduit__=1¶ms=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22corpus%22%3A+%22one%3A+first+commit+to+review%5Cn%5CnThis+file+was+modified+with+%27mod1%27+as+its+contents.%5Cn%5CnTest+Plan%3A%5CnLOL%21++What+testing%3F%21%5Cn%5CnDifferential+Revision%3A+https%3A%2F%2Fphab.mercurial-scm.org%2FD8387%22%7D&output=json",
+ "uri": "https://phab.mercurial-scm.org//api/differential.parsecommitmessage",
+ "method": "POST"
+ },
+ "response": {
+ "headers": {
+ "x-xss-protection": [
+ "1; mode=block"
+ ],
+ "strict-transport-security": [
+ "max-age=0; includeSubdomains; preload"
+ ],
+ "transfer-encoding": [
+ "chunked"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
+ ],
+ "content-type": [
+ "application/json"
+ ],
+ "referrer-policy": [
+ "no-referrer"
+ ],
+ "server": [
+ "Apache/2.4.10 (Debian)"
+ ],
+ "cache-control": [
+ "no-store"
+ ],
+ "x-content-type-options": [
+ "nosniff"
+ ],
+ "date": [
+ "Wed, 08 Apr 2020 21:13:23 GMT"
+ ]
+ },
+ "body": {
+ "string": "{\"result\":{\"errors\":[],\"fields\":{\"title\":\"one: first commit to review\",\"testPlan\":\"LOL! What testing?!\",\"revisionID\":8387,\"summary\":\"This file was modified with 'mod1' as its contents.\"},\"revisionIDFieldInfo\":{\"value\":8387,\"validDomain\":\"https:\\/\\/phab.mercurial-scm.org\"},\"transactions\":[{\"type\":\"title\",\"value\":\"one: first commit to review\"},{\"type\":\"testPlan\",\"value\":\"LOL! What testing?!\"},{\"type\":\"summary\",\"value\":\"This file was modified with 'mod1' as its contents.\"}]},\"error_code\":null,\"error_info\":null}"
+ },
+ "status": {
+ "code": 200,
+ "message": "OK"
+ }
+ }
+ },
+ {
+ "request": {
+ "headers": {
+ "content-length": [
+ "361"
+ ],
+ "accept": [
+ "application/mercurial-0.1"
+ ],
+ "content-type": [
+ "application/x-www-form-urlencoded"
+ ],
+ "host": [
+ "phab.mercurial-scm.org"
+ ],
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.3.2+463-d35fbae9dd33+20200408)"
+ ]
+ },
+ "body": "__conduit__=1¶ms=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22corpus%22%3A+%22two%3A+second+commit+to+review%5Cn%5CnThis+file+was+modified+with+%27mod2%27+as+its+contents.%5Cn%5CnTest+Plan%3A%5CnHaha%21+yeah%2C+right.%5Cn%5CnDifferential+Revision%3A+https%3A%2F%2Fphab.mercurial-scm.org%2FD8387%22%7D&output=json",
+ "uri": "https://phab.mercurial-scm.org//api/differential.parsecommitmessage",
+ "method": "POST"
+ },
+ "response": {
+ "headers": {
+ "x-xss-protection": [
+ "1; mode=block"
+ ],
+ "strict-transport-security": [
+ "max-age=0; includeSubdomains; preload"
+ ],
+ "transfer-encoding": [
+ "chunked"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
+ ],
+ "content-type": [
+ "application/json"
+ ],
+ "referrer-policy": [
+ "no-referrer"
+ ],
+ "server": [
+ "Apache/2.4.10 (Debian)"
+ ],
+ "cache-control": [
+ "no-store"
+ ],
+ "x-content-type-options": [
+ "nosniff"
+ ],
+ "date": [
+ "Wed, 08 Apr 2020 21:13:23 GMT"
+ ]
+ },
+ "body": {
+ "string": "{\"result\":{\"errors\":[],\"fields\":{\"title\":\"two: second commit to review\",\"testPlan\":\"Haha! yeah, right.\",\"revisionID\":8387,\"summary\":\"This file was modified with 'mod2' as its contents.\"},\"revisionIDFieldInfo\":{\"value\":8387,\"validDomain\":\"https:\\/\\/phab.mercurial-scm.org\"},\"transactions\":[{\"type\":\"title\",\"value\":\"two: second commit to review\"},{\"type\":\"testPlan\",\"value\":\"Haha! yeah, right.\"},{\"type\":\"summary\",\"value\":\"This file was modified with 'mod2' as its contents.\"}]},\"error_code\":null,\"error_info\":null}"
+ },
+ "status": {
+ "code": 200,
+ "message": "OK"
+ }
+ }
+ },
+ {
+ "request": {
+ "headers": {
+ "content-length": [
+ "260"
+ ],
+ "accept": [
+ "application/mercurial-0.1"
+ ],
+ "content-type": [
+ "application/x-www-form-urlencoded"
+ ],
+ "host": [
+ "phab.mercurial-scm.org"
+ ],
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.3.2+463-d35fbae9dd33+20200408)"
+ ]
+ },
+ "body": "__conduit__=1¶ms=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22corpus%22%3A+%223%3A+a+commit+with+no+detailed+message%5Cn%5CnDifferential+Revision%3A+https%3A%2F%2Fphab.mercurial-scm.org%2FD8387%22%7D&output=json",
+ "uri": "https://phab.mercurial-scm.org//api/differential.parsecommitmessage",
+ "method": "POST"
+ },
+ "response": {
+ "headers": {
+ "x-xss-protection": [
+ "1; mode=block"
+ ],
+ "strict-transport-security": [
+ "max-age=0; includeSubdomains; preload"
+ ],
+ "transfer-encoding": [
+ "chunked"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
+ ],
+ "content-type": [
+ "application/json"
+ ],
+ "referrer-policy": [
+ "no-referrer"
+ ],
+ "server": [
+ "Apache/2.4.10 (Debian)"
+ ],
+ "cache-control": [
+ "no-store"
+ ],
+ "x-content-type-options": [
+ "nosniff"
+ ],
+ "date": [
+ "Wed, 08 Apr 2020 21:13:24 GMT"
+ ]
+ },
+ "body": {
+ "string": "{\"result\":{\"errors\":[],\"fields\":{\"title\":\"3: a commit with no detailed message\",\"revisionID\":8387},\"revisionIDFieldInfo\":{\"value\":8387,\"validDomain\":\"https:\\/\\/phab.mercurial-scm.org\"},\"transactions\":[{\"type\":\"title\",\"value\":\"3: a commit with no detailed message\"}]},\"error_code\":null,\"error_info\":null}"
+ },
+ "status": {
+ "code": 200,
+ "message": "OK"
+ }
+ }
+ },
+ {
+ "request": {
+ "headers": {
+ "content-length": [
+ "251"
+ ],
+ "accept": [
+ "application/mercurial-0.1"
+ ],
+ "content-type": [
+ "application/x-www-form-urlencoded"
+ ],
+ "host": [
+ "phab.mercurial-scm.org"
+ ],
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.3.2+463-d35fbae9dd33+20200408)"
+ ]
+ },
+ "body": "__conduit__=1¶ms=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22corpus%22%3A+%22four%3A+extend+the+fold+range%5Cn%5CnDifferential+Revision%3A+https%3A%2F%2Fphab.mercurial-scm.org%2FD8387%22%7D&output=json",
+ "uri": "https://phab.mercurial-scm.org//api/differential.parsecommitmessage",
+ "method": "POST"
+ },
+ "response": {
+ "headers": {
+ "x-xss-protection": [
+ "1; mode=block"
+ ],
+ "strict-transport-security": [
+ "max-age=0; includeSubdomains; preload"
+ ],
+ "transfer-encoding": [
+ "chunked"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
+ ],
+ "content-type": [
+ "application/json"
+ ],
+ "referrer-policy": [
+ "no-referrer"
+ ],
+ "server": [
+ "Apache/2.4.10 (Debian)"
+ ],
+ "cache-control": [
+ "no-store"
+ ],
+ "x-content-type-options": [
+ "nosniff"
+ ],
+ "date": [
+ "Wed, 08 Apr 2020 21:13:24 GMT"
+ ]
+ },
+ "body": {
+ "string": "{\"result\":{\"errors\":[],\"fields\":{\"title\":\"four: extend the fold range\",\"revisionID\":8387},\"revisionIDFieldInfo\":{\"value\":8387,\"validDomain\":\"https:\\/\\/phab.mercurial-scm.org\"},\"transactions\":[{\"type\":\"title\",\"value\":\"four: extend the fold range\"}]},\"error_code\":null,\"error_info\":null}"
+ },
+ "status": {
+ "code": 200,
+ "message": "OK"
+ }
+ }
+ },
+ {
+ "request": {
+ "headers": {
+ "content-length": [
+ "765"
+ ],
+ "accept": [
+ "application/mercurial-0.1"
+ ],
+ "content-type": [
+ "application/x-www-form-urlencoded"
+ ],
+ "host": [
+ "phab.mercurial-scm.org"
+ ],
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.3.2+463-d35fbae9dd33+20200408)"
+ ]
+ },
+ "body": "__conduit__=1¶ms=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22transactions%22%3A+%5B%7B%22type%22%3A+%22update%22%2C+%22value%22%3A+%22PHID-DIFF-rgriot6pr2ef72gyyljl%22%7D%2C+%7B%22type%22%3A+%22title%22%2C+%22value%22%3A+%22added+file%22%7D%2C+%7B%22type%22%3A+%22testPlan%22%2C+%22value%22%3A+%22LOL%21++What+testing%3F%21%5Cn%5CnHaha%21+yeah%2C+right.%22%7D%2C+%7B%22type%22%3A+%22summary%22%2C+%22value%22%3A+%22one%3A+first+commit+to+review%5Cn%5CnThis+file+was+modified+with+%27mod1%27+as+its+contents.%5Cn%5Cntwo%3A+second+commit+to+review%5Cn%5CnThis+file+was+modified+with+%27mod2%27+as+its+contents.%5Cn%5Cn3%3A+a+commit+with+no+detailed+message%5Cn%5Cnfour%3A+extend+the+fold+range%22%7D%5D%7D&output=json",
+ "uri": "https://phab.mercurial-scm.org//api/differential.revision.edit",
+ "method": "POST"
+ },
+ "response": {
+ "headers": {
+ "x-xss-protection": [
+ "1; mode=block"
+ ],
+ "strict-transport-security": [
+ "max-age=0; includeSubdomains; preload"
+ ],
+ "transfer-encoding": [
+ "chunked"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
+ ],
+ "content-type": [
+ "application/json"
+ ],
+ "referrer-policy": [
+ "no-referrer"
+ ],
+ "server": [
+ "Apache/2.4.10 (Debian)"
+ ],
+ "cache-control": [
+ "no-store"
+ ],
+ "x-content-type-options": [
+ "nosniff"
+ ],
+ "date": [
+ "Wed, 08 Apr 2020 21:13:25 GMT"
+ ]
+ },
+ "body": {
+ "string": "{\"result\":{\"object\":{\"id\":8388,\"phid\":\"PHID-DREV-v5iywrnzj4h4uwo57pob\"},\"transactions\":[{\"phid\":\"PHID-XACT-DREV-dlqxlqkjwgga5ql\"},{\"phid\":\"PHID-XACT-DREV-wtmb3lqskq6o45p\"},{\"phid\":\"PHID-XACT-DREV-otuctsq34y7folr\"},{\"phid\":\"PHID-XACT-DREV-f5i6qgxl42cnu7c\"},{\"phid\":\"PHID-XACT-DREV-r773sn4ttwgm3hz\"},{\"phid\":\"PHID-XACT-DREV-rhu5nw6iwa4d2cz\"},{\"phid\":\"PHID-XACT-DREV-acc2lobcwf5ycb4\"}]},\"error_code\":null,\"error_info\":null}"
+ },
+ "status": {
+ "code": 200,
+ "message": "OK"
+ }
+ }
+ },
+ {
+ "request": {
+ "headers": {
+ "content-length": [
+ "146"
+ ],
+ "accept": [
+ "application/mercurial-0.1"
+ ],
+ "content-type": [
+ "application/x-www-form-urlencoded"
+ ],
+ "host": [
+ "phab.mercurial-scm.org"
+ ],
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.3.2+463-d35fbae9dd33+20200408)"
+ ]
+ },
+ "body": "__conduit__=1¶ms=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22ids%22%3A+%5B8388%5D%7D&output=json",
+ "uri": "https://phab.mercurial-scm.org//api/differential.query",
+ "method": "POST"
+ },
+ "response": {
+ "headers": {
+ "x-xss-protection": [
+ "1; mode=block"
+ ],
+ "strict-transport-security": [
+ "max-age=0; includeSubdomains; preload"
+ ],
+ "transfer-encoding": [
+ "chunked"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
+ ],
+ "content-type": [
+ "application/json"
+ ],
+ "referrer-policy": [
+ "no-referrer"
+ ],
+ "server": [
+ "Apache/2.4.10 (Debian)"
+ ],
+ "cache-control": [
+ "no-store"
+ ],
+ "x-content-type-options": [
+ "nosniff"
+ ],
+ "date": [
+ "Wed, 08 Apr 2020 21:13:25 GMT"
+ ]
+ },
+ "body": {
+ "string": "{\"result\":[{\"id\":\"8388\",\"phid\":\"PHID-DREV-v5iywrnzj4h4uwo57pob\",\"title\":\"added file\",\"uri\":\"https:\\/\\/phab.mercurial-scm.org\\/D8388\",\"dateCreated\":\"1586380405\",\"dateModified\":\"1586380405\",\"authorPHID\":\"PHID-USER-tzhaient733lwrlbcag5\",\"status\":\"0\",\"statusName\":\"Needs Review\",\"properties\":{\"draft.broadcast\":true,\"lines.added\":2,\"lines.removed\":0},\"branch\":\"default\",\"summary\":\"one: first commit to review\\n\\nThis file was modified with 'mod1' as its contents.\\n\\ntwo: second commit to review\\n\\nThis file was modified with 'mod2' as its contents.\\n\\n3: a commit with no detailed message\\n\\nfour: extend the fold range\",\"testPlan\":\"LOL! What testing?!\\n\\nHaha! yeah, right.\",\"lineCount\":\"2\",\"activeDiffPHID\":\"PHID-DIFF-rgriot6pr2ef72gyyljl\",\"diffs\":[\"21010\"],\"commits\":[],\"reviewers\":{\"PHID-PROJ-3dvcxzznrjru2xmmses3\":\"PHID-PROJ-3dvcxzznrjru2xmmses3\"},\"ccs\":[\"PHID-USER-q42dn7cc3donqriafhjx\"],\"hashes\":[[\"hgcm\",\"\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\"],[\"hgcm\",\"\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\"],[\"hgcm\",\"\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\"],[\"hgcm\",\"\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\
u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\"],[\"hgcm\",\"\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\"]],\"auxiliary\":{\"phabricator:projects\":[],\"phabricator:depends-on\":[]},\"repositoryPHID\":\"PHID-REPO-bvunnehri4u2isyr7bc3\",\"sourcePath\":\"\\/\"}],\"error_code\":null,\"error_info\":null}"
+ },
+ "status": {
+ "code": 200,
+ "message": "OK"
+ }
+ }
+ },
+ {
+ "request": {
+ "headers": {
+ "content-length": [
+ "482"
+ ],
+ "accept": [
+ "application/mercurial-0.1"
+ ],
+ "content-type": [
+ "application/x-www-form-urlencoded"
+ ],
+ "host": [
+ "phab.mercurial-scm.org"
+ ],
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.3.2+463-d35fbae9dd33+20200408)"
+ ]
+ },
+ "body": "__conduit__=1¶ms=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22data%22%3A+%22%7B%5C%22branch%5C%22%3A+%5C%22default%5C%22%2C+%5C%22date%5C%22%3A+%5C%220+0%5C%22%2C+%5C%22node%5C%22%3A+%5C%22ac7db67f0991021fb38d3cbf31f5d605e694d3a9%5C%22%2C+%5C%22parent%5C%22%3A+%5C%220000000000000000000000000000000000000000%5C%22%2C+%5C%22user%5C%22%3A+%5C%22test%5C%22%7D%22%2C+%22diff_id%22%3A+21010%2C+%22name%22%3A+%22hg%3Ameta%22%7D&output=json",
+ "uri": "https://phab.mercurial-scm.org//api/differential.setdiffproperty",
+ "method": "POST"
+ },
+ "response": {
+ "headers": {
+ "x-xss-protection": [
+ "1; mode=block"
+ ],
+ "strict-transport-security": [
+ "max-age=0; includeSubdomains; preload"
+ ],
+ "transfer-encoding": [
+ "chunked"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
+ ],
+ "content-type": [
+ "application/json"
+ ],
+ "referrer-policy": [
+ "no-referrer"
+ ],
+ "server": [
+ "Apache/2.4.10 (Debian)"
+ ],
+ "cache-control": [
+ "no-store"
+ ],
+ "x-content-type-options": [
+ "nosniff"
+ ],
+ "date": [
+ "Wed, 08 Apr 2020 21:13:26 GMT"
+ ],
+ "connection": [
+ "close"
+ ]
+ },
+ "body": {
+ "string": "{\"result\":null,\"error_code\":null,\"error_info\":null}"
+ },
+ "status": {
+ "code": 200,
+ "message": "OK"
+ }
+ }
+ },
+ {
+ "request": {
+ "headers": {
+ "content-length": [
+ "2130"
+ ],
+ "accept": [
+ "application/mercurial-0.1"
+ ],
+ "content-type": [
+ "application/x-www-form-urlencoded"
+ ],
+ "host": [
+ "phab.mercurial-scm.org"
+ ],
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.3.2+463-d35fbae9dd33+20200408)"
+ ]
+ },
+ "body": "__conduit__=1¶ms=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22data%22%3A+%22%7B%5C%2215e9b14b4b4c37ccd18298d058a184068718765e%5C%22%3A+%7B%5C%22author%5C%22%3A+%5C%22test%5C%22%2C+%5C%22authorEmail%5C%22%3A+%5C%22test%5C%22%2C+%5C%22branch%5C%22%3A+%5C%22default%5C%22%2C+%5C%22commit%5C%22%3A+%5C%2215e9b14b4b4c37ccd18298d058a184068718765e%5C%22%2C+%5C%22parents%5C%22%3A+%5B%5C%220000000000000000000000000000000000000000%5C%22%5D%2C+%5C%22time%5C%22%3A+0%7D%2C+%5C%2230682b960804bd91823ea5ccb5d6a7d999f8d1ea%5C%22%3A+%7B%5C%22author%5C%22%3A+%5C%22test%5C%22%2C+%5C%22authorEmail%5C%22%3A+%5C%22test%5C%22%2C+%5C%22branch%5C%22%3A+%5C%22default%5C%22%2C+%5C%22commit%5C%22%3A+%5C%2230682b960804bd91823ea5ccb5d6a7d999f8d1ea%5C%22%2C+%5C%22parents%5C%22%3A+%5B%5C%223ee132d41dbc24bf5a4df3e97573d6e922446565%5C%22%5D%2C+%5C%22time%5C%22%3A+0%7D%2C+%5C%223ee132d41dbc24bf5a4df3e97573d6e922446565%5C%22%3A+%7B%5C%22author%5C%22%3A+%5C%22test%5C%22%2C+%5C%22authorEmail%5C%22%3A+%5C%22test%5C%22%2C+%5C%22branch%5C%22%3A+%5C%22default%5C%22%2C+%5C%22commit%5C%22%3A+%5C%223ee132d41dbc24bf5a4df3e97573d6e922446565%5C%22%2C+%5C%22parents%5C%22%3A+%5B%5C%226320b7d714cf2b2b71370eafeae29e2988d07872%5C%22%5D%2C+%5C%22time%5C%22%3A+0%7D%2C+%5C%226320b7d714cf2b2b71370eafeae29e2988d07872%5C%22%3A+%7B%5C%22author%5C%22%3A+%5C%22test%5C%22%2C+%5C%22authorEmail%5C%22%3A+%5C%22test%5C%22%2C+%5C%22branch%5C%22%3A+%5C%22default%5C%22%2C+%5C%22commit%5C%22%3A+%5C%226320b7d714cf2b2b71370eafeae29e2988d07872%5C%22%2C+%5C%22parents%5C%22%3A+%5B%5C%2215e9b14b4b4c37ccd18298d058a184068718765e%5C%22%5D%2C+%5C%22time%5C%22%3A+0%7D%2C+%5C%22ac7db67f0991021fb38d3cbf31f5d605e694d3a9%5C%22%3A+%7B%5C%22author%5C%22%3A+%5C%22test%5C%22%2C+%5C%22authorEmail%5C%22%3A+%5C%22test%5C%22%2C+%5C%22branch%5C%22%3A+%5C%22default%5C%22%2C+%5C%22commit%5C%22%3A+%5C%22ac7db67f0991021fb38d3cbf31f5d605e694d3a9%5C%22%2C+%5C%22parents%5C%22%3A+%5B%5C%2230682b960804bd91823ea5ccb5d6a7d999f8d1ea%5C
%22%5D%2C+%5C%22time%5C%22%3A+0%7D%7D%22%2C+%22diff_id%22%3A+21010%2C+%22name%22%3A+%22local%3Acommits%22%7D&output=json",
+ "uri": "https://phab.mercurial-scm.org//api/differential.setdiffproperty",
+ "method": "POST"
+ },
+ "response": {
+ "headers": {
+ "x-xss-protection": [
+ "1; mode=block"
+ ],
+ "strict-transport-security": [
+ "max-age=0; includeSubdomains; preload"
+ ],
+ "transfer-encoding": [
+ "chunked"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
+ ],
+ "content-type": [
+ "application/json"
+ ],
+ "referrer-policy": [
+ "no-referrer"
+ ],
+ "server": [
+ "Apache/2.4.10 (Debian)"
+ ],
+ "cache-control": [
+ "no-store"
+ ],
+ "x-content-type-options": [
+ "nosniff"
+ ],
+ "date": [
+ "Wed, 08 Apr 2020 21:13:26 GMT"
+ ]
+ },
+ "body": {
+ "string": "{\"result\":null,\"error_code\":null,\"error_info\":null}"
+ },
+ "status": {
+ "code": 200,
+ "message": "OK"
+ }
+ }
+ }
+ ],
+ "version": 1
+}
\ No newline at end of file
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/phabricator/phabsend-fold-fold-end.json Thu Apr 16 22:51:09 2020 +0530
@@ -0,0 +1,957 @@
+{
+ "interactions": [
+ {
+ "request": {
+ "headers": {
+ "content-length": [
+ "186"
+ ],
+ "accept": [
+ "application/mercurial-0.1"
+ ],
+ "content-type": [
+ "application/x-www-form-urlencoded"
+ ],
+ "host": [
+ "phab.mercurial-scm.org"
+ ],
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.3.2+463-d35fbae9dd33+20200408)"
+ ]
+ },
+ "body": "__conduit__=1¶ms=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22revisionIDs%22%3A+%5B8388%2C+8388%2C+8388%2C+8388%2C+8388%5D%7D&output=json",
+ "uri": "https://phab.mercurial-scm.org//api/differential.querydiffs",
+ "method": "POST"
+ },
+ "response": {
+ "headers": {
+ "x-xss-protection": [
+ "1; mode=block"
+ ],
+ "strict-transport-security": [
+ "max-age=0; includeSubdomains; preload"
+ ],
+ "transfer-encoding": [
+ "chunked"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
+ ],
+ "content-type": [
+ "application/json"
+ ],
+ "referrer-policy": [
+ "no-referrer"
+ ],
+ "server": [
+ "Apache/2.4.10 (Debian)"
+ ],
+ "cache-control": [
+ "no-store"
+ ],
+ "x-content-type-options": [
+ "nosniff"
+ ],
+ "date": [
+ "Wed, 08 Apr 2020 21:13:36 GMT"
+ ]
+ },
+ "body": {
+ "string": "{\"result\":{\"21011\":{\"id\":\"21011\",\"revisionID\":\"8388\",\"dateCreated\":\"1586380410\",\"dateModified\":\"1586380414\",\"sourceControlBaseRevision\":\"0000000000000000000000000000000000000000\",\"sourceControlPath\":\"\\/\",\"sourceControlSystem\":\"hg\",\"branch\":\"default\",\"bookmark\":null,\"creationMethod\":\"phabsend\",\"description\":null,\"unitStatus\":\"0\",\"lintStatus\":\"0\",\"changes\":[{\"id\":\"56834\",\"metadata\":{\"line:first\":1,\"hash.effect\":\"NiiVAF6I6ZTP\"},\"oldPath\":null,\"currentPath\":\"file3.txt\",\"awayPaths\":[],\"oldProperties\":[],\"newProperties\":{\"unix:filemode\":\"100644\"},\"type\":\"1\",\"fileType\":\"1\",\"commitHash\":null,\"addLines\":\"1\",\"delLines\":\"0\",\"hunks\":[{\"oldOffset\":\"0\",\"newOffset\":\"1\",\"oldLength\":\"0\",\"newLength\":\"1\",\"addLines\":null,\"delLines\":null,\"isMissingOldNewline\":null,\"isMissingNewNewline\":null,\"corpus\":\"+foo\\n\"}]},{\"id\":\"56833\",\"metadata\":{\"line:first\":1,\"hash.effect\":\"fNX0XXem8Zx_\"},\"oldPath\":null,\"currentPath\":\"file2.txt\",\"awayPaths\":[],\"oldProperties\":[],\"newProperties\":{\"unix:filemode\":\"100644\"},\"type\":\"1\",\"fileType\":\"1\",\"commitHash\":null,\"addLines\":\"1\",\"delLines\":\"0\",\"hunks\":[{\"oldOffset\":\"0\",\"newOffset\":\"1\",\"oldLength\":\"0\",\"newLength\":\"1\",\"addLines\":null,\"delLines\":null,\"isMissingOldNewline\":null,\"isMissingNewNewline\":null,\"corpus\":\"+amended 
mod\\n\"}]},{\"id\":\"56832\",\"metadata\":{\"line:first\":1,\"hash.effect\":\"cAKuVP31KNrx\"},\"oldPath\":null,\"currentPath\":\"file.txt\",\"awayPaths\":[],\"oldProperties\":[],\"newProperties\":{\"unix:filemode\":\"100644\"},\"type\":\"1\",\"fileType\":\"1\",\"commitHash\":null,\"addLines\":\"1\",\"delLines\":\"0\",\"hunks\":[{\"oldOffset\":\"0\",\"newOffset\":\"1\",\"oldLength\":\"0\",\"newLength\":\"1\",\"addLines\":null,\"delLines\":null,\"isMissingOldNewline\":null,\"isMissingNewNewline\":null,\"corpus\":\"+mod3\\n\"}]}],\"properties\":{\"hg:meta\":{\"branch\":\"default\",\"date\":\"0 0\",\"node\":\"b50946d5e4901d7f7801d572342ef90d06a85a85\",\"parent\":\"0000000000000000000000000000000000000000\",\"user\":\"test\"},\"local:commits\":{\"15e9b14b4b4c37ccd18298d058a184068718765e\":{\"author\":\"test\",\"authorEmail\":\"test\",\"branch\":\"default\",\"commit\":\"15e9b14b4b4c37ccd18298d058a184068718765e\",\"parents\":[\"0000000000000000000000000000000000000000\"],\"time\":0},\"30682b960804bd91823ea5ccb5d6a7d999f8d1ea\":{\"author\":\"test\",\"authorEmail\":\"test\",\"branch\":\"default\",\"commit\":\"30682b960804bd91823ea5ccb5d6a7d999f8d1ea\",\"parents\":[\"3ee132d41dbc24bf5a4df3e97573d6e922446565\"],\"time\":0},\"3ee132d41dbc24bf5a4df3e97573d6e922446565\":{\"author\":\"test\",\"authorEmail\":\"test\",\"branch\":\"default\",\"commit\":\"3ee132d41dbc24bf5a4df3e97573d6e922446565\",\"parents\":[\"6320b7d714cf2b2b71370eafeae29e2988d07872\"],\"time\":0},\"6320b7d714cf2b2b71370eafeae29e2988d07872\":{\"author\":\"test\",\"authorEmail\":\"test\",\"branch\":\"default\",\"commit\":\"6320b7d714cf2b2b71370eafeae29e2988d07872\",\"parents\":[\"15e9b14b4b4c37ccd18298d058a184068718765e\"],\"time\":0},\"6bc15dc99efd596be5908c187f68d6810671121e\":{\"author\":\"test\",\"authorEmail\":\"test\",\"branch\":\"default\",\"commit\":\"6bc15dc99efd596be5908c187f68d6810671121e\",\"parents\":[\"30682b960804bd91823ea5ccb5d6a7d999f8d1ea\"],\"time\":0},\"b50946d5e4901d7f7801d572342ef90d06a85a85\"
:{\"author\":\"test\",\"authorEmail\":\"test\",\"branch\":\"default\",\"commit\":\"b50946d5e4901d7f7801d572342ef90d06a85a85\",\"parents\":[\"6bc15dc99efd596be5908c187f68d6810671121e\"],\"time\":0}}},\"authorName\":\"test\",\"authorEmail\":\"test\"},\"21010\":{\"id\":\"21010\",\"revisionID\":\"8388\",\"dateCreated\":\"1586380401\",\"dateModified\":\"1586380405\",\"sourceControlBaseRevision\":\"0000000000000000000000000000000000000000\",\"sourceControlPath\":\"\\/\",\"sourceControlSystem\":\"hg\",\"branch\":\"default\",\"bookmark\":null,\"creationMethod\":\"phabsend\",\"description\":null,\"unitStatus\":\"0\",\"lintStatus\":\"0\",\"changes\":[{\"id\":\"56831\",\"metadata\":{\"line:first\":1,\"hash.effect\":\"r1N4i770De9n\"},\"oldPath\":null,\"currentPath\":\"file2.txt\",\"awayPaths\":[],\"oldProperties\":[],\"newProperties\":{\"unix:filemode\":\"100644\"},\"type\":\"1\",\"fileType\":\"1\",\"commitHash\":null,\"addLines\":\"1\",\"delLines\":\"0\",\"hunks\":[{\"oldOffset\":\"0\",\"newOffset\":\"1\",\"oldLength\":\"0\",\"newLength\":\"1\",\"addLines\":null,\"delLines\":null,\"isMissingOldNewline\":null,\"isMissingNewNewline\":null,\"corpus\":\"+another mod\\n\"}]},{\"id\":\"56830\",\"metadata\":{\"line:first\":1,\"hash.effect\":\"cAKuVP31KNrx\"},\"oldPath\":null,\"currentPath\":\"file.txt\",\"awayPaths\":[],\"oldProperties\":[],\"newProperties\":{\"unix:filemode\":\"100644\"},\"type\":\"1\",\"fileType\":\"1\",\"commitHash\":null,\"addLines\":\"1\",\"delLines\":\"0\",\"hunks\":[{\"oldOffset\":\"0\",\"newOffset\":\"1\",\"oldLength\":\"0\",\"newLength\":\"1\",\"addLines\":null,\"delLines\":null,\"isMissingOldNewline\":null,\"isMissingNewNewline\":null,\"corpus\":\"+mod3\\n\"}]}],\"properties\":{\"hg:meta\":{\"branch\":\"default\",\"date\":\"0 
0\",\"node\":\"ac7db67f0991021fb38d3cbf31f5d605e694d3a9\",\"parent\":\"0000000000000000000000000000000000000000\",\"user\":\"test\"},\"local:commits\":{\"15e9b14b4b4c37ccd18298d058a184068718765e\":{\"author\":\"test\",\"authorEmail\":\"test\",\"branch\":\"default\",\"commit\":\"15e9b14b4b4c37ccd18298d058a184068718765e\",\"parents\":[\"0000000000000000000000000000000000000000\"],\"time\":0},\"30682b960804bd91823ea5ccb5d6a7d999f8d1ea\":{\"author\":\"test\",\"authorEmail\":\"test\",\"branch\":\"default\",\"commit\":\"30682b960804bd91823ea5ccb5d6a7d999f8d1ea\",\"parents\":[\"3ee132d41dbc24bf5a4df3e97573d6e922446565\"],\"time\":0},\"3ee132d41dbc24bf5a4df3e97573d6e922446565\":{\"author\":\"test\",\"authorEmail\":\"test\",\"branch\":\"default\",\"commit\":\"3ee132d41dbc24bf5a4df3e97573d6e922446565\",\"parents\":[\"6320b7d714cf2b2b71370eafeae29e2988d07872\"],\"time\":0},\"6320b7d714cf2b2b71370eafeae29e2988d07872\":{\"author\":\"test\",\"authorEmail\":\"test\",\"branch\":\"default\",\"commit\":\"6320b7d714cf2b2b71370eafeae29e2988d07872\",\"parents\":[\"15e9b14b4b4c37ccd18298d058a184068718765e\"],\"time\":0},\"ac7db67f0991021fb38d3cbf31f5d605e694d3a9\":{\"author\":\"test\",\"authorEmail\":\"test\",\"branch\":\"default\",\"commit\":\"ac7db67f0991021fb38d3cbf31f5d605e694d3a9\",\"parents\":[\"30682b960804bd91823ea5ccb5d6a7d999f8d1ea\"],\"time\":0}}},\"authorName\":\"test\",\"authorEmail\":\"test\"}},\"error_code\":null,\"error_info\":null}"
+ },
+ "status": {
+ "code": 200,
+ "message": "OK"
+ }
+ }
+ },
+ {
+ "request": {
+ "headers": {
+ "content-length": [
+ "183"
+ ],
+ "accept": [
+ "application/mercurial-0.1"
+ ],
+ "content-type": [
+ "application/x-www-form-urlencoded"
+ ],
+ "host": [
+ "phab.mercurial-scm.org"
+ ],
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.3.2+463-d35fbae9dd33+20200408)"
+ ]
+ },
+ "body": "__conduit__=1¶ms=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22constraints%22%3A+%7B%22callsigns%22%3A+%5B%22HG%22%5D%7D%7D&output=json",
+ "uri": "https://phab.mercurial-scm.org//api/diffusion.repository.search",
+ "method": "POST"
+ },
+ "response": {
+ "headers": {
+ "x-xss-protection": [
+ "1; mode=block"
+ ],
+ "strict-transport-security": [
+ "max-age=0; includeSubdomains; preload"
+ ],
+ "transfer-encoding": [
+ "chunked"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
+ ],
+ "content-type": [
+ "application/json"
+ ],
+ "referrer-policy": [
+ "no-referrer"
+ ],
+ "server": [
+ "Apache/2.4.10 (Debian)"
+ ],
+ "cache-control": [
+ "no-store"
+ ],
+ "x-content-type-options": [
+ "nosniff"
+ ],
+ "date": [
+ "Wed, 08 Apr 2020 21:13:37 GMT"
+ ]
+ },
+ "body": {
+ "string": "{\"result\":{\"data\":[{\"id\":2,\"type\":\"REPO\",\"phid\":\"PHID-REPO-bvunnehri4u2isyr7bc3\",\"fields\":{\"name\":\"Mercurial\",\"vcs\":\"hg\",\"callsign\":\"HG\",\"shortName\":null,\"status\":\"active\",\"isImporting\":false,\"almanacServicePHID\":null,\"refRules\":{\"fetchRules\":[],\"trackRules\":[],\"permanentRefRules\":[]},\"spacePHID\":null,\"dateCreated\":1498761653,\"dateModified\":1500403184,\"policy\":{\"view\":\"public\",\"edit\":\"admin\",\"diffusion.push\":\"users\"}},\"attachments\":{}}],\"maps\":{},\"query\":{\"queryKey\":null},\"cursor\":{\"limit\":100,\"after\":null,\"before\":null,\"order\":null}},\"error_code\":null,\"error_info\":null}"
+ },
+ "status": {
+ "code": 200,
+ "message": "OK"
+ }
+ }
+ },
+ {
+ "request": {
+ "headers": {
+ "content-length": [
+ "2327"
+ ],
+ "accept": [
+ "application/mercurial-0.1"
+ ],
+ "content-type": [
+ "application/x-www-form-urlencoded"
+ ],
+ "host": [
+ "phab.mercurial-scm.org"
+ ],
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.3.2+463-d35fbae9dd33+20200408)"
+ ]
+ },
+ "body": "__conduit__=1¶ms=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22bookmark%22%3A+null%2C+%22branch%22%3A+%22default%22%2C+%22changes%22%3A+%7B%22file.txt%22%3A+%7B%22addLines%22%3A+1%2C+%22awayPaths%22%3A+%5B%5D%2C+%22commitHash%22%3A+null%2C+%22currentPath%22%3A+%22file.txt%22%2C+%22delLines%22%3A+0%2C+%22fileType%22%3A+1%2C+%22hunks%22%3A+%5B%7B%22addLines%22%3A+1%2C+%22corpus%22%3A+%22%2Bmod3%5Cn%22%2C+%22delLines%22%3A+0%2C+%22newLength%22%3A+1%2C+%22newOffset%22%3A+1%2C+%22oldLength%22%3A+0%2C+%22oldOffset%22%3A+0%7D%5D%2C+%22metadata%22%3A+%7B%7D%2C+%22newProperties%22%3A+%7B%22unix%3Afilemode%22%3A+%22100644%22%7D%2C+%22oldPath%22%3A+null%2C+%22oldProperties%22%3A+%7B%7D%2C+%22type%22%3A+1%7D%2C+%22file2.txt%22%3A+%7B%22addLines%22%3A+1%2C+%22awayPaths%22%3A+%5B%5D%2C+%22commitHash%22%3A+null%2C+%22currentPath%22%3A+%22file2.txt%22%2C+%22delLines%22%3A+0%2C+%22fileType%22%3A+1%2C+%22hunks%22%3A+%5B%7B%22addLines%22%3A+1%2C+%22corpus%22%3A+%22%2Bamended+mod%5Cn%22%2C+%22delLines%22%3A+0%2C+%22newLength%22%3A+1%2C+%22newOffset%22%3A+1%2C+%22oldLength%22%3A+0%2C+%22oldOffset%22%3A+0%7D%5D%2C+%22metadata%22%3A+%7B%7D%2C+%22newProperties%22%3A+%7B%22unix%3Afilemode%22%3A+%22100644%22%7D%2C+%22oldPath%22%3A+null%2C+%22oldProperties%22%3A+%7B%7D%2C+%22type%22%3A+1%7D%2C+%22file3.txt%22%3A+%7B%22addLines%22%3A+1%2C+%22awayPaths%22%3A+%5B%5D%2C+%22commitHash%22%3A+null%2C+%22currentPath%22%3A+%22file3.txt%22%2C+%22delLines%22%3A+0%2C+%22fileType%22%3A+1%2C+%22hunks%22%3A+%5B%7B%22addLines%22%3A+1%2C+%22corpus%22%3A+%22%2Bfoo%5Cn%22%2C+%22delLines%22%3A+0%2C+%22newLength%22%3A+1%2C+%22newOffset%22%3A+1%2C+%22oldLength%22%3A+0%2C+%22oldOffset%22%3A+0%7D%5D%2C+%22metadata%22%3A+%7B%7D%2C+%22newProperties%22%3A+%7B%22unix%3Afilemode%22%3A+%22100644%22%7D%2C+%22oldPath%22%3A+null%2C+%22oldProperties%22%3A+%7B%7D%2C+%22type%22%3A+1%7D%7D%2C+%22creationMethod%22%3A+%22phabsend%22%2C+%22lintStatus%22%3A+%22none%22%2C+%22repositoryPHID%22%3A+
%22PHID-REPO-bvunnehri4u2isyr7bc3%22%2C+%22sourceControlBaseRevision%22%3A+%220000000000000000000000000000000000000000%22%2C+%22sourceControlPath%22%3A+%22%2F%22%2C+%22sourceControlSystem%22%3A+%22hg%22%2C+%22sourceMachine%22%3A+%22%22%2C+%22sourcePath%22%3A+%22%2F%22%2C+%22unitStatus%22%3A+%22none%22%7D&output=json",
+ "uri": "https://phab.mercurial-scm.org//api/differential.creatediff",
+ "method": "POST"
+ },
+ "response": {
+ "headers": {
+ "x-xss-protection": [
+ "1; mode=block"
+ ],
+ "strict-transport-security": [
+ "max-age=0; includeSubdomains; preload"
+ ],
+ "transfer-encoding": [
+ "chunked"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
+ ],
+ "content-type": [
+ "application/json"
+ ],
+ "referrer-policy": [
+ "no-referrer"
+ ],
+ "server": [
+ "Apache/2.4.10 (Debian)"
+ ],
+ "cache-control": [
+ "no-store"
+ ],
+ "x-content-type-options": [
+ "nosniff"
+ ],
+ "date": [
+ "Wed, 08 Apr 2020 21:13:37 GMT"
+ ]
+ },
+ "body": {
+ "string": "{\"result\":{\"diffid\":21012,\"phid\":\"PHID-DIFF-xfr3co4kuic6jeu3xbz4\",\"uri\":\"https:\\/\\/phab.mercurial-scm.org\\/differential\\/diff\\/21012\\/\"},\"error_code\":null,\"error_info\":null}"
+ },
+ "status": {
+ "code": 200,
+ "message": "OK"
+ }
+ }
+ },
+ {
+ "request": {
+ "headers": {
+ "content-length": [
+ "482"
+ ],
+ "accept": [
+ "application/mercurial-0.1"
+ ],
+ "content-type": [
+ "application/x-www-form-urlencoded"
+ ],
+ "host": [
+ "phab.mercurial-scm.org"
+ ],
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.3.2+463-d35fbae9dd33+20200408)"
+ ]
+ },
+ "body": "__conduit__=1¶ms=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22data%22%3A+%22%7B%5C%22branch%5C%22%3A+%5C%22default%5C%22%2C+%5C%22date%5C%22%3A+%5C%220+0%5C%22%2C+%5C%22node%5C%22%3A+%5C%22e919cdf3d4fe9a926427b1961601eeaf4b4e2caf%5C%22%2C+%5C%22parent%5C%22%3A+%5C%220000000000000000000000000000000000000000%5C%22%2C+%5C%22user%5C%22%3A+%5C%22test%5C%22%7D%22%2C+%22diff_id%22%3A+21012%2C+%22name%22%3A+%22hg%3Ameta%22%7D&output=json",
+ "uri": "https://phab.mercurial-scm.org//api/differential.setdiffproperty",
+ "method": "POST"
+ },
+ "response": {
+ "headers": {
+ "x-xss-protection": [
+ "1; mode=block"
+ ],
+ "strict-transport-security": [
+ "max-age=0; includeSubdomains; preload"
+ ],
+ "transfer-encoding": [
+ "chunked"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
+ ],
+ "content-type": [
+ "application/json"
+ ],
+ "referrer-policy": [
+ "no-referrer"
+ ],
+ "server": [
+ "Apache/2.4.10 (Debian)"
+ ],
+ "cache-control": [
+ "no-store"
+ ],
+ "x-content-type-options": [
+ "nosniff"
+ ],
+ "date": [
+ "Wed, 08 Apr 2020 21:13:38 GMT"
+ ]
+ },
+ "body": {
+ "string": "{\"result\":null,\"error_code\":null,\"error_info\":null}"
+ },
+ "status": {
+ "code": 200,
+ "message": "OK"
+ }
+ }
+ },
+ {
+ "request": {
+ "headers": {
+ "content-length": [
+ "2130"
+ ],
+ "accept": [
+ "application/mercurial-0.1"
+ ],
+ "content-type": [
+ "application/x-www-form-urlencoded"
+ ],
+ "host": [
+ "phab.mercurial-scm.org"
+ ],
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.3.2+463-d35fbae9dd33+20200408)"
+ ]
+ },
+ "body": "__conduit__=1¶ms=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22data%22%3A+%22%7B%5C%2215e9b14b4b4c37ccd18298d058a184068718765e%5C%22%3A+%7B%5C%22author%5C%22%3A+%5C%22test%5C%22%2C+%5C%22authorEmail%5C%22%3A+%5C%22test%5C%22%2C+%5C%22branch%5C%22%3A+%5C%22default%5C%22%2C+%5C%22commit%5C%22%3A+%5C%2215e9b14b4b4c37ccd18298d058a184068718765e%5C%22%2C+%5C%22parents%5C%22%3A+%5B%5C%220000000000000000000000000000000000000000%5C%22%5D%2C+%5C%22time%5C%22%3A+0%7D%2C+%5C%2230682b960804bd91823ea5ccb5d6a7d999f8d1ea%5C%22%3A+%7B%5C%22author%5C%22%3A+%5C%22test%5C%22%2C+%5C%22authorEmail%5C%22%3A+%5C%22test%5C%22%2C+%5C%22branch%5C%22%3A+%5C%22default%5C%22%2C+%5C%22commit%5C%22%3A+%5C%2230682b960804bd91823ea5ccb5d6a7d999f8d1ea%5C%22%2C+%5C%22parents%5C%22%3A+%5B%5C%223ee132d41dbc24bf5a4df3e97573d6e922446565%5C%22%5D%2C+%5C%22time%5C%22%3A+0%7D%2C+%5C%223ee132d41dbc24bf5a4df3e97573d6e922446565%5C%22%3A+%7B%5C%22author%5C%22%3A+%5C%22test%5C%22%2C+%5C%22authorEmail%5C%22%3A+%5C%22test%5C%22%2C+%5C%22branch%5C%22%3A+%5C%22default%5C%22%2C+%5C%22commit%5C%22%3A+%5C%223ee132d41dbc24bf5a4df3e97573d6e922446565%5C%22%2C+%5C%22parents%5C%22%3A+%5B%5C%226320b7d714cf2b2b71370eafeae29e2988d07872%5C%22%5D%2C+%5C%22time%5C%22%3A+0%7D%2C+%5C%226320b7d714cf2b2b71370eafeae29e2988d07872%5C%22%3A+%7B%5C%22author%5C%22%3A+%5C%22test%5C%22%2C+%5C%22authorEmail%5C%22%3A+%5C%22test%5C%22%2C+%5C%22branch%5C%22%3A+%5C%22default%5C%22%2C+%5C%22commit%5C%22%3A+%5C%226320b7d714cf2b2b71370eafeae29e2988d07872%5C%22%2C+%5C%22parents%5C%22%3A+%5B%5C%2215e9b14b4b4c37ccd18298d058a184068718765e%5C%22%5D%2C+%5C%22time%5C%22%3A+0%7D%2C+%5C%22e919cdf3d4fe9a926427b1961601eeaf4b4e2caf%5C%22%3A+%7B%5C%22author%5C%22%3A+%5C%22test%5C%22%2C+%5C%22authorEmail%5C%22%3A+%5C%22test%5C%22%2C+%5C%22branch%5C%22%3A+%5C%22default%5C%22%2C+%5C%22commit%5C%22%3A+%5C%22e919cdf3d4fe9a926427b1961601eeaf4b4e2caf%5C%22%2C+%5C%22parents%5C%22%3A+%5B%5C%2230682b960804bd91823ea5ccb5d6a7d999f8d1ea%5C
%22%5D%2C+%5C%22time%5C%22%3A+0%7D%7D%22%2C+%22diff_id%22%3A+21012%2C+%22name%22%3A+%22local%3Acommits%22%7D&output=json",
+ "uri": "https://phab.mercurial-scm.org//api/differential.setdiffproperty",
+ "method": "POST"
+ },
+ "response": {
+ "headers": {
+ "x-xss-protection": [
+ "1; mode=block"
+ ],
+ "strict-transport-security": [
+ "max-age=0; includeSubdomains; preload"
+ ],
+ "transfer-encoding": [
+ "chunked"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
+ ],
+ "content-type": [
+ "application/json"
+ ],
+ "referrer-policy": [
+ "no-referrer"
+ ],
+ "server": [
+ "Apache/2.4.10 (Debian)"
+ ],
+ "cache-control": [
+ "no-store"
+ ],
+ "x-content-type-options": [
+ "nosniff"
+ ],
+ "date": [
+ "Wed, 08 Apr 2020 21:13:38 GMT"
+ ]
+ },
+ "body": {
+ "string": "{\"result\":null,\"error_code\":null,\"error_info\":null}"
+ },
+ "status": {
+ "code": 200,
+ "message": "OK"
+ }
+ }
+ },
+ {
+ "request": {
+ "headers": {
+ "content-length": [
+ "232"
+ ],
+ "accept": [
+ "application/mercurial-0.1"
+ ],
+ "content-type": [
+ "application/x-www-form-urlencoded"
+ ],
+ "host": [
+ "phab.mercurial-scm.org"
+ ],
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.3.2+463-d35fbae9dd33+20200408)"
+ ]
+ },
+ "body": "__conduit__=1¶ms=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22corpus%22%3A+%22added+file%5Cn%5CnDifferential+Revision%3A+https%3A%2F%2Fphab.mercurial-scm.org%2FD8388%22%7D&output=json",
+ "uri": "https://phab.mercurial-scm.org//api/differential.parsecommitmessage",
+ "method": "POST"
+ },
+ "response": {
+ "headers": {
+ "x-xss-protection": [
+ "1; mode=block"
+ ],
+ "strict-transport-security": [
+ "max-age=0; includeSubdomains; preload"
+ ],
+ "transfer-encoding": [
+ "chunked"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
+ ],
+ "content-type": [
+ "application/json"
+ ],
+ "referrer-policy": [
+ "no-referrer"
+ ],
+ "server": [
+ "Apache/2.4.10 (Debian)"
+ ],
+ "cache-control": [
+ "no-store"
+ ],
+ "x-content-type-options": [
+ "nosniff"
+ ],
+ "date": [
+ "Wed, 08 Apr 2020 21:13:39 GMT"
+ ]
+ },
+ "body": {
+ "string": "{\"result\":{\"errors\":[],\"fields\":{\"title\":\"added file\",\"revisionID\":8388},\"revisionIDFieldInfo\":{\"value\":8388,\"validDomain\":\"https:\\/\\/phab.mercurial-scm.org\"},\"transactions\":[{\"type\":\"title\",\"value\":\"added file\"}]},\"error_code\":null,\"error_info\":null}"
+ },
+ "status": {
+ "code": 200,
+ "message": "OK"
+ }
+ }
+ },
+ {
+ "request": {
+ "headers": {
+ "content-length": [
+ "364"
+ ],
+ "accept": [
+ "application/mercurial-0.1"
+ ],
+ "content-type": [
+ "application/x-www-form-urlencoded"
+ ],
+ "host": [
+ "phab.mercurial-scm.org"
+ ],
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.3.2+463-d35fbae9dd33+20200408)"
+ ]
+ },
+ "body": "__conduit__=1¶ms=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22corpus%22%3A+%22one%3A+first+commit+to+review%5Cn%5CnThis+file+was+modified+with+%27mod1%27+as+its+contents.%5Cn%5CnTest+Plan%3A%5CnLOL%21++What+testing%3F%21%5Cn%5CnDifferential+Revision%3A+https%3A%2F%2Fphab.mercurial-scm.org%2FD8388%22%7D&output=json",
+ "uri": "https://phab.mercurial-scm.org//api/differential.parsecommitmessage",
+ "method": "POST"
+ },
+ "response": {
+ "headers": {
+ "x-xss-protection": [
+ "1; mode=block"
+ ],
+ "strict-transport-security": [
+ "max-age=0; includeSubdomains; preload"
+ ],
+ "transfer-encoding": [
+ "chunked"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
+ ],
+ "content-type": [
+ "application/json"
+ ],
+ "referrer-policy": [
+ "no-referrer"
+ ],
+ "server": [
+ "Apache/2.4.10 (Debian)"
+ ],
+ "cache-control": [
+ "no-store"
+ ],
+ "x-content-type-options": [
+ "nosniff"
+ ],
+ "date": [
+ "Wed, 08 Apr 2020 21:13:39 GMT"
+ ]
+ },
+ "body": {
+ "string": "{\"result\":{\"errors\":[],\"fields\":{\"title\":\"one: first commit to review\",\"testPlan\":\"LOL! What testing?!\",\"revisionID\":8388,\"summary\":\"This file was modified with 'mod1' as its contents.\"},\"revisionIDFieldInfo\":{\"value\":8388,\"validDomain\":\"https:\\/\\/phab.mercurial-scm.org\"},\"transactions\":[{\"type\":\"title\",\"value\":\"one: first commit to review\"},{\"type\":\"testPlan\",\"value\":\"LOL! What testing?!\"},{\"type\":\"summary\",\"value\":\"This file was modified with 'mod1' as its contents.\"}]},\"error_code\":null,\"error_info\":null}"
+ },
+ "status": {
+ "code": 200,
+ "message": "OK"
+ }
+ }
+ },
+ {
+ "request": {
+ "headers": {
+ "content-length": [
+ "361"
+ ],
+ "accept": [
+ "application/mercurial-0.1"
+ ],
+ "content-type": [
+ "application/x-www-form-urlencoded"
+ ],
+ "host": [
+ "phab.mercurial-scm.org"
+ ],
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.3.2+463-d35fbae9dd33+20200408)"
+ ]
+ },
+ "body": "__conduit__=1¶ms=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22corpus%22%3A+%22two%3A+second+commit+to+review%5Cn%5CnThis+file+was+modified+with+%27mod2%27+as+its+contents.%5Cn%5CnTest+Plan%3A%5CnHaha%21+yeah%2C+right.%5Cn%5CnDifferential+Revision%3A+https%3A%2F%2Fphab.mercurial-scm.org%2FD8388%22%7D&output=json",
+ "uri": "https://phab.mercurial-scm.org//api/differential.parsecommitmessage",
+ "method": "POST"
+ },
+ "response": {
+ "headers": {
+ "x-xss-protection": [
+ "1; mode=block"
+ ],
+ "strict-transport-security": [
+ "max-age=0; includeSubdomains; preload"
+ ],
+ "transfer-encoding": [
+ "chunked"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
+ ],
+ "content-type": [
+ "application/json"
+ ],
+ "referrer-policy": [
+ "no-referrer"
+ ],
+ "server": [
+ "Apache/2.4.10 (Debian)"
+ ],
+ "cache-control": [
+ "no-store"
+ ],
+ "x-content-type-options": [
+ "nosniff"
+ ],
+ "date": [
+ "Wed, 08 Apr 2020 21:13:40 GMT"
+ ]
+ },
+ "body": {
+ "string": "{\"result\":{\"errors\":[],\"fields\":{\"title\":\"two: second commit to review\",\"testPlan\":\"Haha! yeah, right.\",\"revisionID\":8388,\"summary\":\"This file was modified with 'mod2' as its contents.\"},\"revisionIDFieldInfo\":{\"value\":8388,\"validDomain\":\"https:\\/\\/phab.mercurial-scm.org\"},\"transactions\":[{\"type\":\"title\",\"value\":\"two: second commit to review\"},{\"type\":\"testPlan\",\"value\":\"Haha! yeah, right.\"},{\"type\":\"summary\",\"value\":\"This file was modified with 'mod2' as its contents.\"}]},\"error_code\":null,\"error_info\":null}"
+ },
+ "status": {
+ "code": 200,
+ "message": "OK"
+ }
+ }
+ },
+ {
+ "request": {
+ "headers": {
+ "content-length": [
+ "260"
+ ],
+ "accept": [
+ "application/mercurial-0.1"
+ ],
+ "content-type": [
+ "application/x-www-form-urlencoded"
+ ],
+ "host": [
+ "phab.mercurial-scm.org"
+ ],
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.3.2+463-d35fbae9dd33+20200408)"
+ ]
+ },
+ "body": "__conduit__=1¶ms=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22corpus%22%3A+%223%3A+a+commit+with+no+detailed+message%5Cn%5CnDifferential+Revision%3A+https%3A%2F%2Fphab.mercurial-scm.org%2FD8388%22%7D&output=json",
+ "uri": "https://phab.mercurial-scm.org//api/differential.parsecommitmessage",
+ "method": "POST"
+ },
+ "response": {
+ "headers": {
+ "x-xss-protection": [
+ "1; mode=block"
+ ],
+ "strict-transport-security": [
+ "max-age=0; includeSubdomains; preload"
+ ],
+ "transfer-encoding": [
+ "chunked"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
+ ],
+ "content-type": [
+ "application/json"
+ ],
+ "referrer-policy": [
+ "no-referrer"
+ ],
+ "server": [
+ "Apache/2.4.10 (Debian)"
+ ],
+ "cache-control": [
+ "no-store"
+ ],
+ "x-content-type-options": [
+ "nosniff"
+ ],
+ "date": [
+ "Wed, 08 Apr 2020 21:13:40 GMT"
+ ]
+ },
+ "body": {
+ "string": "{\"result\":{\"errors\":[],\"fields\":{\"title\":\"3: a commit with no detailed message\",\"revisionID\":8388},\"revisionIDFieldInfo\":{\"value\":8388,\"validDomain\":\"https:\\/\\/phab.mercurial-scm.org\"},\"transactions\":[{\"type\":\"title\",\"value\":\"3: a commit with no detailed message\"}]},\"error_code\":null,\"error_info\":null}"
+ },
+ "status": {
+ "code": 200,
+ "message": "OK"
+ }
+ }
+ },
+ {
+ "request": {
+ "headers": {
+ "content-length": [
+ "251"
+ ],
+ "accept": [
+ "application/mercurial-0.1"
+ ],
+ "content-type": [
+ "application/x-www-form-urlencoded"
+ ],
+ "host": [
+ "phab.mercurial-scm.org"
+ ],
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.3.2+463-d35fbae9dd33+20200408)"
+ ]
+ },
+ "body": "__conduit__=1¶ms=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22corpus%22%3A+%22four%3A+extend+the+fold+range%5Cn%5CnDifferential+Revision%3A+https%3A%2F%2Fphab.mercurial-scm.org%2FD8388%22%7D&output=json",
+ "uri": "https://phab.mercurial-scm.org//api/differential.parsecommitmessage",
+ "method": "POST"
+ },
+ "response": {
+ "headers": {
+ "x-xss-protection": [
+ "1; mode=block"
+ ],
+ "strict-transport-security": [
+ "max-age=0; includeSubdomains; preload"
+ ],
+ "transfer-encoding": [
+ "chunked"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
+ ],
+ "content-type": [
+ "application/json"
+ ],
+ "referrer-policy": [
+ "no-referrer"
+ ],
+ "server": [
+ "Apache/2.4.10 (Debian)"
+ ],
+ "cache-control": [
+ "no-store"
+ ],
+ "x-content-type-options": [
+ "nosniff"
+ ],
+ "date": [
+ "Wed, 08 Apr 2020 21:13:40 GMT"
+ ]
+ },
+ "body": {
+ "string": "{\"result\":{\"errors\":[],\"fields\":{\"title\":\"four: extend the fold range\",\"revisionID\":8388},\"revisionIDFieldInfo\":{\"value\":8388,\"validDomain\":\"https:\\/\\/phab.mercurial-scm.org\"},\"transactions\":[{\"type\":\"title\",\"value\":\"four: extend the fold range\"}]},\"error_code\":null,\"error_info\":null}"
+ },
+ "status": {
+ "code": 200,
+ "message": "OK"
+ }
+ }
+ },
+ {
+ "request": {
+ "headers": {
+ "content-length": [
+ "799"
+ ],
+ "accept": [
+ "application/mercurial-0.1"
+ ],
+ "content-type": [
+ "application/x-www-form-urlencoded"
+ ],
+ "host": [
+ "phab.mercurial-scm.org"
+ ],
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.3.2+463-d35fbae9dd33+20200408)"
+ ]
+ },
+ "body": "__conduit__=1¶ms=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22objectIdentifier%22%3A+8388%2C+%22transactions%22%3A+%5B%7B%22type%22%3A+%22update%22%2C+%22value%22%3A+%22PHID-DIFF-xfr3co4kuic6jeu3xbz4%22%7D%2C+%7B%22type%22%3A+%22title%22%2C+%22value%22%3A+%22added+file%22%7D%2C+%7B%22type%22%3A+%22testPlan%22%2C+%22value%22%3A+%22LOL%21++What+testing%3F%21%5Cn%5CnHaha%21+yeah%2C+right.%22%7D%2C+%7B%22type%22%3A+%22summary%22%2C+%22value%22%3A+%22one%3A+first+commit+to+review%5Cn%5CnThis+file+was+modified+with+%27mod1%27+as+its+contents.%5Cn%5Cntwo%3A+second+commit+to+review%5Cn%5CnThis+file+was+modified+with+%27mod2%27+as+its+contents.%5Cn%5Cn3%3A+a+commit+with+no+detailed+message%5Cn%5Cnfour%3A+extend+the+fold+range%22%7D%5D%7D&output=json",
+ "uri": "https://phab.mercurial-scm.org//api/differential.revision.edit",
+ "method": "POST"
+ },
+ "response": {
+ "headers": {
+ "x-xss-protection": [
+ "1; mode=block"
+ ],
+ "strict-transport-security": [
+ "max-age=0; includeSubdomains; preload"
+ ],
+ "transfer-encoding": [
+ "chunked"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
+ ],
+ "content-type": [
+ "application/json"
+ ],
+ "referrer-policy": [
+ "no-referrer"
+ ],
+ "server": [
+ "Apache/2.4.10 (Debian)"
+ ],
+ "cache-control": [
+ "no-store"
+ ],
+ "x-content-type-options": [
+ "nosniff"
+ ],
+ "date": [
+ "Wed, 08 Apr 2020 21:13:41 GMT"
+ ]
+ },
+ "body": {
+ "string": "{\"result\":{\"object\":{\"id\":8388,\"phid\":\"PHID-DREV-v5iywrnzj4h4uwo57pob\"},\"transactions\":[{\"phid\":\"PHID-XACT-DREV-6o5t2mriosy3fst\"},{\"phid\":\"PHID-XACT-DREV-m25qxnu4ok3w5y5\"}]},\"error_code\":null,\"error_info\":null}"
+ },
+ "status": {
+ "code": 200,
+ "message": "OK"
+ }
+ }
+ },
+ {
+ "request": {
+ "headers": {
+ "content-length": [
+ "146"
+ ],
+ "accept": [
+ "application/mercurial-0.1"
+ ],
+ "content-type": [
+ "application/x-www-form-urlencoded"
+ ],
+ "host": [
+ "phab.mercurial-scm.org"
+ ],
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.3.2+463-d35fbae9dd33+20200408)"
+ ]
+ },
+ "body": "__conduit__=1¶ms=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22ids%22%3A+%5B8388%5D%7D&output=json",
+ "uri": "https://phab.mercurial-scm.org//api/differential.query",
+ "method": "POST"
+ },
+ "response": {
+ "headers": {
+ "x-xss-protection": [
+ "1; mode=block"
+ ],
+ "strict-transport-security": [
+ "max-age=0; includeSubdomains; preload"
+ ],
+ "transfer-encoding": [
+ "chunked"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
+ ],
+ "content-type": [
+ "application/json"
+ ],
+ "referrer-policy": [
+ "no-referrer"
+ ],
+ "server": [
+ "Apache/2.4.10 (Debian)"
+ ],
+ "cache-control": [
+ "no-store"
+ ],
+ "x-content-type-options": [
+ "nosniff"
+ ],
+ "date": [
+ "Wed, 08 Apr 2020 21:13:42 GMT"
+ ]
+ },
+ "body": {
+ "string": "{\"result\":[{\"id\":\"8388\",\"phid\":\"PHID-DREV-v5iywrnzj4h4uwo57pob\",\"title\":\"added file\",\"uri\":\"https:\\/\\/phab.mercurial-scm.org\\/D8388\",\"dateCreated\":\"1586380405\",\"dateModified\":\"1586380421\",\"authorPHID\":\"PHID-USER-tzhaient733lwrlbcag5\",\"status\":\"0\",\"statusName\":\"Needs Review\",\"properties\":{\"draft.broadcast\":true,\"lines.added\":3,\"lines.removed\":0},\"branch\":\"default\",\"summary\":\"one: first commit to review\\n\\nThis file was modified with 'mod1' as its contents.\\n\\ntwo: second commit to review\\n\\nThis file was modified with 'mod2' as its contents.\\n\\n3: a commit with no detailed message\\n\\nfour: extend the fold range\",\"testPlan\":\"LOL! What testing?!\\n\\nHaha! yeah, right.\",\"lineCount\":\"3\",\"activeDiffPHID\":\"PHID-DIFF-xfr3co4kuic6jeu3xbz4\",\"diffs\":[\"21012\",\"21011\",\"21010\"],\"commits\":[],\"reviewers\":{\"PHID-PROJ-3dvcxzznrjru2xmmses3\":\"PHID-PROJ-3dvcxzznrjru2xmmses3\"},\"ccs\":[\"PHID-USER-q42dn7cc3donqriafhjx\"],\"hashes\":[[\"hgcm\",\"\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\"],[\"hgcm\",\"\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\"],[\"hgcm\",\"\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\"],[\"hgcm\",\"\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u
0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\"],[\"hgcm\",\"\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\"]],\"auxiliary\":{\"phabricator:projects\":[],\"phabricator:depends-on\":[]},\"repositoryPHID\":\"PHID-REPO-bvunnehri4u2isyr7bc3\",\"sourcePath\":\"\\/\"}],\"error_code\":null,\"error_info\":null}"
+ },
+ "status": {
+ "code": 200,
+ "message": "OK"
+ }
+ }
+ },
+ {
+ "request": {
+ "headers": {
+ "content-length": [
+ "482"
+ ],
+ "accept": [
+ "application/mercurial-0.1"
+ ],
+ "content-type": [
+ "application/x-www-form-urlencoded"
+ ],
+ "host": [
+ "phab.mercurial-scm.org"
+ ],
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.3.2+463-d35fbae9dd33+20200408)"
+ ]
+ },
+ "body": "__conduit__=1¶ms=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22data%22%3A+%22%7B%5C%22branch%5C%22%3A+%5C%22default%5C%22%2C+%5C%22date%5C%22%3A+%5C%220+0%5C%22%2C+%5C%22node%5C%22%3A+%5C%22e919cdf3d4fe9a926427b1961601eeaf4b4e2caf%5C%22%2C+%5C%22parent%5C%22%3A+%5C%220000000000000000000000000000000000000000%5C%22%2C+%5C%22user%5C%22%3A+%5C%22test%5C%22%7D%22%2C+%22diff_id%22%3A+21012%2C+%22name%22%3A+%22hg%3Ameta%22%7D&output=json",
+ "uri": "https://phab.mercurial-scm.org//api/differential.setdiffproperty",
+ "method": "POST"
+ },
+ "response": {
+ "headers": {
+ "x-xss-protection": [
+ "1; mode=block"
+ ],
+ "strict-transport-security": [
+ "max-age=0; includeSubdomains; preload"
+ ],
+ "transfer-encoding": [
+ "chunked"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
+ ],
+ "content-type": [
+ "application/json"
+ ],
+ "referrer-policy": [
+ "no-referrer"
+ ],
+ "server": [
+ "Apache/2.4.10 (Debian)"
+ ],
+ "cache-control": [
+ "no-store"
+ ],
+ "x-content-type-options": [
+ "nosniff"
+ ],
+ "date": [
+ "Wed, 08 Apr 2020 21:13:42 GMT"
+ ]
+ },
+ "body": {
+ "string": "{\"result\":null,\"error_code\":null,\"error_info\":null}"
+ },
+ "status": {
+ "code": 200,
+ "message": "OK"
+ }
+ }
+ },
+ {
+ "request": {
+ "headers": {
+ "content-length": [
+ "2130"
+ ],
+ "accept": [
+ "application/mercurial-0.1"
+ ],
+ "content-type": [
+ "application/x-www-form-urlencoded"
+ ],
+ "host": [
+ "phab.mercurial-scm.org"
+ ],
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.3.2+463-d35fbae9dd33+20200408)"
+ ]
+ },
+ "body": "__conduit__=1¶ms=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22data%22%3A+%22%7B%5C%2215e9b14b4b4c37ccd18298d058a184068718765e%5C%22%3A+%7B%5C%22author%5C%22%3A+%5C%22test%5C%22%2C+%5C%22authorEmail%5C%22%3A+%5C%22test%5C%22%2C+%5C%22branch%5C%22%3A+%5C%22default%5C%22%2C+%5C%22commit%5C%22%3A+%5C%2215e9b14b4b4c37ccd18298d058a184068718765e%5C%22%2C+%5C%22parents%5C%22%3A+%5B%5C%220000000000000000000000000000000000000000%5C%22%5D%2C+%5C%22time%5C%22%3A+0%7D%2C+%5C%2230682b960804bd91823ea5ccb5d6a7d999f8d1ea%5C%22%3A+%7B%5C%22author%5C%22%3A+%5C%22test%5C%22%2C+%5C%22authorEmail%5C%22%3A+%5C%22test%5C%22%2C+%5C%22branch%5C%22%3A+%5C%22default%5C%22%2C+%5C%22commit%5C%22%3A+%5C%2230682b960804bd91823ea5ccb5d6a7d999f8d1ea%5C%22%2C+%5C%22parents%5C%22%3A+%5B%5C%223ee132d41dbc24bf5a4df3e97573d6e922446565%5C%22%5D%2C+%5C%22time%5C%22%3A+0%7D%2C+%5C%223ee132d41dbc24bf5a4df3e97573d6e922446565%5C%22%3A+%7B%5C%22author%5C%22%3A+%5C%22test%5C%22%2C+%5C%22authorEmail%5C%22%3A+%5C%22test%5C%22%2C+%5C%22branch%5C%22%3A+%5C%22default%5C%22%2C+%5C%22commit%5C%22%3A+%5C%223ee132d41dbc24bf5a4df3e97573d6e922446565%5C%22%2C+%5C%22parents%5C%22%3A+%5B%5C%226320b7d714cf2b2b71370eafeae29e2988d07872%5C%22%5D%2C+%5C%22time%5C%22%3A+0%7D%2C+%5C%226320b7d714cf2b2b71370eafeae29e2988d07872%5C%22%3A+%7B%5C%22author%5C%22%3A+%5C%22test%5C%22%2C+%5C%22authorEmail%5C%22%3A+%5C%22test%5C%22%2C+%5C%22branch%5C%22%3A+%5C%22default%5C%22%2C+%5C%22commit%5C%22%3A+%5C%226320b7d714cf2b2b71370eafeae29e2988d07872%5C%22%2C+%5C%22parents%5C%22%3A+%5B%5C%2215e9b14b4b4c37ccd18298d058a184068718765e%5C%22%5D%2C+%5C%22time%5C%22%3A+0%7D%2C+%5C%22e919cdf3d4fe9a926427b1961601eeaf4b4e2caf%5C%22%3A+%7B%5C%22author%5C%22%3A+%5C%22test%5C%22%2C+%5C%22authorEmail%5C%22%3A+%5C%22test%5C%22%2C+%5C%22branch%5C%22%3A+%5C%22default%5C%22%2C+%5C%22commit%5C%22%3A+%5C%22e919cdf3d4fe9a926427b1961601eeaf4b4e2caf%5C%22%2C+%5C%22parents%5C%22%3A+%5B%5C%2230682b960804bd91823ea5ccb5d6a7d999f8d1ea%5C
%22%5D%2C+%5C%22time%5C%22%3A+0%7D%7D%22%2C+%22diff_id%22%3A+21012%2C+%22name%22%3A+%22local%3Acommits%22%7D&output=json",
+ "uri": "https://phab.mercurial-scm.org//api/differential.setdiffproperty",
+ "method": "POST"
+ },
+ "response": {
+ "headers": {
+ "x-xss-protection": [
+ "1; mode=block"
+ ],
+ "strict-transport-security": [
+ "max-age=0; includeSubdomains; preload"
+ ],
+ "transfer-encoding": [
+ "chunked"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
+ ],
+ "content-type": [
+ "application/json"
+ ],
+ "referrer-policy": [
+ "no-referrer"
+ ],
+ "server": [
+ "Apache/2.4.10 (Debian)"
+ ],
+ "cache-control": [
+ "no-store"
+ ],
+ "x-content-type-options": [
+ "nosniff"
+ ],
+ "date": [
+ "Wed, 08 Apr 2020 21:13:42 GMT"
+ ]
+ },
+ "body": {
+ "string": "{\"result\":null,\"error_code\":null,\"error_info\":null}"
+ },
+ "status": {
+ "code": 200,
+ "message": "OK"
+ }
+ }
+ }
+ ],
+ "version": 1
+}
\ No newline at end of file
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/phabricator/phabsend-fold-immutable.json Thu Apr 16 22:51:09 2020 +0530
@@ -0,0 +1,617 @@
+{
+ "interactions": [
+ {
+ "request": {
+ "headers": {
+ "content-length": [
+ "183"
+ ],
+ "accept": [
+ "application/mercurial-0.1"
+ ],
+ "content-type": [
+ "application/x-www-form-urlencoded"
+ ],
+ "host": [
+ "phab.mercurial-scm.org"
+ ],
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.3.2+463-d35fbae9dd33+20200408)"
+ ]
+ },
+ "body": "__conduit__=1¶ms=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22constraints%22%3A+%7B%22callsigns%22%3A+%5B%22HG%22%5D%7D%7D&output=json",
+ "uri": "https://phab.mercurial-scm.org//api/diffusion.repository.search",
+ "method": "POST"
+ },
+ "response": {
+ "headers": {
+ "x-xss-protection": [
+ "1; mode=block"
+ ],
+ "strict-transport-security": [
+ "max-age=0; includeSubdomains; preload"
+ ],
+ "transfer-encoding": [
+ "chunked"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
+ ],
+ "content-type": [
+ "application/json"
+ ],
+ "referrer-policy": [
+ "no-referrer"
+ ],
+ "server": [
+ "Apache/2.4.10 (Debian)"
+ ],
+ "cache-control": [
+ "no-store"
+ ],
+ "x-content-type-options": [
+ "nosniff"
+ ],
+ "date": [
+ "Wed, 08 Apr 2020 21:12:49 GMT"
+ ]
+ },
+ "body": {
+ "string": "{\"result\":{\"data\":[{\"id\":2,\"type\":\"REPO\",\"phid\":\"PHID-REPO-bvunnehri4u2isyr7bc3\",\"fields\":{\"name\":\"Mercurial\",\"vcs\":\"hg\",\"callsign\":\"HG\",\"shortName\":null,\"status\":\"active\",\"isImporting\":false,\"almanacServicePHID\":null,\"refRules\":{\"fetchRules\":[],\"trackRules\":[],\"permanentRefRules\":[]},\"spacePHID\":null,\"dateCreated\":1498761653,\"dateModified\":1500403184,\"policy\":{\"view\":\"public\",\"edit\":\"admin\",\"diffusion.push\":\"users\"}},\"attachments\":{}}],\"maps\":{},\"query\":{\"queryKey\":null},\"cursor\":{\"limit\":100,\"after\":null,\"before\":null,\"order\":null}},\"error_code\":null,\"error_info\":null}"
+ },
+ "status": {
+ "code": 200,
+ "message": "OK"
+ }
+ }
+ },
+ {
+ "request": {
+ "headers": {
+ "content-length": [
+ "1160"
+ ],
+ "accept": [
+ "application/mercurial-0.1"
+ ],
+ "content-type": [
+ "application/x-www-form-urlencoded"
+ ],
+ "host": [
+ "phab.mercurial-scm.org"
+ ],
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.3.2+463-d35fbae9dd33+20200408)"
+ ]
+ },
+ "body": "__conduit__=1¶ms=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22bookmark%22%3A+null%2C+%22branch%22%3A+%22default%22%2C+%22changes%22%3A+%7B%22file.txt%22%3A+%7B%22addLines%22%3A+1%2C+%22awayPaths%22%3A+%5B%5D%2C+%22commitHash%22%3A+null%2C+%22currentPath%22%3A+%22file.txt%22%2C+%22delLines%22%3A+1%2C+%22fileType%22%3A+1%2C+%22hunks%22%3A+%5B%7B%22addLines%22%3A+1%2C+%22corpus%22%3A+%22-added%5Cn%2Bmod3%5Cn%22%2C+%22delLines%22%3A+1%2C+%22newLength%22%3A+1%2C+%22newOffset%22%3A+1%2C+%22oldLength%22%3A+1%2C+%22oldOffset%22%3A+1%7D%5D%2C+%22metadata%22%3A+%7B%7D%2C+%22newProperties%22%3A+%7B%7D%2C+%22oldPath%22%3A+%22file.txt%22%2C+%22oldProperties%22%3A+%7B%7D%2C+%22type%22%3A+2%7D%7D%2C+%22creationMethod%22%3A+%22phabsend%22%2C+%22lintStatus%22%3A+%22none%22%2C+%22repositoryPHID%22%3A+%22PHID-REPO-bvunnehri4u2isyr7bc3%22%2C+%22sourceControlBaseRevision%22%3A+%2298d480e0d494906d9758c44c622951b429afd862%22%2C+%22sourceControlPath%22%3A+%22%2F%22%2C+%22sourceControlSystem%22%3A+%22hg%22%2C+%22sourceMachine%22%3A+%22%22%2C+%22sourcePath%22%3A+%22%2F%22%2C+%22unitStatus%22%3A+%22none%22%7D&output=json",
+ "uri": "https://phab.mercurial-scm.org//api/differential.creatediff",
+ "method": "POST"
+ },
+ "response": {
+ "headers": {
+ "x-xss-protection": [
+ "1; mode=block"
+ ],
+ "strict-transport-security": [
+ "max-age=0; includeSubdomains; preload"
+ ],
+ "transfer-encoding": [
+ "chunked"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
+ ],
+ "content-type": [
+ "application/json"
+ ],
+ "referrer-policy": [
+ "no-referrer"
+ ],
+ "server": [
+ "Apache/2.4.10 (Debian)"
+ ],
+ "cache-control": [
+ "no-store"
+ ],
+ "x-content-type-options": [
+ "nosniff"
+ ],
+ "date": [
+ "Wed, 08 Apr 2020 21:12:50 GMT"
+ ]
+ },
+ "body": {
+ "string": "{\"result\":{\"diffid\":21005,\"phid\":\"PHID-DIFF-wsgu5klkhfav5w7whedy\",\"uri\":\"https:\\/\\/phab.mercurial-scm.org\\/differential\\/diff\\/21005\\/\"},\"error_code\":null,\"error_info\":null}"
+ },
+ "status": {
+ "code": 200,
+ "message": "OK"
+ }
+ }
+ },
+ {
+ "request": {
+ "headers": {
+ "content-length": [
+ "482"
+ ],
+ "accept": [
+ "application/mercurial-0.1"
+ ],
+ "content-type": [
+ "application/x-www-form-urlencoded"
+ ],
+ "host": [
+ "phab.mercurial-scm.org"
+ ],
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.3.2+463-d35fbae9dd33+20200408)"
+ ]
+ },
+ "body": "__conduit__=1¶ms=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22data%22%3A+%22%7B%5C%22branch%5C%22%3A+%5C%22default%5C%22%2C+%5C%22date%5C%22%3A+%5C%220+0%5C%22%2C+%5C%22node%5C%22%3A+%5C%22d235829e802c98c0171dac4a265b4b9bad07093a%5C%22%2C+%5C%22parent%5C%22%3A+%5C%2298d480e0d494906d9758c44c622951b429afd862%5C%22%2C+%5C%22user%5C%22%3A+%5C%22test%5C%22%7D%22%2C+%22diff_id%22%3A+21005%2C+%22name%22%3A+%22hg%3Ameta%22%7D&output=json",
+ "uri": "https://phab.mercurial-scm.org//api/differential.setdiffproperty",
+ "method": "POST"
+ },
+ "response": {
+ "headers": {
+ "x-xss-protection": [
+ "1; mode=block"
+ ],
+ "strict-transport-security": [
+ "max-age=0; includeSubdomains; preload"
+ ],
+ "transfer-encoding": [
+ "chunked"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
+ ],
+ "content-type": [
+ "application/json"
+ ],
+ "referrer-policy": [
+ "no-referrer"
+ ],
+ "server": [
+ "Apache/2.4.10 (Debian)"
+ ],
+ "cache-control": [
+ "no-store"
+ ],
+ "x-content-type-options": [
+ "nosniff"
+ ],
+ "date": [
+ "Wed, 08 Apr 2020 21:12:50 GMT"
+ ]
+ },
+ "body": {
+ "string": "{\"result\":null,\"error_code\":null,\"error_info\":null}"
+ },
+ "status": {
+ "code": 200,
+ "message": "OK"
+ }
+ }
+ },
+ {
+ "request": {
+ "headers": {
+ "content-length": [
+ "1362"
+ ],
+ "accept": [
+ "application/mercurial-0.1"
+ ],
+ "content-type": [
+ "application/x-www-form-urlencoded"
+ ],
+ "host": [
+ "phab.mercurial-scm.org"
+ ],
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.3.2+463-d35fbae9dd33+20200408)"
+ ]
+ },
+ "body": "__conduit__=1¶ms=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22data%22%3A+%22%7B%5C%2224a4438154bad90603d53a4dd31d8daff7844b36%5C%22%3A+%7B%5C%22author%5C%22%3A+%5C%22test%5C%22%2C+%5C%22authorEmail%5C%22%3A+%5C%22test%5C%22%2C+%5C%22branch%5C%22%3A+%5C%22default%5C%22%2C+%5C%22commit%5C%22%3A+%5C%2224a4438154bad90603d53a4dd31d8daff7844b36%5C%22%2C+%5C%22parents%5C%22%3A+%5B%5C%22a959a3f69d8d498d96102cda0bc03ed89706891e%5C%22%5D%2C+%5C%22time%5C%22%3A+0%7D%2C+%5C%22a959a3f69d8d498d96102cda0bc03ed89706891e%5C%22%3A+%7B%5C%22author%5C%22%3A+%5C%22test%5C%22%2C+%5C%22authorEmail%5C%22%3A+%5C%22test%5C%22%2C+%5C%22branch%5C%22%3A+%5C%22default%5C%22%2C+%5C%22commit%5C%22%3A+%5C%22a959a3f69d8d498d96102cda0bc03ed89706891e%5C%22%2C+%5C%22parents%5C%22%3A+%5B%5C%2298d480e0d494906d9758c44c622951b429afd862%5C%22%5D%2C+%5C%22time%5C%22%3A+0%7D%2C+%5C%22d235829e802c98c0171dac4a265b4b9bad07093a%5C%22%3A+%7B%5C%22author%5C%22%3A+%5C%22test%5C%22%2C+%5C%22authorEmail%5C%22%3A+%5C%22test%5C%22%2C+%5C%22branch%5C%22%3A+%5C%22default%5C%22%2C+%5C%22commit%5C%22%3A+%5C%22d235829e802c98c0171dac4a265b4b9bad07093a%5C%22%2C+%5C%22parents%5C%22%3A+%5B%5C%2224a4438154bad90603d53a4dd31d8daff7844b36%5C%22%5D%2C+%5C%22time%5C%22%3A+0%7D%7D%22%2C+%22diff_id%22%3A+21005%2C+%22name%22%3A+%22local%3Acommits%22%7D&output=json",
+ "uri": "https://phab.mercurial-scm.org//api/differential.setdiffproperty",
+ "method": "POST"
+ },
+ "response": {
+ "headers": {
+ "x-xss-protection": [
+ "1; mode=block"
+ ],
+ "strict-transport-security": [
+ "max-age=0; includeSubdomains; preload"
+ ],
+ "transfer-encoding": [
+ "chunked"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
+ ],
+ "content-type": [
+ "application/json"
+ ],
+ "referrer-policy": [
+ "no-referrer"
+ ],
+ "server": [
+ "Apache/2.4.10 (Debian)"
+ ],
+ "cache-control": [
+ "no-store"
+ ],
+ "x-content-type-options": [
+ "nosniff"
+ ],
+ "date": [
+ "Wed, 08 Apr 2020 21:12:50 GMT"
+ ]
+ },
+ "body": {
+ "string": "{\"result\":null,\"error_code\":null,\"error_info\":null}"
+ },
+ "status": {
+ "code": 200,
+ "message": "OK"
+ }
+ }
+ },
+ {
+ "request": {
+ "headers": {
+ "content-length": [
+ "287"
+ ],
+ "accept": [
+ "application/mercurial-0.1"
+ ],
+ "content-type": [
+ "application/x-www-form-urlencoded"
+ ],
+ "host": [
+ "phab.mercurial-scm.org"
+ ],
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.3.2+463-d35fbae9dd33+20200408)"
+ ]
+ },
+ "body": "__conduit__=1¶ms=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22corpus%22%3A+%22one%3A+first+commit+to+review%5Cn%5CnThis+file+was+modified+with+%27mod1%27+as+its+contents.%5Cn%5CnTest+Plan%3A%5CnLOL%21++What+testing%3F%21%22%7D&output=json",
+ "uri": "https://phab.mercurial-scm.org//api/differential.parsecommitmessage",
+ "method": "POST"
+ },
+ "response": {
+ "headers": {
+ "x-xss-protection": [
+ "1; mode=block"
+ ],
+ "strict-transport-security": [
+ "max-age=0; includeSubdomains; preload"
+ ],
+ "transfer-encoding": [
+ "chunked"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
+ ],
+ "content-type": [
+ "application/json"
+ ],
+ "referrer-policy": [
+ "no-referrer"
+ ],
+ "server": [
+ "Apache/2.4.10 (Debian)"
+ ],
+ "cache-control": [
+ "no-store"
+ ],
+ "x-content-type-options": [
+ "nosniff"
+ ],
+ "date": [
+ "Wed, 08 Apr 2020 21:12:51 GMT"
+ ]
+ },
+ "body": {
+ "string": "{\"result\":{\"errors\":[],\"fields\":{\"title\":\"one: first commit to review\",\"testPlan\":\"LOL! What testing?!\",\"summary\":\"This file was modified with 'mod1' as its contents.\"},\"revisionIDFieldInfo\":{\"value\":null,\"validDomain\":\"https:\\/\\/phab.mercurial-scm.org\"},\"transactions\":[{\"type\":\"title\",\"value\":\"one: first commit to review\"},{\"type\":\"testPlan\",\"value\":\"LOL! What testing?!\"},{\"type\":\"summary\",\"value\":\"This file was modified with 'mod1' as its contents.\"}]},\"error_code\":null,\"error_info\":null}"
+ },
+ "status": {
+ "code": 200,
+ "message": "OK"
+ }
+ }
+ },
+ {
+ "request": {
+ "headers": {
+ "content-length": [
+ "284"
+ ],
+ "accept": [
+ "application/mercurial-0.1"
+ ],
+ "content-type": [
+ "application/x-www-form-urlencoded"
+ ],
+ "host": [
+ "phab.mercurial-scm.org"
+ ],
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.3.2+463-d35fbae9dd33+20200408)"
+ ]
+ },
+ "body": "__conduit__=1¶ms=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22corpus%22%3A+%22two%3A+second+commit+to+review%5Cn%5CnThis+file+was+modified+with+%27mod2%27+as+its+contents.%5Cn%5CnTest+Plan%3A%5CnHaha%21+yeah%2C+right.%22%7D&output=json",
+ "uri": "https://phab.mercurial-scm.org//api/differential.parsecommitmessage",
+ "method": "POST"
+ },
+ "response": {
+ "headers": {
+ "x-xss-protection": [
+ "1; mode=block"
+ ],
+ "strict-transport-security": [
+ "max-age=0; includeSubdomains; preload"
+ ],
+ "transfer-encoding": [
+ "chunked"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
+ ],
+ "content-type": [
+ "application/json"
+ ],
+ "referrer-policy": [
+ "no-referrer"
+ ],
+ "server": [
+ "Apache/2.4.10 (Debian)"
+ ],
+ "cache-control": [
+ "no-store"
+ ],
+ "x-content-type-options": [
+ "nosniff"
+ ],
+ "date": [
+ "Wed, 08 Apr 2020 21:12:51 GMT"
+ ]
+ },
+ "body": {
+ "string": "{\"result\":{\"errors\":[],\"fields\":{\"title\":\"two: second commit to review\",\"testPlan\":\"Haha! yeah, right.\",\"summary\":\"This file was modified with 'mod2' as its contents.\"},\"revisionIDFieldInfo\":{\"value\":null,\"validDomain\":\"https:\\/\\/phab.mercurial-scm.org\"},\"transactions\":[{\"type\":\"title\",\"value\":\"two: second commit to review\"},{\"type\":\"testPlan\",\"value\":\"Haha! yeah, right.\"},{\"type\":\"summary\",\"value\":\"This file was modified with 'mod2' as its contents.\"}]},\"error_code\":null,\"error_info\":null}"
+ },
+ "status": {
+ "code": 200,
+ "message": "OK"
+ }
+ }
+ },
+ {
+ "request": {
+ "headers": {
+ "content-length": [
+ "183"
+ ],
+ "accept": [
+ "application/mercurial-0.1"
+ ],
+ "content-type": [
+ "application/x-www-form-urlencoded"
+ ],
+ "host": [
+ "phab.mercurial-scm.org"
+ ],
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.3.2+463-d35fbae9dd33+20200408)"
+ ]
+ },
+ "body": "__conduit__=1¶ms=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22corpus%22%3A+%223%3A+a+commit+with+no+detailed+message%22%7D&output=json",
+ "uri": "https://phab.mercurial-scm.org//api/differential.parsecommitmessage",
+ "method": "POST"
+ },
+ "response": {
+ "headers": {
+ "x-xss-protection": [
+ "1; mode=block"
+ ],
+ "strict-transport-security": [
+ "max-age=0; includeSubdomains; preload"
+ ],
+ "transfer-encoding": [
+ "chunked"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
+ ],
+ "content-type": [
+ "application/json"
+ ],
+ "referrer-policy": [
+ "no-referrer"
+ ],
+ "server": [
+ "Apache/2.4.10 (Debian)"
+ ],
+ "cache-control": [
+ "no-store"
+ ],
+ "x-content-type-options": [
+ "nosniff"
+ ],
+ "date": [
+ "Wed, 08 Apr 2020 21:12:52 GMT"
+ ]
+ },
+ "body": {
+ "string": "{\"result\":{\"errors\":[],\"fields\":{\"title\":\"3: a commit with no detailed message\"},\"revisionIDFieldInfo\":{\"value\":null,\"validDomain\":\"https:\\/\\/phab.mercurial-scm.org\"},\"transactions\":[{\"type\":\"title\",\"value\":\"3: a commit with no detailed message\"}]},\"error_code\":null,\"error_info\":null}"
+ },
+ "status": {
+ "code": 200,
+ "message": "OK"
+ }
+ }
+ },
+ {
+ "request": {
+ "headers": {
+ "content-length": [
+ "710"
+ ],
+ "accept": [
+ "application/mercurial-0.1"
+ ],
+ "content-type": [
+ "application/x-www-form-urlencoded"
+ ],
+ "host": [
+ "phab.mercurial-scm.org"
+ ],
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.3.2+463-d35fbae9dd33+20200408)"
+ ]
+ },
+ "body": "__conduit__=1¶ms=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22transactions%22%3A+%5B%7B%22type%22%3A+%22update%22%2C+%22value%22%3A+%22PHID-DIFF-wsgu5klkhfav5w7whedy%22%7D%2C+%7B%22type%22%3A+%22title%22%2C+%22value%22%3A+%22one%3A+first+commit+to+review%22%7D%2C+%7B%22type%22%3A+%22testPlan%22%2C+%22value%22%3A+%22LOL%21++What+testing%3F%21%5Cn%5CnHaha%21+yeah%2C+right.%22%7D%2C+%7B%22type%22%3A+%22summary%22%2C+%22value%22%3A+%22This+file+was+modified+with+%27mod1%27+as+its+contents.%5Cn%5Cntwo%3A+second+commit+to+review%5Cn%5CnThis+file+was+modified+with+%27mod2%27+as+its+contents.%5Cn%5Cn3%3A+a+commit+with+no+detailed+message%22%7D%5D%7D&output=json",
+ "uri": "https://phab.mercurial-scm.org//api/differential.revision.edit",
+ "method": "POST"
+ },
+ "response": {
+ "headers": {
+ "x-xss-protection": [
+ "1; mode=block"
+ ],
+ "strict-transport-security": [
+ "max-age=0; includeSubdomains; preload"
+ ],
+ "transfer-encoding": [
+ "chunked"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
+ ],
+ "content-type": [
+ "application/json"
+ ],
+ "referrer-policy": [
+ "no-referrer"
+ ],
+ "server": [
+ "Apache/2.4.10 (Debian)"
+ ],
+ "cache-control": [
+ "no-store"
+ ],
+ "x-content-type-options": [
+ "nosniff"
+ ],
+ "date": [
+ "Wed, 08 Apr 2020 21:12:52 GMT"
+ ]
+ },
+ "body": {
+ "string": "{\"result\":{\"object\":{\"id\":8386,\"phid\":\"PHID-DREV-vljbu77pqc7mc7kbjtda\"},\"transactions\":[{\"phid\":\"PHID-XACT-DREV-53554u7xxma5ilf\"},{\"phid\":\"PHID-XACT-DREV-uuawwpix7ys5h2o\"},{\"phid\":\"PHID-XACT-DREV-ayqnuzfys4wwult\"},{\"phid\":\"PHID-XACT-DREV-7cwnl5hsynwvae4\"},{\"phid\":\"PHID-XACT-DREV-q2g3p7fuwk2eldg\"},{\"phid\":\"PHID-XACT-DREV-32j4eonnv72por3\"},{\"phid\":\"PHID-XACT-DREV-h4hqh7acick3f6t\"}]},\"error_code\":null,\"error_info\":null}"
+ },
+ "status": {
+ "code": 200,
+ "message": "OK"
+ }
+ }
+ },
+ {
+ "request": {
+ "headers": {
+ "content-length": [
+ "146"
+ ],
+ "accept": [
+ "application/mercurial-0.1"
+ ],
+ "content-type": [
+ "application/x-www-form-urlencoded"
+ ],
+ "host": [
+ "phab.mercurial-scm.org"
+ ],
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.3.2+463-d35fbae9dd33+20200408)"
+ ]
+ },
+ "body": "__conduit__=1¶ms=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22ids%22%3A+%5B8386%5D%7D&output=json",
+ "uri": "https://phab.mercurial-scm.org//api/differential.query",
+ "method": "POST"
+ },
+ "response": {
+ "headers": {
+ "x-xss-protection": [
+ "1; mode=block"
+ ],
+ "strict-transport-security": [
+ "max-age=0; includeSubdomains; preload"
+ ],
+ "transfer-encoding": [
+ "chunked"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
+ ],
+ "content-type": [
+ "application/json"
+ ],
+ "referrer-policy": [
+ "no-referrer"
+ ],
+ "server": [
+ "Apache/2.4.10 (Debian)"
+ ],
+ "cache-control": [
+ "no-store"
+ ],
+ "x-content-type-options": [
+ "nosniff"
+ ],
+ "date": [
+ "Wed, 08 Apr 2020 21:12:53 GMT"
+ ]
+ },
+ "body": {
+ "string": "{\"result\":[{\"id\":\"8386\",\"phid\":\"PHID-DREV-vljbu77pqc7mc7kbjtda\",\"title\":\"one: first commit to review\",\"uri\":\"https:\\/\\/phab.mercurial-scm.org\\/D8386\",\"dateCreated\":\"1586380372\",\"dateModified\":\"1586380372\",\"authorPHID\":\"PHID-USER-tzhaient733lwrlbcag5\",\"status\":\"0\",\"statusName\":\"Needs Review\",\"properties\":{\"draft.broadcast\":true,\"lines.added\":1,\"lines.removed\":1},\"branch\":\"default\",\"summary\":\"This file was modified with 'mod1' as its contents.\\n\\ntwo: second commit to review\\n\\nThis file was modified with 'mod2' as its contents.\\n\\n3: a commit with no detailed message\",\"testPlan\":\"LOL! What testing?!\\n\\nHaha! yeah, right.\",\"lineCount\":\"2\",\"activeDiffPHID\":\"PHID-DIFF-wsgu5klkhfav5w7whedy\",\"diffs\":[\"21005\"],\"commits\":[],\"reviewers\":{\"PHID-PROJ-3dvcxzznrjru2xmmses3\":\"PHID-PROJ-3dvcxzznrjru2xmmses3\"},\"ccs\":[\"PHID-USER-q42dn7cc3donqriafhjx\"],\"hashes\":[[\"hgcm\",\"\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\"],[\"hgcm\",\"\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\"],[\"hgcm\",\"\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\"]],\"auxiliary\":{\"phabricator:projects\":[],\"phabricator:depends-on\":[]},\"repositoryPHID\":\"PHID-REPO-bvunnehri4u2isyr7bc3\",\"sourcePath\":\"\
\/\"}],\"error_code\":null,\"error_info\":null}"
+ },
+ "status": {
+ "code": 200,
+ "message": "OK"
+ }
+ }
+ }
+ ],
+ "version": 1
+}
\ No newline at end of file
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/phabricator/phabsend-fold-initial.json Thu Apr 16 22:51:09 2020 +0530
@@ -0,0 +1,753 @@
+{
+ "interactions": [
+ {
+ "request": {
+ "headers": {
+ "content-length": [
+ "183"
+ ],
+ "accept": [
+ "application/mercurial-0.1"
+ ],
+ "content-type": [
+ "application/x-www-form-urlencoded"
+ ],
+ "host": [
+ "phab.mercurial-scm.org"
+ ],
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.3.2+463-d35fbae9dd33+20200408)"
+ ]
+ },
+ "body": "__conduit__=1¶ms=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22constraints%22%3A+%7B%22callsigns%22%3A+%5B%22HG%22%5D%7D%7D&output=json",
+ "uri": "https://phab.mercurial-scm.org//api/diffusion.repository.search",
+ "method": "POST"
+ },
+ "response": {
+ "headers": {
+ "x-xss-protection": [
+ "1; mode=block"
+ ],
+ "strict-transport-security": [
+ "max-age=0; includeSubdomains; preload"
+ ],
+ "transfer-encoding": [
+ "chunked"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
+ ],
+ "content-type": [
+ "application/json"
+ ],
+ "referrer-policy": [
+ "no-referrer"
+ ],
+ "server": [
+ "Apache/2.4.10 (Debian)"
+ ],
+ "cache-control": [
+ "no-store"
+ ],
+ "x-content-type-options": [
+ "nosniff"
+ ],
+ "date": [
+ "Wed, 08 Apr 2020 21:12:54 GMT"
+ ]
+ },
+ "body": {
+ "string": "{\"result\":{\"data\":[{\"id\":2,\"type\":\"REPO\",\"phid\":\"PHID-REPO-bvunnehri4u2isyr7bc3\",\"fields\":{\"name\":\"Mercurial\",\"vcs\":\"hg\",\"callsign\":\"HG\",\"shortName\":null,\"status\":\"active\",\"isImporting\":false,\"almanacServicePHID\":null,\"refRules\":{\"fetchRules\":[],\"trackRules\":[],\"permanentRefRules\":[]},\"spacePHID\":null,\"dateCreated\":1498761653,\"dateModified\":1500403184,\"policy\":{\"view\":\"public\",\"edit\":\"admin\",\"diffusion.push\":\"users\"}},\"attachments\":{}}],\"maps\":{},\"query\":{\"queryKey\":null},\"cursor\":{\"limit\":100,\"after\":null,\"before\":null,\"order\":null}},\"error_code\":null,\"error_info\":null}"
+ },
+ "status": {
+ "code": 200,
+ "message": "OK"
+ }
+ }
+ },
+ {
+ "request": {
+ "headers": {
+ "content-length": [
+ "1160"
+ ],
+ "accept": [
+ "application/mercurial-0.1"
+ ],
+ "content-type": [
+ "application/x-www-form-urlencoded"
+ ],
+ "host": [
+ "phab.mercurial-scm.org"
+ ],
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.3.2+463-d35fbae9dd33+20200408)"
+ ]
+ },
+ "body": "__conduit__=1¶ms=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22bookmark%22%3A+null%2C+%22branch%22%3A+%22default%22%2C+%22changes%22%3A+%7B%22file.txt%22%3A+%7B%22addLines%22%3A+1%2C+%22awayPaths%22%3A+%5B%5D%2C+%22commitHash%22%3A+null%2C+%22currentPath%22%3A+%22file.txt%22%2C+%22delLines%22%3A+1%2C+%22fileType%22%3A+1%2C+%22hunks%22%3A+%5B%7B%22addLines%22%3A+1%2C+%22corpus%22%3A+%22-added%5Cn%2Bmod3%5Cn%22%2C+%22delLines%22%3A+1%2C+%22newLength%22%3A+1%2C+%22newOffset%22%3A+1%2C+%22oldLength%22%3A+1%2C+%22oldOffset%22%3A+1%7D%5D%2C+%22metadata%22%3A+%7B%7D%2C+%22newProperties%22%3A+%7B%7D%2C+%22oldPath%22%3A+%22file.txt%22%2C+%22oldProperties%22%3A+%7B%7D%2C+%22type%22%3A+2%7D%7D%2C+%22creationMethod%22%3A+%22phabsend%22%2C+%22lintStatus%22%3A+%22none%22%2C+%22repositoryPHID%22%3A+%22PHID-REPO-bvunnehri4u2isyr7bc3%22%2C+%22sourceControlBaseRevision%22%3A+%2298d480e0d494906d9758c44c622951b429afd862%22%2C+%22sourceControlPath%22%3A+%22%2F%22%2C+%22sourceControlSystem%22%3A+%22hg%22%2C+%22sourceMachine%22%3A+%22%22%2C+%22sourcePath%22%3A+%22%2F%22%2C+%22unitStatus%22%3A+%22none%22%7D&output=json",
+ "uri": "https://phab.mercurial-scm.org//api/differential.creatediff",
+ "method": "POST"
+ },
+ "response": {
+ "headers": {
+ "x-xss-protection": [
+ "1; mode=block"
+ ],
+ "strict-transport-security": [
+ "max-age=0; includeSubdomains; preload"
+ ],
+ "transfer-encoding": [
+ "chunked"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
+ ],
+ "content-type": [
+ "application/json"
+ ],
+ "referrer-policy": [
+ "no-referrer"
+ ],
+ "server": [
+ "Apache/2.4.10 (Debian)"
+ ],
+ "cache-control": [
+ "no-store"
+ ],
+ "x-content-type-options": [
+ "nosniff"
+ ],
+ "date": [
+ "Wed, 08 Apr 2020 21:12:55 GMT"
+ ]
+ },
+ "body": {
+ "string": "{\"result\":{\"diffid\":21006,\"phid\":\"PHID-DIFF-j6mteixl347wzuphwdft\",\"uri\":\"https:\\/\\/phab.mercurial-scm.org\\/differential\\/diff\\/21006\\/\"},\"error_code\":null,\"error_info\":null}"
+ },
+ "status": {
+ "code": 200,
+ "message": "OK"
+ }
+ }
+ },
+ {
+ "request": {
+ "headers": {
+ "content-length": [
+ "482"
+ ],
+ "accept": [
+ "application/mercurial-0.1"
+ ],
+ "content-type": [
+ "application/x-www-form-urlencoded"
+ ],
+ "host": [
+ "phab.mercurial-scm.org"
+ ],
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.3.2+463-d35fbae9dd33+20200408)"
+ ]
+ },
+ "body": "__conduit__=1¶ms=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22data%22%3A+%22%7B%5C%22branch%5C%22%3A+%5C%22default%5C%22%2C+%5C%22date%5C%22%3A+%5C%220+0%5C%22%2C+%5C%22node%5C%22%3A+%5C%22d235829e802c98c0171dac4a265b4b9bad07093a%5C%22%2C+%5C%22parent%5C%22%3A+%5C%2298d480e0d494906d9758c44c622951b429afd862%5C%22%2C+%5C%22user%5C%22%3A+%5C%22test%5C%22%7D%22%2C+%22diff_id%22%3A+21006%2C+%22name%22%3A+%22hg%3Ameta%22%7D&output=json",
+ "uri": "https://phab.mercurial-scm.org//api/differential.setdiffproperty",
+ "method": "POST"
+ },
+ "response": {
+ "headers": {
+ "x-xss-protection": [
+ "1; mode=block"
+ ],
+ "strict-transport-security": [
+ "max-age=0; includeSubdomains; preload"
+ ],
+ "transfer-encoding": [
+ "chunked"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
+ ],
+ "content-type": [
+ "application/json"
+ ],
+ "referrer-policy": [
+ "no-referrer"
+ ],
+ "server": [
+ "Apache/2.4.10 (Debian)"
+ ],
+ "cache-control": [
+ "no-store"
+ ],
+ "x-content-type-options": [
+ "nosniff"
+ ],
+ "date": [
+ "Wed, 08 Apr 2020 21:12:55 GMT"
+ ]
+ },
+ "body": {
+ "string": "{\"result\":null,\"error_code\":null,\"error_info\":null}"
+ },
+ "status": {
+ "code": 200,
+ "message": "OK"
+ }
+ }
+ },
+ {
+ "request": {
+ "headers": {
+ "content-length": [
+ "1362"
+ ],
+ "accept": [
+ "application/mercurial-0.1"
+ ],
+ "content-type": [
+ "application/x-www-form-urlencoded"
+ ],
+ "host": [
+ "phab.mercurial-scm.org"
+ ],
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.3.2+463-d35fbae9dd33+20200408)"
+ ]
+ },
+ "body": "__conduit__=1¶ms=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22data%22%3A+%22%7B%5C%2224a4438154bad90603d53a4dd31d8daff7844b36%5C%22%3A+%7B%5C%22author%5C%22%3A+%5C%22test%5C%22%2C+%5C%22authorEmail%5C%22%3A+%5C%22test%5C%22%2C+%5C%22branch%5C%22%3A+%5C%22default%5C%22%2C+%5C%22commit%5C%22%3A+%5C%2224a4438154bad90603d53a4dd31d8daff7844b36%5C%22%2C+%5C%22parents%5C%22%3A+%5B%5C%22a959a3f69d8d498d96102cda0bc03ed89706891e%5C%22%5D%2C+%5C%22time%5C%22%3A+0%7D%2C+%5C%22a959a3f69d8d498d96102cda0bc03ed89706891e%5C%22%3A+%7B%5C%22author%5C%22%3A+%5C%22test%5C%22%2C+%5C%22authorEmail%5C%22%3A+%5C%22test%5C%22%2C+%5C%22branch%5C%22%3A+%5C%22default%5C%22%2C+%5C%22commit%5C%22%3A+%5C%22a959a3f69d8d498d96102cda0bc03ed89706891e%5C%22%2C+%5C%22parents%5C%22%3A+%5B%5C%2298d480e0d494906d9758c44c622951b429afd862%5C%22%5D%2C+%5C%22time%5C%22%3A+0%7D%2C+%5C%22d235829e802c98c0171dac4a265b4b9bad07093a%5C%22%3A+%7B%5C%22author%5C%22%3A+%5C%22test%5C%22%2C+%5C%22authorEmail%5C%22%3A+%5C%22test%5C%22%2C+%5C%22branch%5C%22%3A+%5C%22default%5C%22%2C+%5C%22commit%5C%22%3A+%5C%22d235829e802c98c0171dac4a265b4b9bad07093a%5C%22%2C+%5C%22parents%5C%22%3A+%5B%5C%2224a4438154bad90603d53a4dd31d8daff7844b36%5C%22%5D%2C+%5C%22time%5C%22%3A+0%7D%7D%22%2C+%22diff_id%22%3A+21006%2C+%22name%22%3A+%22local%3Acommits%22%7D&output=json",
+ "uri": "https://phab.mercurial-scm.org//api/differential.setdiffproperty",
+ "method": "POST"
+ },
+ "response": {
+ "headers": {
+ "x-xss-protection": [
+ "1; mode=block"
+ ],
+ "strict-transport-security": [
+ "max-age=0; includeSubdomains; preload"
+ ],
+ "transfer-encoding": [
+ "chunked"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
+ ],
+ "content-type": [
+ "application/json"
+ ],
+ "referrer-policy": [
+ "no-referrer"
+ ],
+ "server": [
+ "Apache/2.4.10 (Debian)"
+ ],
+ "cache-control": [
+ "no-store"
+ ],
+ "x-content-type-options": [
+ "nosniff"
+ ],
+ "date": [
+ "Wed, 08 Apr 2020 21:12:55 GMT"
+ ]
+ },
+ "body": {
+ "string": "{\"result\":null,\"error_code\":null,\"error_info\":null}"
+ },
+ "status": {
+ "code": 200,
+ "message": "OK"
+ }
+ }
+ },
+ {
+ "request": {
+ "headers": {
+ "content-length": [
+ "287"
+ ],
+ "accept": [
+ "application/mercurial-0.1"
+ ],
+ "content-type": [
+ "application/x-www-form-urlencoded"
+ ],
+ "host": [
+ "phab.mercurial-scm.org"
+ ],
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.3.2+463-d35fbae9dd33+20200408)"
+ ]
+ },
+ "body": "__conduit__=1¶ms=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22corpus%22%3A+%22one%3A+first+commit+to+review%5Cn%5CnThis+file+was+modified+with+%27mod1%27+as+its+contents.%5Cn%5CnTest+Plan%3A%5CnLOL%21++What+testing%3F%21%22%7D&output=json",
+ "uri": "https://phab.mercurial-scm.org//api/differential.parsecommitmessage",
+ "method": "POST"
+ },
+ "response": {
+ "headers": {
+ "x-xss-protection": [
+ "1; mode=block"
+ ],
+ "strict-transport-security": [
+ "max-age=0; includeSubdomains; preload"
+ ],
+ "transfer-encoding": [
+ "chunked"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
+ ],
+ "content-type": [
+ "application/json"
+ ],
+ "referrer-policy": [
+ "no-referrer"
+ ],
+ "server": [
+ "Apache/2.4.10 (Debian)"
+ ],
+ "cache-control": [
+ "no-store"
+ ],
+ "x-content-type-options": [
+ "nosniff"
+ ],
+ "date": [
+ "Wed, 08 Apr 2020 21:12:56 GMT"
+ ]
+ },
+ "body": {
+ "string": "{\"result\":{\"errors\":[],\"fields\":{\"title\":\"one: first commit to review\",\"testPlan\":\"LOL! What testing?!\",\"summary\":\"This file was modified with 'mod1' as its contents.\"},\"revisionIDFieldInfo\":{\"value\":null,\"validDomain\":\"https:\\/\\/phab.mercurial-scm.org\"},\"transactions\":[{\"type\":\"title\",\"value\":\"one: first commit to review\"},{\"type\":\"testPlan\",\"value\":\"LOL! What testing?!\"},{\"type\":\"summary\",\"value\":\"This file was modified with 'mod1' as its contents.\"}]},\"error_code\":null,\"error_info\":null}"
+ },
+ "status": {
+ "code": 200,
+ "message": "OK"
+ }
+ }
+ },
+ {
+ "request": {
+ "headers": {
+ "content-length": [
+ "284"
+ ],
+ "accept": [
+ "application/mercurial-0.1"
+ ],
+ "content-type": [
+ "application/x-www-form-urlencoded"
+ ],
+ "host": [
+ "phab.mercurial-scm.org"
+ ],
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.3.2+463-d35fbae9dd33+20200408)"
+ ]
+ },
+ "body": "__conduit__=1¶ms=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22corpus%22%3A+%22two%3A+second+commit+to+review%5Cn%5CnThis+file+was+modified+with+%27mod2%27+as+its+contents.%5Cn%5CnTest+Plan%3A%5CnHaha%21+yeah%2C+right.%22%7D&output=json",
+ "uri": "https://phab.mercurial-scm.org//api/differential.parsecommitmessage",
+ "method": "POST"
+ },
+ "response": {
+ "headers": {
+ "x-xss-protection": [
+ "1; mode=block"
+ ],
+ "strict-transport-security": [
+ "max-age=0; includeSubdomains; preload"
+ ],
+ "transfer-encoding": [
+ "chunked"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
+ ],
+ "content-type": [
+ "application/json"
+ ],
+ "referrer-policy": [
+ "no-referrer"
+ ],
+ "server": [
+ "Apache/2.4.10 (Debian)"
+ ],
+ "cache-control": [
+ "no-store"
+ ],
+ "x-content-type-options": [
+ "nosniff"
+ ],
+ "date": [
+ "Wed, 08 Apr 2020 21:12:56 GMT"
+ ]
+ },
+ "body": {
+ "string": "{\"result\":{\"errors\":[],\"fields\":{\"title\":\"two: second commit to review\",\"testPlan\":\"Haha! yeah, right.\",\"summary\":\"This file was modified with 'mod2' as its contents.\"},\"revisionIDFieldInfo\":{\"value\":null,\"validDomain\":\"https:\\/\\/phab.mercurial-scm.org\"},\"transactions\":[{\"type\":\"title\",\"value\":\"two: second commit to review\"},{\"type\":\"testPlan\",\"value\":\"Haha! yeah, right.\"},{\"type\":\"summary\",\"value\":\"This file was modified with 'mod2' as its contents.\"}]},\"error_code\":null,\"error_info\":null}"
+ },
+ "status": {
+ "code": 200,
+ "message": "OK"
+ }
+ }
+ },
+ {
+ "request": {
+ "headers": {
+ "content-length": [
+ "183"
+ ],
+ "accept": [
+ "application/mercurial-0.1"
+ ],
+ "content-type": [
+ "application/x-www-form-urlencoded"
+ ],
+ "host": [
+ "phab.mercurial-scm.org"
+ ],
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.3.2+463-d35fbae9dd33+20200408)"
+ ]
+ },
+ "body": "__conduit__=1¶ms=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22corpus%22%3A+%223%3A+a+commit+with+no+detailed+message%22%7D&output=json",
+ "uri": "https://phab.mercurial-scm.org//api/differential.parsecommitmessage",
+ "method": "POST"
+ },
+ "response": {
+ "headers": {
+ "x-xss-protection": [
+ "1; mode=block"
+ ],
+ "strict-transport-security": [
+ "max-age=0; includeSubdomains; preload"
+ ],
+ "transfer-encoding": [
+ "chunked"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
+ ],
+ "content-type": [
+ "application/json"
+ ],
+ "referrer-policy": [
+ "no-referrer"
+ ],
+ "server": [
+ "Apache/2.4.10 (Debian)"
+ ],
+ "cache-control": [
+ "no-store"
+ ],
+ "x-content-type-options": [
+ "nosniff"
+ ],
+ "date": [
+ "Wed, 08 Apr 2020 21:12:57 GMT"
+ ]
+ },
+ "body": {
+ "string": "{\"result\":{\"errors\":[],\"fields\":{\"title\":\"3: a commit with no detailed message\"},\"revisionIDFieldInfo\":{\"value\":null,\"validDomain\":\"https:\\/\\/phab.mercurial-scm.org\"},\"transactions\":[{\"type\":\"title\",\"value\":\"3: a commit with no detailed message\"}]},\"error_code\":null,\"error_info\":null}"
+ },
+ "status": {
+ "code": 200,
+ "message": "OK"
+ }
+ }
+ },
+ {
+ "request": {
+ "headers": {
+ "content-length": [
+ "710"
+ ],
+ "accept": [
+ "application/mercurial-0.1"
+ ],
+ "content-type": [
+ "application/x-www-form-urlencoded"
+ ],
+ "host": [
+ "phab.mercurial-scm.org"
+ ],
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.3.2+463-d35fbae9dd33+20200408)"
+ ]
+ },
+ "body": "__conduit__=1¶ms=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22transactions%22%3A+%5B%7B%22type%22%3A+%22update%22%2C+%22value%22%3A+%22PHID-DIFF-j6mteixl347wzuphwdft%22%7D%2C+%7B%22type%22%3A+%22title%22%2C+%22value%22%3A+%22one%3A+first+commit+to+review%22%7D%2C+%7B%22type%22%3A+%22testPlan%22%2C+%22value%22%3A+%22LOL%21++What+testing%3F%21%5Cn%5CnHaha%21+yeah%2C+right.%22%7D%2C+%7B%22type%22%3A+%22summary%22%2C+%22value%22%3A+%22This+file+was+modified+with+%27mod1%27+as+its+contents.%5Cn%5Cntwo%3A+second+commit+to+review%5Cn%5CnThis+file+was+modified+with+%27mod2%27+as+its+contents.%5Cn%5Cn3%3A+a+commit+with+no+detailed+message%22%7D%5D%7D&output=json",
+ "uri": "https://phab.mercurial-scm.org//api/differential.revision.edit",
+ "method": "POST"
+ },
+ "response": {
+ "headers": {
+ "x-xss-protection": [
+ "1; mode=block"
+ ],
+ "strict-transport-security": [
+ "max-age=0; includeSubdomains; preload"
+ ],
+ "transfer-encoding": [
+ "chunked"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
+ ],
+ "content-type": [
+ "application/json"
+ ],
+ "referrer-policy": [
+ "no-referrer"
+ ],
+ "server": [
+ "Apache/2.4.10 (Debian)"
+ ],
+ "cache-control": [
+ "no-store"
+ ],
+ "x-content-type-options": [
+ "nosniff"
+ ],
+ "date": [
+ "Wed, 08 Apr 2020 21:12:57 GMT"
+ ]
+ },
+ "body": {
+ "string": "{\"result\":{\"object\":{\"id\":8387,\"phid\":\"PHID-DREV-6ov3pvjim4txejzekw2t\"},\"transactions\":[{\"phid\":\"PHID-XACT-DREV-ypn7okwtaudnadw\"},{\"phid\":\"PHID-XACT-DREV-g2r45z4erxl56g4\"},{\"phid\":\"PHID-XACT-DREV-h6dgu3eetalcv72\"},{\"phid\":\"PHID-XACT-DREV-rl7yelqfeezlivr\"},{\"phid\":\"PHID-XACT-DREV-j7cpnfpbxxphsbp\"},{\"phid\":\"PHID-XACT-DREV-57vpcydiiiu4zu5\"},{\"phid\":\"PHID-XACT-DREV-dou37gk5rry34zv\"}]},\"error_code\":null,\"error_info\":null}"
+ },
+ "status": {
+ "code": 200,
+ "message": "OK"
+ }
+ }
+ },
+ {
+ "request": {
+ "headers": {
+ "content-length": [
+ "146"
+ ],
+ "accept": [
+ "application/mercurial-0.1"
+ ],
+ "content-type": [
+ "application/x-www-form-urlencoded"
+ ],
+ "host": [
+ "phab.mercurial-scm.org"
+ ],
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.3.2+463-d35fbae9dd33+20200408)"
+ ]
+ },
+ "body": "__conduit__=1¶ms=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22ids%22%3A+%5B8387%5D%7D&output=json",
+ "uri": "https://phab.mercurial-scm.org//api/differential.query",
+ "method": "POST"
+ },
+ "response": {
+ "headers": {
+ "x-xss-protection": [
+ "1; mode=block"
+ ],
+ "strict-transport-security": [
+ "max-age=0; includeSubdomains; preload"
+ ],
+ "transfer-encoding": [
+ "chunked"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
+ ],
+ "content-type": [
+ "application/json"
+ ],
+ "referrer-policy": [
+ "no-referrer"
+ ],
+ "server": [
+ "Apache/2.4.10 (Debian)"
+ ],
+ "cache-control": [
+ "no-store"
+ ],
+ "x-content-type-options": [
+ "nosniff"
+ ],
+ "date": [
+ "Wed, 08 Apr 2020 21:12:58 GMT"
+ ]
+ },
+ "body": {
+ "string": "{\"result\":[{\"id\":\"8387\",\"phid\":\"PHID-DREV-6ov3pvjim4txejzekw2t\",\"title\":\"one: first commit to review\",\"uri\":\"https:\\/\\/phab.mercurial-scm.org\\/D8387\",\"dateCreated\":\"1586380377\",\"dateModified\":\"1586380377\",\"authorPHID\":\"PHID-USER-tzhaient733lwrlbcag5\",\"status\":\"0\",\"statusName\":\"Needs Review\",\"properties\":{\"draft.broadcast\":true,\"lines.added\":1,\"lines.removed\":1},\"branch\":\"default\",\"summary\":\"This file was modified with 'mod1' as its contents.\\n\\ntwo: second commit to review\\n\\nThis file was modified with 'mod2' as its contents.\\n\\n3: a commit with no detailed message\",\"testPlan\":\"LOL! What testing?!\\n\\nHaha! yeah, right.\",\"lineCount\":\"2\",\"activeDiffPHID\":\"PHID-DIFF-j6mteixl347wzuphwdft\",\"diffs\":[\"21006\"],\"commits\":[],\"reviewers\":{\"PHID-PROJ-3dvcxzznrjru2xmmses3\":\"PHID-PROJ-3dvcxzznrjru2xmmses3\"},\"ccs\":[\"PHID-USER-q42dn7cc3donqriafhjx\"],\"hashes\":[[\"hgcm\",\"\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\"],[\"hgcm\",\"\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\"],[\"hgcm\",\"\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\"]],\"auxiliary\":{\"phabricator:projects\":[],\"phabricator:depends-on\":[]},\"repositoryPHID\":\"PHID-REPO-bvunnehri4u2isyr7bc3\",\"sourcePath\":\"\
\/\"}],\"error_code\":null,\"error_info\":null}"
+ },
+ "status": {
+ "code": 200,
+ "message": "OK"
+ }
+ }
+ },
+ {
+ "request": {
+ "headers": {
+ "content-length": [
+ "482"
+ ],
+ "accept": [
+ "application/mercurial-0.1"
+ ],
+ "content-type": [
+ "application/x-www-form-urlencoded"
+ ],
+ "host": [
+ "phab.mercurial-scm.org"
+ ],
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.3.2+463-d35fbae9dd33+20200408)"
+ ]
+ },
+ "body": "__conduit__=1¶ms=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22data%22%3A+%22%7B%5C%22branch%5C%22%3A+%5C%22default%5C%22%2C+%5C%22date%5C%22%3A+%5C%220+0%5C%22%2C+%5C%22node%5C%22%3A+%5C%22921f8265efbd92e92bfa5d7a0e047908de9844a5%5C%22%2C+%5C%22parent%5C%22%3A+%5C%2298d480e0d494906d9758c44c622951b429afd862%5C%22%2C+%5C%22user%5C%22%3A+%5C%22test%5C%22%7D%22%2C+%22diff_id%22%3A+21006%2C+%22name%22%3A+%22hg%3Ameta%22%7D&output=json",
+ "uri": "https://phab.mercurial-scm.org//api/differential.setdiffproperty",
+ "method": "POST"
+ },
+ "response": {
+ "headers": {
+ "x-xss-protection": [
+ "1; mode=block"
+ ],
+ "strict-transport-security": [
+ "max-age=0; includeSubdomains; preload"
+ ],
+ "transfer-encoding": [
+ "chunked"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
+ ],
+ "content-type": [
+ "application/json"
+ ],
+ "referrer-policy": [
+ "no-referrer"
+ ],
+ "server": [
+ "Apache/2.4.10 (Debian)"
+ ],
+ "cache-control": [
+ "no-store"
+ ],
+ "x-content-type-options": [
+ "nosniff"
+ ],
+ "date": [
+ "Wed, 08 Apr 2020 21:12:58 GMT"
+ ]
+ },
+ "body": {
+ "string": "{\"result\":null,\"error_code\":null,\"error_info\":null}"
+ },
+ "status": {
+ "code": 200,
+ "message": "OK"
+ }
+ }
+ },
+ {
+ "request": {
+ "headers": {
+ "content-length": [
+ "1362"
+ ],
+ "accept": [
+ "application/mercurial-0.1"
+ ],
+ "content-type": [
+ "application/x-www-form-urlencoded"
+ ],
+ "host": [
+ "phab.mercurial-scm.org"
+ ],
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.3.2+463-d35fbae9dd33+20200408)"
+ ]
+ },
+ "body": "__conduit__=1¶ms=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22data%22%3A+%22%7B%5C%22602c4e7382436988e46c0d56fb3883968302b3f6%5C%22%3A+%7B%5C%22author%5C%22%3A+%5C%22test%5C%22%2C+%5C%22authorEmail%5C%22%3A+%5C%22test%5C%22%2C+%5C%22branch%5C%22%3A+%5C%22default%5C%22%2C+%5C%22commit%5C%22%3A+%5C%22602c4e7382436988e46c0d56fb3883968302b3f6%5C%22%2C+%5C%22parents%5C%22%3A+%5B%5C%2298d480e0d494906d9758c44c622951b429afd862%5C%22%5D%2C+%5C%22time%5C%22%3A+0%7D%2C+%5C%22832553266fe8c3330d968e6987df4ae793483b2b%5C%22%3A+%7B%5C%22author%5C%22%3A+%5C%22test%5C%22%2C+%5C%22authorEmail%5C%22%3A+%5C%22test%5C%22%2C+%5C%22branch%5C%22%3A+%5C%22default%5C%22%2C+%5C%22commit%5C%22%3A+%5C%22832553266fe8c3330d968e6987df4ae793483b2b%5C%22%2C+%5C%22parents%5C%22%3A+%5B%5C%22602c4e7382436988e46c0d56fb3883968302b3f6%5C%22%5D%2C+%5C%22time%5C%22%3A+0%7D%2C+%5C%22921f8265efbd92e92bfa5d7a0e047908de9844a5%5C%22%3A+%7B%5C%22author%5C%22%3A+%5C%22test%5C%22%2C+%5C%22authorEmail%5C%22%3A+%5C%22test%5C%22%2C+%5C%22branch%5C%22%3A+%5C%22default%5C%22%2C+%5C%22commit%5C%22%3A+%5C%22921f8265efbd92e92bfa5d7a0e047908de9844a5%5C%22%2C+%5C%22parents%5C%22%3A+%5B%5C%22832553266fe8c3330d968e6987df4ae793483b2b%5C%22%5D%2C+%5C%22time%5C%22%3A+0%7D%7D%22%2C+%22diff_id%22%3A+21006%2C+%22name%22%3A+%22local%3Acommits%22%7D&output=json",
+ "uri": "https://phab.mercurial-scm.org//api/differential.setdiffproperty",
+ "method": "POST"
+ },
+ "response": {
+ "headers": {
+ "x-xss-protection": [
+ "1; mode=block"
+ ],
+ "strict-transport-security": [
+ "max-age=0; includeSubdomains; preload"
+ ],
+ "transfer-encoding": [
+ "chunked"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
+ ],
+ "content-type": [
+ "application/json"
+ ],
+ "referrer-policy": [
+ "no-referrer"
+ ],
+ "server": [
+ "Apache/2.4.10 (Debian)"
+ ],
+ "cache-control": [
+ "no-store"
+ ],
+ "x-content-type-options": [
+ "nosniff"
+ ],
+ "date": [
+ "Wed, 08 Apr 2020 21:12:59 GMT"
+ ]
+ },
+ "body": {
+ "string": "{\"result\":null,\"error_code\":null,\"error_info\":null}"
+ },
+ "status": {
+ "code": 200,
+ "message": "OK"
+ }
+ }
+ }
+ ],
+ "version": 1
+}
\ No newline at end of file
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/phabricator/phabsend-fold-no-changes.json Thu Apr 16 22:51:09 2020 +0530
@@ -0,0 +1,685 @@
+{
+ "interactions": [
+ {
+ "request": {
+ "headers": {
+ "content-length": [
+ "170"
+ ],
+ "accept": [
+ "application/mercurial-0.1"
+ ],
+ "content-type": [
+ "application/x-www-form-urlencoded"
+ ],
+ "host": [
+ "phab.mercurial-scm.org"
+ ],
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.3.2+463-d35fbae9dd33+20200408)"
+ ]
+ },
+ "body": "__conduit__=1¶ms=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22revisionIDs%22%3A+%5B8387%2C+8387%2C+8387%5D%7D&output=json",
+ "uri": "https://phab.mercurial-scm.org//api/differential.querydiffs",
+ "method": "POST"
+ },
+ "response": {
+ "headers": {
+ "x-xss-protection": [
+ "1; mode=block"
+ ],
+ "strict-transport-security": [
+ "max-age=0; includeSubdomains; preload"
+ ],
+ "transfer-encoding": [
+ "chunked"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
+ ],
+ "content-type": [
+ "application/json"
+ ],
+ "referrer-policy": [
+ "no-referrer"
+ ],
+ "server": [
+ "Apache/2.4.10 (Debian)"
+ ],
+ "cache-control": [
+ "no-store"
+ ],
+ "x-content-type-options": [
+ "nosniff"
+ ],
+ "date": [
+ "Wed, 08 Apr 2020 21:13:08 GMT"
+ ]
+ },
+ "body": {
+ "string": "{\"result\":{\"21007\":{\"id\":\"21007\",\"revisionID\":\"8387\",\"dateCreated\":\"1586380382\",\"dateModified\":\"1586380385\",\"sourceControlBaseRevision\":\"98d480e0d494906d9758c44c622951b429afd862\",\"sourceControlPath\":\"\\/\",\"sourceControlSystem\":\"hg\",\"branch\":\"default\",\"bookmark\":null,\"creationMethod\":\"phabsend\",\"description\":null,\"unitStatus\":\"0\",\"lintStatus\":\"0\",\"changes\":[{\"id\":\"56825\",\"metadata\":{\"line:first\":1,\"hash.effect\":\"yXvxVg6uibwd\"},\"oldPath\":null,\"currentPath\":\"file2.txt\",\"awayPaths\":[],\"oldProperties\":[],\"newProperties\":{\"unix:filemode\":\"100644\"},\"type\":\"1\",\"fileType\":\"1\",\"commitHash\":null,\"addLines\":\"1\",\"delLines\":\"0\",\"hunks\":[{\"oldOffset\":\"0\",\"newOffset\":\"1\",\"oldLength\":\"0\",\"newLength\":\"1\",\"addLines\":null,\"delLines\":null,\"isMissingOldNewline\":null,\"isMissingNewNewline\":null,\"corpus\":\"+modified\\n\"}]},{\"id\":\"56824\",\"metadata\":{\"line:first\":1,\"hash.effect\":\"cAKuVP31KNrx\"},\"oldPath\":\"file.txt\",\"currentPath\":\"file.txt\",\"awayPaths\":[],\"oldProperties\":[],\"newProperties\":[],\"type\":\"2\",\"fileType\":\"1\",\"commitHash\":null,\"addLines\":\"1\",\"delLines\":\"1\",\"hunks\":[{\"oldOffset\":\"1\",\"newOffset\":\"1\",\"oldLength\":\"1\",\"newLength\":\"1\",\"addLines\":null,\"delLines\":null,\"isMissingOldNewline\":null,\"isMissingNewNewline\":null,\"corpus\":\"-added\\n+mod3\\n\"}]}],\"properties\":{\"hg:meta\":{\"branch\":\"default\",\"date\":\"0 
0\",\"node\":\"e4edb1fe3565eaaecc1787ada4fcc2b7e9018fac\",\"parent\":\"98d480e0d494906d9758c44c622951b429afd862\",\"user\":\"test\"},\"local:commits\":{\"0124e5474c880e4fb40c8326ad2b75ae3e57ee5f\":{\"author\":\"test\",\"authorEmail\":\"test\",\"branch\":\"default\",\"commit\":\"0124e5474c880e4fb40c8326ad2b75ae3e57ee5f\",\"parents\":[\"602c4e7382436988e46c0d56fb3883968302b3f6\"],\"time\":0},\"602c4e7382436988e46c0d56fb3883968302b3f6\":{\"author\":\"test\",\"authorEmail\":\"test\",\"branch\":\"default\",\"commit\":\"602c4e7382436988e46c0d56fb3883968302b3f6\",\"parents\":[\"98d480e0d494906d9758c44c622951b429afd862\"],\"time\":0},\"e4edb1fe3565eaaecc1787ada4fcc2b7e9018fac\":{\"author\":\"test\",\"authorEmail\":\"test\",\"branch\":\"default\",\"commit\":\"e4edb1fe3565eaaecc1787ada4fcc2b7e9018fac\",\"parents\":[\"0124e5474c880e4fb40c8326ad2b75ae3e57ee5f\"],\"time\":0}}},\"authorName\":\"test\",\"authorEmail\":\"test\"},\"21006\":{\"id\":\"21006\",\"revisionID\":\"8387\",\"dateCreated\":\"1586380375\",\"dateModified\":\"1586380377\",\"sourceControlBaseRevision\":\"98d480e0d494906d9758c44c622951b429afd862\",\"sourceControlPath\":\"\\/\",\"sourceControlSystem\":\"hg\",\"branch\":\"default\",\"bookmark\":null,\"creationMethod\":\"phabsend\",\"description\":null,\"unitStatus\":\"0\",\"lintStatus\":\"0\",\"changes\":[{\"id\":\"56823\",\"metadata\":{\"line:first\":1,\"hash.effect\":\"cAKuVP31KNrx\"},\"oldPath\":\"file.txt\",\"currentPath\":\"file.txt\",\"awayPaths\":[],\"oldProperties\":[],\"newProperties\":[],\"type\":\"2\",\"fileType\":\"1\",\"commitHash\":null,\"addLines\":\"1\",\"delLines\":\"1\",\"hunks\":[{\"oldOffset\":\"1\",\"newOffset\":\"1\",\"oldLength\":\"1\",\"newLength\":\"1\",\"addLines\":null,\"delLines\":null,\"isMissingOldNewline\":null,\"isMissingNewNewline\":null,\"corpus\":\"-added\\n+mod3\\n\"}]}],\"properties\":{\"hg:meta\":{\"branch\":\"default\",\"date\":\"0 
0\",\"node\":\"921f8265efbd92e92bfa5d7a0e047908de9844a5\",\"parent\":\"98d480e0d494906d9758c44c622951b429afd862\",\"user\":\"test\"},\"local:commits\":{\"602c4e7382436988e46c0d56fb3883968302b3f6\":{\"author\":\"test\",\"authorEmail\":\"test\",\"branch\":\"default\",\"commit\":\"602c4e7382436988e46c0d56fb3883968302b3f6\",\"parents\":[\"98d480e0d494906d9758c44c622951b429afd862\"],\"time\":0},\"832553266fe8c3330d968e6987df4ae793483b2b\":{\"author\":\"test\",\"authorEmail\":\"test\",\"branch\":\"default\",\"commit\":\"832553266fe8c3330d968e6987df4ae793483b2b\",\"parents\":[\"602c4e7382436988e46c0d56fb3883968302b3f6\"],\"time\":0},\"921f8265efbd92e92bfa5d7a0e047908de9844a5\":{\"author\":\"test\",\"authorEmail\":\"test\",\"branch\":\"default\",\"commit\":\"921f8265efbd92e92bfa5d7a0e047908de9844a5\",\"parents\":[\"832553266fe8c3330d968e6987df4ae793483b2b\"],\"time\":0}}},\"authorName\":\"test\",\"authorEmail\":\"test\"}},\"error_code\":null,\"error_info\":null}"
+ },
+ "status": {
+ "code": 200,
+ "message": "OK"
+ }
+ }
+ },
+ {
+ "request": {
+ "headers": {
+ "content-length": [
+ "183"
+ ],
+ "accept": [
+ "application/mercurial-0.1"
+ ],
+ "content-type": [
+ "application/x-www-form-urlencoded"
+ ],
+ "host": [
+ "phab.mercurial-scm.org"
+ ],
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.3.2+463-d35fbae9dd33+20200408)"
+ ]
+ },
+ "body": "__conduit__=1¶ms=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22constraints%22%3A+%7B%22callsigns%22%3A+%5B%22HG%22%5D%7D%7D&output=json",
+ "uri": "https://phab.mercurial-scm.org//api/diffusion.repository.search",
+ "method": "POST"
+ },
+ "response": {
+ "headers": {
+ "x-xss-protection": [
+ "1; mode=block"
+ ],
+ "strict-transport-security": [
+ "max-age=0; includeSubdomains; preload"
+ ],
+ "transfer-encoding": [
+ "chunked"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
+ ],
+ "content-type": [
+ "application/json"
+ ],
+ "referrer-policy": [
+ "no-referrer"
+ ],
+ "server": [
+ "Apache/2.4.10 (Debian)"
+ ],
+ "cache-control": [
+ "no-store"
+ ],
+ "x-content-type-options": [
+ "nosniff"
+ ],
+ "date": [
+ "Wed, 08 Apr 2020 21:13:08 GMT"
+ ]
+ },
+ "body": {
+ "string": "{\"result\":{\"data\":[{\"id\":2,\"type\":\"REPO\",\"phid\":\"PHID-REPO-bvunnehri4u2isyr7bc3\",\"fields\":{\"name\":\"Mercurial\",\"vcs\":\"hg\",\"callsign\":\"HG\",\"shortName\":null,\"status\":\"active\",\"isImporting\":false,\"almanacServicePHID\":null,\"refRules\":{\"fetchRules\":[],\"trackRules\":[],\"permanentRefRules\":[]},\"spacePHID\":null,\"dateCreated\":1498761653,\"dateModified\":1500403184,\"policy\":{\"view\":\"public\",\"edit\":\"admin\",\"diffusion.push\":\"users\"}},\"attachments\":{}}],\"maps\":{},\"query\":{\"queryKey\":null},\"cursor\":{\"limit\":100,\"after\":null,\"before\":null,\"order\":null}},\"error_code\":null,\"error_info\":null}"
+ },
+ "status": {
+ "code": 200,
+ "message": "OK"
+ }
+ }
+ },
+ {
+ "request": {
+ "headers": {
+ "content-length": [
+ "1736"
+ ],
+ "accept": [
+ "application/mercurial-0.1"
+ ],
+ "content-type": [
+ "application/x-www-form-urlencoded"
+ ],
+ "host": [
+ "phab.mercurial-scm.org"
+ ],
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.3.2+463-d35fbae9dd33+20200408)"
+ ]
+ },
+ "body": "__conduit__=1¶ms=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22bookmark%22%3A+null%2C+%22branch%22%3A+%22default%22%2C+%22changes%22%3A+%7B%22file.txt%22%3A+%7B%22addLines%22%3A+1%2C+%22awayPaths%22%3A+%5B%5D%2C+%22commitHash%22%3A+null%2C+%22currentPath%22%3A+%22file.txt%22%2C+%22delLines%22%3A+1%2C+%22fileType%22%3A+1%2C+%22hunks%22%3A+%5B%7B%22addLines%22%3A+1%2C+%22corpus%22%3A+%22-added%5Cn%2Bmod3%5Cn%22%2C+%22delLines%22%3A+1%2C+%22newLength%22%3A+1%2C+%22newOffset%22%3A+1%2C+%22oldLength%22%3A+1%2C+%22oldOffset%22%3A+1%7D%5D%2C+%22metadata%22%3A+%7B%7D%2C+%22newProperties%22%3A+%7B%7D%2C+%22oldPath%22%3A+%22file.txt%22%2C+%22oldProperties%22%3A+%7B%7D%2C+%22type%22%3A+2%7D%2C+%22file2.txt%22%3A+%7B%22addLines%22%3A+1%2C+%22awayPaths%22%3A+%5B%5D%2C+%22commitHash%22%3A+null%2C+%22currentPath%22%3A+%22file2.txt%22%2C+%22delLines%22%3A+0%2C+%22fileType%22%3A+1%2C+%22hunks%22%3A+%5B%7B%22addLines%22%3A+1%2C+%22corpus%22%3A+%22%2Bmodified%5Cn%22%2C+%22delLines%22%3A+0%2C+%22newLength%22%3A+1%2C+%22newOffset%22%3A+1%2C+%22oldLength%22%3A+0%2C+%22oldOffset%22%3A+0%7D%5D%2C+%22metadata%22%3A+%7B%7D%2C+%22newProperties%22%3A+%7B%22unix%3Afilemode%22%3A+%22100644%22%7D%2C+%22oldPath%22%3A+null%2C+%22oldProperties%22%3A+%7B%7D%2C+%22type%22%3A+1%7D%7D%2C+%22creationMethod%22%3A+%22phabsend%22%2C+%22lintStatus%22%3A+%22none%22%2C+%22repositoryPHID%22%3A+%22PHID-REPO-bvunnehri4u2isyr7bc3%22%2C+%22sourceControlBaseRevision%22%3A+%2298d480e0d494906d9758c44c622951b429afd862%22%2C+%22sourceControlPath%22%3A+%22%2F%22%2C+%22sourceControlSystem%22%3A+%22hg%22%2C+%22sourceMachine%22%3A+%22%22%2C+%22sourcePath%22%3A+%22%2F%22%2C+%22unitStatus%22%3A+%22none%22%7D&output=json",
+ "uri": "https://phab.mercurial-scm.org//api/differential.creatediff",
+ "method": "POST"
+ },
+ "response": {
+ "headers": {
+ "x-xss-protection": [
+ "1; mode=block"
+ ],
+ "strict-transport-security": [
+ "max-age=0; includeSubdomains; preload"
+ ],
+ "transfer-encoding": [
+ "chunked"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
+ ],
+ "content-type": [
+ "application/json"
+ ],
+ "referrer-policy": [
+ "no-referrer"
+ ],
+ "server": [
+ "Apache/2.4.10 (Debian)"
+ ],
+ "cache-control": [
+ "no-store"
+ ],
+ "x-content-type-options": [
+ "nosniff"
+ ],
+ "date": [
+ "Wed, 08 Apr 2020 21:13:08 GMT"
+ ]
+ },
+ "body": {
+ "string": "{\"result\":{\"diffid\":21008,\"phid\":\"PHID-DIFF-wwqrxqimkcomhphpy4fb\",\"uri\":\"https:\\/\\/phab.mercurial-scm.org\\/differential\\/diff\\/21008\\/\"},\"error_code\":null,\"error_info\":null}"
+ },
+ "status": {
+ "code": 200,
+ "message": "OK"
+ }
+ }
+ },
+ {
+ "request": {
+ "headers": {
+ "content-length": [
+ "482"
+ ],
+ "accept": [
+ "application/mercurial-0.1"
+ ],
+ "content-type": [
+ "application/x-www-form-urlencoded"
+ ],
+ "host": [
+ "phab.mercurial-scm.org"
+ ],
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.3.2+463-d35fbae9dd33+20200408)"
+ ]
+ },
+ "body": "__conduit__=1¶ms=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22data%22%3A+%22%7B%5C%22branch%5C%22%3A+%5C%22default%5C%22%2C+%5C%22date%5C%22%3A+%5C%220+0%5C%22%2C+%5C%22node%5C%22%3A+%5C%22e4edb1fe3565eaaecc1787ada4fcc2b7e9018fac%5C%22%2C+%5C%22parent%5C%22%3A+%5C%2298d480e0d494906d9758c44c622951b429afd862%5C%22%2C+%5C%22user%5C%22%3A+%5C%22test%5C%22%7D%22%2C+%22diff_id%22%3A+21008%2C+%22name%22%3A+%22hg%3Ameta%22%7D&output=json",
+ "uri": "https://phab.mercurial-scm.org//api/differential.setdiffproperty",
+ "method": "POST"
+ },
+ "response": {
+ "headers": {
+ "x-xss-protection": [
+ "1; mode=block"
+ ],
+ "strict-transport-security": [
+ "max-age=0; includeSubdomains; preload"
+ ],
+ "transfer-encoding": [
+ "chunked"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
+ ],
+ "content-type": [
+ "application/json"
+ ],
+ "referrer-policy": [
+ "no-referrer"
+ ],
+ "server": [
+ "Apache/2.4.10 (Debian)"
+ ],
+ "cache-control": [
+ "no-store"
+ ],
+ "x-content-type-options": [
+ "nosniff"
+ ],
+ "date": [
+ "Wed, 08 Apr 2020 21:13:09 GMT"
+ ]
+ },
+ "body": {
+ "string": "{\"result\":null,\"error_code\":null,\"error_info\":null}"
+ },
+ "status": {
+ "code": 200,
+ "message": "OK"
+ }
+ }
+ },
+ {
+ "request": {
+ "headers": {
+ "content-length": [
+ "1362"
+ ],
+ "accept": [
+ "application/mercurial-0.1"
+ ],
+ "content-type": [
+ "application/x-www-form-urlencoded"
+ ],
+ "host": [
+ "phab.mercurial-scm.org"
+ ],
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.3.2+463-d35fbae9dd33+20200408)"
+ ]
+ },
+ "body": "__conduit__=1¶ms=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22data%22%3A+%22%7B%5C%220124e5474c880e4fb40c8326ad2b75ae3e57ee5f%5C%22%3A+%7B%5C%22author%5C%22%3A+%5C%22test%5C%22%2C+%5C%22authorEmail%5C%22%3A+%5C%22test%5C%22%2C+%5C%22branch%5C%22%3A+%5C%22default%5C%22%2C+%5C%22commit%5C%22%3A+%5C%220124e5474c880e4fb40c8326ad2b75ae3e57ee5f%5C%22%2C+%5C%22parents%5C%22%3A+%5B%5C%22602c4e7382436988e46c0d56fb3883968302b3f6%5C%22%5D%2C+%5C%22time%5C%22%3A+0%7D%2C+%5C%22602c4e7382436988e46c0d56fb3883968302b3f6%5C%22%3A+%7B%5C%22author%5C%22%3A+%5C%22test%5C%22%2C+%5C%22authorEmail%5C%22%3A+%5C%22test%5C%22%2C+%5C%22branch%5C%22%3A+%5C%22default%5C%22%2C+%5C%22commit%5C%22%3A+%5C%22602c4e7382436988e46c0d56fb3883968302b3f6%5C%22%2C+%5C%22parents%5C%22%3A+%5B%5C%2298d480e0d494906d9758c44c622951b429afd862%5C%22%5D%2C+%5C%22time%5C%22%3A+0%7D%2C+%5C%22e4edb1fe3565eaaecc1787ada4fcc2b7e9018fac%5C%22%3A+%7B%5C%22author%5C%22%3A+%5C%22test%5C%22%2C+%5C%22authorEmail%5C%22%3A+%5C%22test%5C%22%2C+%5C%22branch%5C%22%3A+%5C%22default%5C%22%2C+%5C%22commit%5C%22%3A+%5C%22e4edb1fe3565eaaecc1787ada4fcc2b7e9018fac%5C%22%2C+%5C%22parents%5C%22%3A+%5B%5C%220124e5474c880e4fb40c8326ad2b75ae3e57ee5f%5C%22%5D%2C+%5C%22time%5C%22%3A+0%7D%7D%22%2C+%22diff_id%22%3A+21008%2C+%22name%22%3A+%22local%3Acommits%22%7D&output=json",
+ "uri": "https://phab.mercurial-scm.org//api/differential.setdiffproperty",
+ "method": "POST"
+ },
+ "response": {
+ "headers": {
+ "x-xss-protection": [
+ "1; mode=block"
+ ],
+ "strict-transport-security": [
+ "max-age=0; includeSubdomains; preload"
+ ],
+ "transfer-encoding": [
+ "chunked"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
+ ],
+ "content-type": [
+ "application/json"
+ ],
+ "referrer-policy": [
+ "no-referrer"
+ ],
+ "server": [
+ "Apache/2.4.10 (Debian)"
+ ],
+ "cache-control": [
+ "no-store"
+ ],
+ "x-content-type-options": [
+ "nosniff"
+ ],
+ "date": [
+ "Wed, 08 Apr 2020 21:13:09 GMT"
+ ]
+ },
+ "body": {
+ "string": "{\"result\":null,\"error_code\":null,\"error_info\":null}"
+ },
+ "status": {
+ "code": 200,
+ "message": "OK"
+ }
+ }
+ },
+ {
+ "request": {
+ "headers": {
+ "content-length": [
+ "364"
+ ],
+ "accept": [
+ "application/mercurial-0.1"
+ ],
+ "content-type": [
+ "application/x-www-form-urlencoded"
+ ],
+ "host": [
+ "phab.mercurial-scm.org"
+ ],
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.3.2+463-d35fbae9dd33+20200408)"
+ ]
+ },
+ "body": "__conduit__=1¶ms=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22corpus%22%3A+%22one%3A+first+commit+to+review%5Cn%5CnThis+file+was+modified+with+%27mod1%27+as+its+contents.%5Cn%5CnTest+Plan%3A%5CnLOL%21++What+testing%3F%21%5Cn%5CnDifferential+Revision%3A+https%3A%2F%2Fphab.mercurial-scm.org%2FD8387%22%7D&output=json",
+ "uri": "https://phab.mercurial-scm.org//api/differential.parsecommitmessage",
+ "method": "POST"
+ },
+ "response": {
+ "headers": {
+ "x-xss-protection": [
+ "1; mode=block"
+ ],
+ "strict-transport-security": [
+ "max-age=0; includeSubdomains; preload"
+ ],
+ "transfer-encoding": [
+ "chunked"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
+ ],
+ "content-type": [
+ "application/json"
+ ],
+ "referrer-policy": [
+ "no-referrer"
+ ],
+ "server": [
+ "Apache/2.4.10 (Debian)"
+ ],
+ "cache-control": [
+ "no-store"
+ ],
+ "x-content-type-options": [
+ "nosniff"
+ ],
+ "date": [
+ "Wed, 08 Apr 2020 21:13:10 GMT"
+ ]
+ },
+ "body": {
+ "string": "{\"result\":{\"errors\":[],\"fields\":{\"title\":\"one: first commit to review\",\"testPlan\":\"LOL! What testing?!\",\"revisionID\":8387,\"summary\":\"This file was modified with 'mod1' as its contents.\"},\"revisionIDFieldInfo\":{\"value\":8387,\"validDomain\":\"https:\\/\\/phab.mercurial-scm.org\"},\"transactions\":[{\"type\":\"title\",\"value\":\"one: first commit to review\"},{\"type\":\"testPlan\",\"value\":\"LOL! What testing?!\"},{\"type\":\"summary\",\"value\":\"This file was modified with 'mod1' as its contents.\"}]},\"error_code\":null,\"error_info\":null}"
+ },
+ "status": {
+ "code": 200,
+ "message": "OK"
+ }
+ }
+ },
+ {
+ "request": {
+ "headers": {
+ "content-length": [
+ "361"
+ ],
+ "accept": [
+ "application/mercurial-0.1"
+ ],
+ "content-type": [
+ "application/x-www-form-urlencoded"
+ ],
+ "host": [
+ "phab.mercurial-scm.org"
+ ],
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.3.2+463-d35fbae9dd33+20200408)"
+ ]
+ },
+ "body": "__conduit__=1¶ms=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22corpus%22%3A+%22two%3A+second+commit+to+review%5Cn%5CnThis+file+was+modified+with+%27mod2%27+as+its+contents.%5Cn%5CnTest+Plan%3A%5CnHaha%21+yeah%2C+right.%5Cn%5CnDifferential+Revision%3A+https%3A%2F%2Fphab.mercurial-scm.org%2FD8387%22%7D&output=json",
+ "uri": "https://phab.mercurial-scm.org//api/differential.parsecommitmessage",
+ "method": "POST"
+ },
+ "response": {
+ "headers": {
+ "x-xss-protection": [
+ "1; mode=block"
+ ],
+ "strict-transport-security": [
+ "max-age=0; includeSubdomains; preload"
+ ],
+ "transfer-encoding": [
+ "chunked"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
+ ],
+ "content-type": [
+ "application/json"
+ ],
+ "referrer-policy": [
+ "no-referrer"
+ ],
+ "server": [
+ "Apache/2.4.10 (Debian)"
+ ],
+ "cache-control": [
+ "no-store"
+ ],
+ "x-content-type-options": [
+ "nosniff"
+ ],
+ "date": [
+ "Wed, 08 Apr 2020 21:13:10 GMT"
+ ]
+ },
+ "body": {
+ "string": "{\"result\":{\"errors\":[],\"fields\":{\"title\":\"two: second commit to review\",\"testPlan\":\"Haha! yeah, right.\",\"revisionID\":8387,\"summary\":\"This file was modified with 'mod2' as its contents.\"},\"revisionIDFieldInfo\":{\"value\":8387,\"validDomain\":\"https:\\/\\/phab.mercurial-scm.org\"},\"transactions\":[{\"type\":\"title\",\"value\":\"two: second commit to review\"},{\"type\":\"testPlan\",\"value\":\"Haha! yeah, right.\"},{\"type\":\"summary\",\"value\":\"This file was modified with 'mod2' as its contents.\"}]},\"error_code\":null,\"error_info\":null}"
+ },
+ "status": {
+ "code": 200,
+ "message": "OK"
+ }
+ }
+ },
+ {
+ "request": {
+ "headers": {
+ "content-length": [
+ "260"
+ ],
+ "accept": [
+ "application/mercurial-0.1"
+ ],
+ "content-type": [
+ "application/x-www-form-urlencoded"
+ ],
+ "host": [
+ "phab.mercurial-scm.org"
+ ],
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.3.2+463-d35fbae9dd33+20200408)"
+ ]
+ },
+ "body": "__conduit__=1¶ms=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22corpus%22%3A+%223%3A+a+commit+with+no+detailed+message%5Cn%5CnDifferential+Revision%3A+https%3A%2F%2Fphab.mercurial-scm.org%2FD8387%22%7D&output=json",
+ "uri": "https://phab.mercurial-scm.org//api/differential.parsecommitmessage",
+ "method": "POST"
+ },
+ "response": {
+ "headers": {
+ "x-xss-protection": [
+ "1; mode=block"
+ ],
+ "strict-transport-security": [
+ "max-age=0; includeSubdomains; preload"
+ ],
+ "transfer-encoding": [
+ "chunked"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
+ ],
+ "content-type": [
+ "application/json"
+ ],
+ "referrer-policy": [
+ "no-referrer"
+ ],
+ "server": [
+ "Apache/2.4.10 (Debian)"
+ ],
+ "cache-control": [
+ "no-store"
+ ],
+ "x-content-type-options": [
+ "nosniff"
+ ],
+ "date": [
+ "Wed, 08 Apr 2020 21:13:11 GMT"
+ ]
+ },
+ "body": {
+ "string": "{\"result\":{\"errors\":[],\"fields\":{\"title\":\"3: a commit with no detailed message\",\"revisionID\":8387},\"revisionIDFieldInfo\":{\"value\":8387,\"validDomain\":\"https:\\/\\/phab.mercurial-scm.org\"},\"transactions\":[{\"type\":\"title\",\"value\":\"3: a commit with no detailed message\"}]},\"error_code\":null,\"error_info\":null}"
+ },
+ "status": {
+ "code": 200,
+ "message": "OK"
+ }
+ }
+ },
+ {
+ "request": {
+ "headers": {
+ "content-length": [
+ "744"
+ ],
+ "accept": [
+ "application/mercurial-0.1"
+ ],
+ "content-type": [
+ "application/x-www-form-urlencoded"
+ ],
+ "host": [
+ "phab.mercurial-scm.org"
+ ],
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.3.2+463-d35fbae9dd33+20200408)"
+ ]
+ },
+ "body": "__conduit__=1¶ms=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22objectIdentifier%22%3A+8387%2C+%22transactions%22%3A+%5B%7B%22type%22%3A+%22update%22%2C+%22value%22%3A+%22PHID-DIFF-wwqrxqimkcomhphpy4fb%22%7D%2C+%7B%22type%22%3A+%22title%22%2C+%22value%22%3A+%22one%3A+first+commit+to+review%22%7D%2C+%7B%22type%22%3A+%22testPlan%22%2C+%22value%22%3A+%22LOL%21++What+testing%3F%21%5Cn%5CnHaha%21+yeah%2C+right.%22%7D%2C+%7B%22type%22%3A+%22summary%22%2C+%22value%22%3A+%22This+file+was+modified+with+%27mod1%27+as+its+contents.%5Cn%5Cntwo%3A+second+commit+to+review%5Cn%5CnThis+file+was+modified+with+%27mod2%27+as+its+contents.%5Cn%5Cn3%3A+a+commit+with+no+detailed+message%22%7D%5D%7D&output=json",
+ "uri": "https://phab.mercurial-scm.org//api/differential.revision.edit",
+ "method": "POST"
+ },
+ "response": {
+ "headers": {
+ "x-xss-protection": [
+ "1; mode=block"
+ ],
+ "strict-transport-security": [
+ "max-age=0; includeSubdomains; preload"
+ ],
+ "transfer-encoding": [
+ "chunked"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
+ ],
+ "content-type": [
+ "application/json"
+ ],
+ "referrer-policy": [
+ "no-referrer"
+ ],
+ "server": [
+ "Apache/2.4.10 (Debian)"
+ ],
+ "cache-control": [
+ "no-store"
+ ],
+ "x-content-type-options": [
+ "nosniff"
+ ],
+ "date": [
+ "Wed, 08 Apr 2020 21:13:11 GMT"
+ ]
+ },
+ "body": {
+ "string": "{\"result\":{\"object\":{\"id\":8387,\"phid\":\"PHID-DREV-6ov3pvjim4txejzekw2t\"},\"transactions\":[{\"phid\":\"PHID-XACT-DREV-yev5udwjourvj2j\"}]},\"error_code\":null,\"error_info\":null}"
+ },
+ "status": {
+ "code": 200,
+ "message": "OK"
+ }
+ }
+ },
+ {
+ "request": {
+ "headers": {
+ "content-length": [
+ "146"
+ ],
+ "accept": [
+ "application/mercurial-0.1"
+ ],
+ "content-type": [
+ "application/x-www-form-urlencoded"
+ ],
+ "host": [
+ "phab.mercurial-scm.org"
+ ],
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.3.2+463-d35fbae9dd33+20200408)"
+ ]
+ },
+ "body": "__conduit__=1¶ms=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22ids%22%3A+%5B8387%5D%7D&output=json",
+ "uri": "https://phab.mercurial-scm.org//api/differential.query",
+ "method": "POST"
+ },
+ "response": {
+ "headers": {
+ "x-xss-protection": [
+ "1; mode=block"
+ ],
+ "strict-transport-security": [
+ "max-age=0; includeSubdomains; preload"
+ ],
+ "transfer-encoding": [
+ "chunked"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
+ ],
+ "content-type": [
+ "application/json"
+ ],
+ "referrer-policy": [
+ "no-referrer"
+ ],
+ "server": [
+ "Apache/2.4.10 (Debian)"
+ ],
+ "cache-control": [
+ "no-store"
+ ],
+ "x-content-type-options": [
+ "nosniff"
+ ],
+ "date": [
+ "Wed, 08 Apr 2020 21:13:12 GMT"
+ ]
+ },
+ "body": {
+ "string": "{\"result\":[{\"id\":\"8387\",\"phid\":\"PHID-DREV-6ov3pvjim4txejzekw2t\",\"title\":\"one: first commit to review\",\"uri\":\"https:\\/\\/phab.mercurial-scm.org\\/D8387\",\"dateCreated\":\"1586380377\",\"dateModified\":\"1586380391\",\"authorPHID\":\"PHID-USER-tzhaient733lwrlbcag5\",\"status\":\"0\",\"statusName\":\"Needs Review\",\"properties\":{\"draft.broadcast\":true,\"lines.added\":2,\"lines.removed\":1},\"branch\":\"default\",\"summary\":\"This file was modified with 'mod1' as its contents.\\n\\ntwo: second commit to review\\n\\nThis file was modified with 'mod2' as its contents.\\n\\n3: a commit with no detailed message\",\"testPlan\":\"LOL! What testing?!\\n\\nHaha! yeah, right.\",\"lineCount\":\"3\",\"activeDiffPHID\":\"PHID-DIFF-wwqrxqimkcomhphpy4fb\",\"diffs\":[\"21008\",\"21007\",\"21006\"],\"commits\":[],\"reviewers\":{\"PHID-PROJ-3dvcxzznrjru2xmmses3\":\"PHID-PROJ-3dvcxzznrjru2xmmses3\"},\"ccs\":[\"PHID-USER-q42dn7cc3donqriafhjx\"],\"hashes\":[[\"hgcm\",\"\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\"],[\"hgcm\",\"\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\"],[\"hgcm\",\"\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\"]],\"auxiliary\":{\"phabricator:projects\":[],\"phabricator:depends-on\":[]},\"repositoryPHID\":\"PHID-REPO-bvunnehri4u2isyr7bc3\
",\"sourcePath\":\"\\/\"}],\"error_code\":null,\"error_info\":null}"
+ },
+ "status": {
+ "code": 200,
+ "message": "OK"
+ }
+ }
+ }
+ ],
+ "version": 1
+}
\ No newline at end of file
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/phabricator/phabsend-fold-split-end.json Thu Apr 16 22:51:09 2020 +0530
@@ -0,0 +1,1028 @@
+{
+ "interactions": [
+ {
+ "request": {
+ "headers": {
+ "content-length": [
+ "194"
+ ],
+ "accept": [
+ "application/mercurial-0.1"
+ ],
+ "content-type": [
+ "application/x-www-form-urlencoded"
+ ],
+ "host": [
+ "phab.mercurial-scm.org"
+ ],
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.3.2+463-d35fbae9dd33+20200408)"
+ ]
+ },
+ "body": "__conduit__=1¶ms=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22revisionIDs%22%3A+%5B8388%2C+8388%2C+8388%2C+8388%2C+8388%2C+8388%5D%7D&output=json",
+ "uri": "https://phab.mercurial-scm.org//api/differential.querydiffs",
+ "method": "POST"
+ },
+ "response": {
+ "headers": {
+ "x-xss-protection": [
+ "1; mode=block"
+ ],
+ "strict-transport-security": [
+ "max-age=0; includeSubdomains; preload"
+ ],
+ "transfer-encoding": [
+ "chunked"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
+ ],
+ "content-type": [
+ "application/json"
+ ],
+ "referrer-policy": [
+ "no-referrer"
+ ],
+ "server": [
+ "Apache/2.4.10 (Debian)"
+ ],
+ "cache-control": [
+ "no-store"
+ ],
+ "x-content-type-options": [
+ "nosniff"
+ ],
+ "date": [
+ "Wed, 08 Apr 2020 21:13:29 GMT"
+ ]
+ },
+ "body": {
+ "string": "{\"result\":{\"21010\":{\"id\":\"21010\",\"revisionID\":\"8388\",\"dateCreated\":\"1586380401\",\"dateModified\":\"1586380405\",\"sourceControlBaseRevision\":\"0000000000000000000000000000000000000000\",\"sourceControlPath\":\"\\/\",\"sourceControlSystem\":\"hg\",\"branch\":\"default\",\"bookmark\":null,\"creationMethod\":\"phabsend\",\"description\":null,\"unitStatus\":\"0\",\"lintStatus\":\"0\",\"changes\":[{\"id\":\"56831\",\"metadata\":{\"line:first\":1,\"hash.effect\":\"r1N4i770De9n\"},\"oldPath\":null,\"currentPath\":\"file2.txt\",\"awayPaths\":[],\"oldProperties\":[],\"newProperties\":{\"unix:filemode\":\"100644\"},\"type\":\"1\",\"fileType\":\"1\",\"commitHash\":null,\"addLines\":\"1\",\"delLines\":\"0\",\"hunks\":[{\"oldOffset\":\"0\",\"newOffset\":\"1\",\"oldLength\":\"0\",\"newLength\":\"1\",\"addLines\":null,\"delLines\":null,\"isMissingOldNewline\":null,\"isMissingNewNewline\":null,\"corpus\":\"+another mod\\n\"}]},{\"id\":\"56830\",\"metadata\":{\"line:first\":1,\"hash.effect\":\"cAKuVP31KNrx\"},\"oldPath\":null,\"currentPath\":\"file.txt\",\"awayPaths\":[],\"oldProperties\":[],\"newProperties\":{\"unix:filemode\":\"100644\"},\"type\":\"1\",\"fileType\":\"1\",\"commitHash\":null,\"addLines\":\"1\",\"delLines\":\"0\",\"hunks\":[{\"oldOffset\":\"0\",\"newOffset\":\"1\",\"oldLength\":\"0\",\"newLength\":\"1\",\"addLines\":null,\"delLines\":null,\"isMissingOldNewline\":null,\"isMissingNewNewline\":null,\"corpus\":\"+mod3\\n\"}]}],\"properties\":{\"hg:meta\":{\"branch\":\"default\",\"date\":\"0 
0\",\"node\":\"ac7db67f0991021fb38d3cbf31f5d605e694d3a9\",\"parent\":\"0000000000000000000000000000000000000000\",\"user\":\"test\"},\"local:commits\":{\"15e9b14b4b4c37ccd18298d058a184068718765e\":{\"author\":\"test\",\"authorEmail\":\"test\",\"branch\":\"default\",\"commit\":\"15e9b14b4b4c37ccd18298d058a184068718765e\",\"parents\":[\"0000000000000000000000000000000000000000\"],\"time\":0},\"30682b960804bd91823ea5ccb5d6a7d999f8d1ea\":{\"author\":\"test\",\"authorEmail\":\"test\",\"branch\":\"default\",\"commit\":\"30682b960804bd91823ea5ccb5d6a7d999f8d1ea\",\"parents\":[\"3ee132d41dbc24bf5a4df3e97573d6e922446565\"],\"time\":0},\"3ee132d41dbc24bf5a4df3e97573d6e922446565\":{\"author\":\"test\",\"authorEmail\":\"test\",\"branch\":\"default\",\"commit\":\"3ee132d41dbc24bf5a4df3e97573d6e922446565\",\"parents\":[\"6320b7d714cf2b2b71370eafeae29e2988d07872\"],\"time\":0},\"6320b7d714cf2b2b71370eafeae29e2988d07872\":{\"author\":\"test\",\"authorEmail\":\"test\",\"branch\":\"default\",\"commit\":\"6320b7d714cf2b2b71370eafeae29e2988d07872\",\"parents\":[\"15e9b14b4b4c37ccd18298d058a184068718765e\"],\"time\":0},\"ac7db67f0991021fb38d3cbf31f5d605e694d3a9\":{\"author\":\"test\",\"authorEmail\":\"test\",\"branch\":\"default\",\"commit\":\"ac7db67f0991021fb38d3cbf31f5d605e694d3a9\",\"parents\":[\"30682b960804bd91823ea5ccb5d6a7d999f8d1ea\"],\"time\":0}}},\"authorName\":\"test\",\"authorEmail\":\"test\"}},\"error_code\":null,\"error_info\":null}"
+ },
+ "status": {
+ "code": 200,
+ "message": "OK"
+ }
+ }
+ },
+ {
+ "request": {
+ "headers": {
+ "content-length": [
+ "183"
+ ],
+ "accept": [
+ "application/mercurial-0.1"
+ ],
+ "content-type": [
+ "application/x-www-form-urlencoded"
+ ],
+ "host": [
+ "phab.mercurial-scm.org"
+ ],
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.3.2+463-d35fbae9dd33+20200408)"
+ ]
+ },
+ "body": "__conduit__=1¶ms=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22constraints%22%3A+%7B%22callsigns%22%3A+%5B%22HG%22%5D%7D%7D&output=json",
+ "uri": "https://phab.mercurial-scm.org//api/diffusion.repository.search",
+ "method": "POST"
+ },
+ "response": {
+ "headers": {
+ "x-xss-protection": [
+ "1; mode=block"
+ ],
+ "strict-transport-security": [
+ "max-age=0; includeSubdomains; preload"
+ ],
+ "transfer-encoding": [
+ "chunked"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
+ ],
+ "content-type": [
+ "application/json"
+ ],
+ "referrer-policy": [
+ "no-referrer"
+ ],
+ "server": [
+ "Apache/2.4.10 (Debian)"
+ ],
+ "cache-control": [
+ "no-store"
+ ],
+ "x-content-type-options": [
+ "nosniff"
+ ],
+ "date": [
+ "Wed, 08 Apr 2020 21:13:29 GMT"
+ ]
+ },
+ "body": {
+ "string": "{\"result\":{\"data\":[{\"id\":2,\"type\":\"REPO\",\"phid\":\"PHID-REPO-bvunnehri4u2isyr7bc3\",\"fields\":{\"name\":\"Mercurial\",\"vcs\":\"hg\",\"callsign\":\"HG\",\"shortName\":null,\"status\":\"active\",\"isImporting\":false,\"almanacServicePHID\":null,\"refRules\":{\"fetchRules\":[],\"trackRules\":[],\"permanentRefRules\":[]},\"spacePHID\":null,\"dateCreated\":1498761653,\"dateModified\":1500403184,\"policy\":{\"view\":\"public\",\"edit\":\"admin\",\"diffusion.push\":\"users\"}},\"attachments\":{}}],\"maps\":{},\"query\":{\"queryKey\":null},\"cursor\":{\"limit\":100,\"after\":null,\"before\":null,\"order\":null}},\"error_code\":null,\"error_info\":null}"
+ },
+ "status": {
+ "code": 200,
+ "message": "OK"
+ }
+ }
+ },
+ {
+ "request": {
+ "headers": {
+ "content-length": [
+ "2327"
+ ],
+ "accept": [
+ "application/mercurial-0.1"
+ ],
+ "content-type": [
+ "application/x-www-form-urlencoded"
+ ],
+ "host": [
+ "phab.mercurial-scm.org"
+ ],
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.3.2+463-d35fbae9dd33+20200408)"
+ ]
+ },
+ "body": "__conduit__=1¶ms=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22bookmark%22%3A+null%2C+%22branch%22%3A+%22default%22%2C+%22changes%22%3A+%7B%22file.txt%22%3A+%7B%22addLines%22%3A+1%2C+%22awayPaths%22%3A+%5B%5D%2C+%22commitHash%22%3A+null%2C+%22currentPath%22%3A+%22file.txt%22%2C+%22delLines%22%3A+0%2C+%22fileType%22%3A+1%2C+%22hunks%22%3A+%5B%7B%22addLines%22%3A+1%2C+%22corpus%22%3A+%22%2Bmod3%5Cn%22%2C+%22delLines%22%3A+0%2C+%22newLength%22%3A+1%2C+%22newOffset%22%3A+1%2C+%22oldLength%22%3A+0%2C+%22oldOffset%22%3A+0%7D%5D%2C+%22metadata%22%3A+%7B%7D%2C+%22newProperties%22%3A+%7B%22unix%3Afilemode%22%3A+%22100644%22%7D%2C+%22oldPath%22%3A+null%2C+%22oldProperties%22%3A+%7B%7D%2C+%22type%22%3A+1%7D%2C+%22file2.txt%22%3A+%7B%22addLines%22%3A+1%2C+%22awayPaths%22%3A+%5B%5D%2C+%22commitHash%22%3A+null%2C+%22currentPath%22%3A+%22file2.txt%22%2C+%22delLines%22%3A+0%2C+%22fileType%22%3A+1%2C+%22hunks%22%3A+%5B%7B%22addLines%22%3A+1%2C+%22corpus%22%3A+%22%2Bamended+mod%5Cn%22%2C+%22delLines%22%3A+0%2C+%22newLength%22%3A+1%2C+%22newOffset%22%3A+1%2C+%22oldLength%22%3A+0%2C+%22oldOffset%22%3A+0%7D%5D%2C+%22metadata%22%3A+%7B%7D%2C+%22newProperties%22%3A+%7B%22unix%3Afilemode%22%3A+%22100644%22%7D%2C+%22oldPath%22%3A+null%2C+%22oldProperties%22%3A+%7B%7D%2C+%22type%22%3A+1%7D%2C+%22file3.txt%22%3A+%7B%22addLines%22%3A+1%2C+%22awayPaths%22%3A+%5B%5D%2C+%22commitHash%22%3A+null%2C+%22currentPath%22%3A+%22file3.txt%22%2C+%22delLines%22%3A+0%2C+%22fileType%22%3A+1%2C+%22hunks%22%3A+%5B%7B%22addLines%22%3A+1%2C+%22corpus%22%3A+%22%2Bfoo%5Cn%22%2C+%22delLines%22%3A+0%2C+%22newLength%22%3A+1%2C+%22newOffset%22%3A+1%2C+%22oldLength%22%3A+0%2C+%22oldOffset%22%3A+0%7D%5D%2C+%22metadata%22%3A+%7B%7D%2C+%22newProperties%22%3A+%7B%22unix%3Afilemode%22%3A+%22100644%22%7D%2C+%22oldPath%22%3A+null%2C+%22oldProperties%22%3A+%7B%7D%2C+%22type%22%3A+1%7D%7D%2C+%22creationMethod%22%3A+%22phabsend%22%2C+%22lintStatus%22%3A+%22none%22%2C+%22repositoryPHID%22%3A+
%22PHID-REPO-bvunnehri4u2isyr7bc3%22%2C+%22sourceControlBaseRevision%22%3A+%220000000000000000000000000000000000000000%22%2C+%22sourceControlPath%22%3A+%22%2F%22%2C+%22sourceControlSystem%22%3A+%22hg%22%2C+%22sourceMachine%22%3A+%22%22%2C+%22sourcePath%22%3A+%22%2F%22%2C+%22unitStatus%22%3A+%22none%22%7D&output=json",
+ "uri": "https://phab.mercurial-scm.org//api/differential.creatediff",
+ "method": "POST"
+ },
+ "response": {
+ "headers": {
+ "x-xss-protection": [
+ "1; mode=block"
+ ],
+ "strict-transport-security": [
+ "max-age=0; includeSubdomains; preload"
+ ],
+ "transfer-encoding": [
+ "chunked"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
+ ],
+ "content-type": [
+ "application/json"
+ ],
+ "referrer-policy": [
+ "no-referrer"
+ ],
+ "server": [
+ "Apache/2.4.10 (Debian)"
+ ],
+ "cache-control": [
+ "no-store"
+ ],
+ "x-content-type-options": [
+ "nosniff"
+ ],
+ "date": [
+ "Wed, 08 Apr 2020 21:13:30 GMT"
+ ]
+ },
+ "body": {
+ "string": "{\"result\":{\"diffid\":21011,\"phid\":\"PHID-DIFF-vyaqmcwywxowtnjrfkkw\",\"uri\":\"https:\\/\\/phab.mercurial-scm.org\\/differential\\/diff\\/21011\\/\"},\"error_code\":null,\"error_info\":null}"
+ },
+ "status": {
+ "code": 200,
+ "message": "OK"
+ }
+ }
+ },
+ {
+ "request": {
+ "headers": {
+ "content-length": [
+ "482"
+ ],
+ "accept": [
+ "application/mercurial-0.1"
+ ],
+ "content-type": [
+ "application/x-www-form-urlencoded"
+ ],
+ "host": [
+ "phab.mercurial-scm.org"
+ ],
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.3.2+463-d35fbae9dd33+20200408)"
+ ]
+ },
+ "body": "__conduit__=1¶ms=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22data%22%3A+%22%7B%5C%22branch%5C%22%3A+%5C%22default%5C%22%2C+%5C%22date%5C%22%3A+%5C%220+0%5C%22%2C+%5C%22node%5C%22%3A+%5C%22b50946d5e4901d7f7801d572342ef90d06a85a85%5C%22%2C+%5C%22parent%5C%22%3A+%5C%220000000000000000000000000000000000000000%5C%22%2C+%5C%22user%5C%22%3A+%5C%22test%5C%22%7D%22%2C+%22diff_id%22%3A+21011%2C+%22name%22%3A+%22hg%3Ameta%22%7D&output=json",
+ "uri": "https://phab.mercurial-scm.org//api/differential.setdiffproperty",
+ "method": "POST"
+ },
+ "response": {
+ "headers": {
+ "x-xss-protection": [
+ "1; mode=block"
+ ],
+ "strict-transport-security": [
+ "max-age=0; includeSubdomains; preload"
+ ],
+ "transfer-encoding": [
+ "chunked"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
+ ],
+ "content-type": [
+ "application/json"
+ ],
+ "referrer-policy": [
+ "no-referrer"
+ ],
+ "server": [
+ "Apache/2.4.10 (Debian)"
+ ],
+ "cache-control": [
+ "no-store"
+ ],
+ "x-content-type-options": [
+ "nosniff"
+ ],
+ "date": [
+ "Wed, 08 Apr 2020 21:13:30 GMT"
+ ]
+ },
+ "body": {
+ "string": "{\"result\":null,\"error_code\":null,\"error_info\":null}"
+ },
+ "status": {
+ "code": 200,
+ "message": "OK"
+ }
+ }
+ },
+ {
+ "request": {
+ "headers": {
+ "content-length": [
+ "2514"
+ ],
+ "accept": [
+ "application/mercurial-0.1"
+ ],
+ "content-type": [
+ "application/x-www-form-urlencoded"
+ ],
+ "host": [
+ "phab.mercurial-scm.org"
+ ],
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.3.2+463-d35fbae9dd33+20200408)"
+ ]
+ },
+ "body": "__conduit__=1¶ms=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22data%22%3A+%22%7B%5C%2215e9b14b4b4c37ccd18298d058a184068718765e%5C%22%3A+%7B%5C%22author%5C%22%3A+%5C%22test%5C%22%2C+%5C%22authorEmail%5C%22%3A+%5C%22test%5C%22%2C+%5C%22branch%5C%22%3A+%5C%22default%5C%22%2C+%5C%22commit%5C%22%3A+%5C%2215e9b14b4b4c37ccd18298d058a184068718765e%5C%22%2C+%5C%22parents%5C%22%3A+%5B%5C%220000000000000000000000000000000000000000%5C%22%5D%2C+%5C%22time%5C%22%3A+0%7D%2C+%5C%2230682b960804bd91823ea5ccb5d6a7d999f8d1ea%5C%22%3A+%7B%5C%22author%5C%22%3A+%5C%22test%5C%22%2C+%5C%22authorEmail%5C%22%3A+%5C%22test%5C%22%2C+%5C%22branch%5C%22%3A+%5C%22default%5C%22%2C+%5C%22commit%5C%22%3A+%5C%2230682b960804bd91823ea5ccb5d6a7d999f8d1ea%5C%22%2C+%5C%22parents%5C%22%3A+%5B%5C%223ee132d41dbc24bf5a4df3e97573d6e922446565%5C%22%5D%2C+%5C%22time%5C%22%3A+0%7D%2C+%5C%223ee132d41dbc24bf5a4df3e97573d6e922446565%5C%22%3A+%7B%5C%22author%5C%22%3A+%5C%22test%5C%22%2C+%5C%22authorEmail%5C%22%3A+%5C%22test%5C%22%2C+%5C%22branch%5C%22%3A+%5C%22default%5C%22%2C+%5C%22commit%5C%22%3A+%5C%223ee132d41dbc24bf5a4df3e97573d6e922446565%5C%22%2C+%5C%22parents%5C%22%3A+%5B%5C%226320b7d714cf2b2b71370eafeae29e2988d07872%5C%22%5D%2C+%5C%22time%5C%22%3A+0%7D%2C+%5C%226320b7d714cf2b2b71370eafeae29e2988d07872%5C%22%3A+%7B%5C%22author%5C%22%3A+%5C%22test%5C%22%2C+%5C%22authorEmail%5C%22%3A+%5C%22test%5C%22%2C+%5C%22branch%5C%22%3A+%5C%22default%5C%22%2C+%5C%22commit%5C%22%3A+%5C%226320b7d714cf2b2b71370eafeae29e2988d07872%5C%22%2C+%5C%22parents%5C%22%3A+%5B%5C%2215e9b14b4b4c37ccd18298d058a184068718765e%5C%22%5D%2C+%5C%22time%5C%22%3A+0%7D%2C+%5C%226bc15dc99efd596be5908c187f68d6810671121e%5C%22%3A+%7B%5C%22author%5C%22%3A+%5C%22test%5C%22%2C+%5C%22authorEmail%5C%22%3A+%5C%22test%5C%22%2C+%5C%22branch%5C%22%3A+%5C%22default%5C%22%2C+%5C%22commit%5C%22%3A+%5C%226bc15dc99efd596be5908c187f68d6810671121e%5C%22%2C+%5C%22parents%5C%22%3A+%5B%5C%2230682b960804bd91823ea5ccb5d6a7d999f8d1ea%5C
%22%5D%2C+%5C%22time%5C%22%3A+0%7D%2C+%5C%22b50946d5e4901d7f7801d572342ef90d06a85a85%5C%22%3A+%7B%5C%22author%5C%22%3A+%5C%22test%5C%22%2C+%5C%22authorEmail%5C%22%3A+%5C%22test%5C%22%2C+%5C%22branch%5C%22%3A+%5C%22default%5C%22%2C+%5C%22commit%5C%22%3A+%5C%22b50946d5e4901d7f7801d572342ef90d06a85a85%5C%22%2C+%5C%22parents%5C%22%3A+%5B%5C%226bc15dc99efd596be5908c187f68d6810671121e%5C%22%5D%2C+%5C%22time%5C%22%3A+0%7D%7D%22%2C+%22diff_id%22%3A+21011%2C+%22name%22%3A+%22local%3Acommits%22%7D&output=json",
+ "uri": "https://phab.mercurial-scm.org//api/differential.setdiffproperty",
+ "method": "POST"
+ },
+ "response": {
+ "headers": {
+ "x-xss-protection": [
+ "1; mode=block"
+ ],
+ "strict-transport-security": [
+ "max-age=0; includeSubdomains; preload"
+ ],
+ "transfer-encoding": [
+ "chunked"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
+ ],
+ "content-type": [
+ "application/json"
+ ],
+ "referrer-policy": [
+ "no-referrer"
+ ],
+ "server": [
+ "Apache/2.4.10 (Debian)"
+ ],
+ "cache-control": [
+ "no-store"
+ ],
+ "x-content-type-options": [
+ "nosniff"
+ ],
+ "date": [
+ "Wed, 08 Apr 2020 21:13:30 GMT"
+ ]
+ },
+ "body": {
+ "string": "{\"result\":null,\"error_code\":null,\"error_info\":null}"
+ },
+ "status": {
+ "code": 200,
+ "message": "OK"
+ }
+ }
+ },
+ {
+ "request": {
+ "headers": {
+ "content-length": [
+ "232"
+ ],
+ "accept": [
+ "application/mercurial-0.1"
+ ],
+ "content-type": [
+ "application/x-www-form-urlencoded"
+ ],
+ "host": [
+ "phab.mercurial-scm.org"
+ ],
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.3.2+463-d35fbae9dd33+20200408)"
+ ]
+ },
+ "body": "__conduit__=1¶ms=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22corpus%22%3A+%22added+file%5Cn%5CnDifferential+Revision%3A+https%3A%2F%2Fphab.mercurial-scm.org%2FD8388%22%7D&output=json",
+ "uri": "https://phab.mercurial-scm.org//api/differential.parsecommitmessage",
+ "method": "POST"
+ },
+ "response": {
+ "headers": {
+ "x-xss-protection": [
+ "1; mode=block"
+ ],
+ "strict-transport-security": [
+ "max-age=0; includeSubdomains; preload"
+ ],
+ "transfer-encoding": [
+ "chunked"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
+ ],
+ "content-type": [
+ "application/json"
+ ],
+ "referrer-policy": [
+ "no-referrer"
+ ],
+ "server": [
+ "Apache/2.4.10 (Debian)"
+ ],
+ "cache-control": [
+ "no-store"
+ ],
+ "x-content-type-options": [
+ "nosniff"
+ ],
+ "date": [
+ "Wed, 08 Apr 2020 21:13:31 GMT"
+ ]
+ },
+ "body": {
+ "string": "{\"result\":{\"errors\":[],\"fields\":{\"title\":\"added file\",\"revisionID\":8388},\"revisionIDFieldInfo\":{\"value\":8388,\"validDomain\":\"https:\\/\\/phab.mercurial-scm.org\"},\"transactions\":[{\"type\":\"title\",\"value\":\"added file\"}]},\"error_code\":null,\"error_info\":null}"
+ },
+ "status": {
+ "code": 200,
+ "message": "OK"
+ }
+ }
+ },
+ {
+ "request": {
+ "headers": {
+ "content-length": [
+ "364"
+ ],
+ "accept": [
+ "application/mercurial-0.1"
+ ],
+ "content-type": [
+ "application/x-www-form-urlencoded"
+ ],
+ "host": [
+ "phab.mercurial-scm.org"
+ ],
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.3.2+463-d35fbae9dd33+20200408)"
+ ]
+ },
+ "body": "__conduit__=1¶ms=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22corpus%22%3A+%22one%3A+first+commit+to+review%5Cn%5CnThis+file+was+modified+with+%27mod1%27+as+its+contents.%5Cn%5CnTest+Plan%3A%5CnLOL%21++What+testing%3F%21%5Cn%5CnDifferential+Revision%3A+https%3A%2F%2Fphab.mercurial-scm.org%2FD8388%22%7D&output=json",
+ "uri": "https://phab.mercurial-scm.org//api/differential.parsecommitmessage",
+ "method": "POST"
+ },
+ "response": {
+ "headers": {
+ "x-xss-protection": [
+ "1; mode=block"
+ ],
+ "strict-transport-security": [
+ "max-age=0; includeSubdomains; preload"
+ ],
+ "transfer-encoding": [
+ "chunked"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
+ ],
+ "content-type": [
+ "application/json"
+ ],
+ "referrer-policy": [
+ "no-referrer"
+ ],
+ "server": [
+ "Apache/2.4.10 (Debian)"
+ ],
+ "cache-control": [
+ "no-store"
+ ],
+ "x-content-type-options": [
+ "nosniff"
+ ],
+ "date": [
+ "Wed, 08 Apr 2020 21:13:31 GMT"
+ ]
+ },
+ "body": {
+ "string": "{\"result\":{\"errors\":[],\"fields\":{\"title\":\"one: first commit to review\",\"testPlan\":\"LOL! What testing?!\",\"revisionID\":8388,\"summary\":\"This file was modified with 'mod1' as its contents.\"},\"revisionIDFieldInfo\":{\"value\":8388,\"validDomain\":\"https:\\/\\/phab.mercurial-scm.org\"},\"transactions\":[{\"type\":\"title\",\"value\":\"one: first commit to review\"},{\"type\":\"testPlan\",\"value\":\"LOL! What testing?!\"},{\"type\":\"summary\",\"value\":\"This file was modified with 'mod1' as its contents.\"}]},\"error_code\":null,\"error_info\":null}"
+ },
+ "status": {
+ "code": 200,
+ "message": "OK"
+ }
+ }
+ },
+ {
+ "request": {
+ "headers": {
+ "content-length": [
+ "361"
+ ],
+ "accept": [
+ "application/mercurial-0.1"
+ ],
+ "content-type": [
+ "application/x-www-form-urlencoded"
+ ],
+ "host": [
+ "phab.mercurial-scm.org"
+ ],
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.3.2+463-d35fbae9dd33+20200408)"
+ ]
+ },
+ "body": "__conduit__=1¶ms=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22corpus%22%3A+%22two%3A+second+commit+to+review%5Cn%5CnThis+file+was+modified+with+%27mod2%27+as+its+contents.%5Cn%5CnTest+Plan%3A%5CnHaha%21+yeah%2C+right.%5Cn%5CnDifferential+Revision%3A+https%3A%2F%2Fphab.mercurial-scm.org%2FD8388%22%7D&output=json",
+ "uri": "https://phab.mercurial-scm.org//api/differential.parsecommitmessage",
+ "method": "POST"
+ },
+ "response": {
+ "headers": {
+ "x-xss-protection": [
+ "1; mode=block"
+ ],
+ "strict-transport-security": [
+ "max-age=0; includeSubdomains; preload"
+ ],
+ "transfer-encoding": [
+ "chunked"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
+ ],
+ "content-type": [
+ "application/json"
+ ],
+ "referrer-policy": [
+ "no-referrer"
+ ],
+ "server": [
+ "Apache/2.4.10 (Debian)"
+ ],
+ "cache-control": [
+ "no-store"
+ ],
+ "x-content-type-options": [
+ "nosniff"
+ ],
+ "date": [
+ "Wed, 08 Apr 2020 21:13:32 GMT"
+ ]
+ },
+ "body": {
+ "string": "{\"result\":{\"errors\":[],\"fields\":{\"title\":\"two: second commit to review\",\"testPlan\":\"Haha! yeah, right.\",\"revisionID\":8388,\"summary\":\"This file was modified with 'mod2' as its contents.\"},\"revisionIDFieldInfo\":{\"value\":8388,\"validDomain\":\"https:\\/\\/phab.mercurial-scm.org\"},\"transactions\":[{\"type\":\"title\",\"value\":\"two: second commit to review\"},{\"type\":\"testPlan\",\"value\":\"Haha! yeah, right.\"},{\"type\":\"summary\",\"value\":\"This file was modified with 'mod2' as its contents.\"}]},\"error_code\":null,\"error_info\":null}"
+ },
+ "status": {
+ "code": 200,
+ "message": "OK"
+ }
+ }
+ },
+ {
+ "request": {
+ "headers": {
+ "content-length": [
+ "260"
+ ],
+ "accept": [
+ "application/mercurial-0.1"
+ ],
+ "content-type": [
+ "application/x-www-form-urlencoded"
+ ],
+ "host": [
+ "phab.mercurial-scm.org"
+ ],
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.3.2+463-d35fbae9dd33+20200408)"
+ ]
+ },
+ "body": "__conduit__=1¶ms=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22corpus%22%3A+%223%3A+a+commit+with+no+detailed+message%5Cn%5CnDifferential+Revision%3A+https%3A%2F%2Fphab.mercurial-scm.org%2FD8388%22%7D&output=json",
+ "uri": "https://phab.mercurial-scm.org//api/differential.parsecommitmessage",
+ "method": "POST"
+ },
+ "response": {
+ "headers": {
+ "x-xss-protection": [
+ "1; mode=block"
+ ],
+ "strict-transport-security": [
+ "max-age=0; includeSubdomains; preload"
+ ],
+ "transfer-encoding": [
+ "chunked"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
+ ],
+ "content-type": [
+ "application/json"
+ ],
+ "referrer-policy": [
+ "no-referrer"
+ ],
+ "server": [
+ "Apache/2.4.10 (Debian)"
+ ],
+ "cache-control": [
+ "no-store"
+ ],
+ "x-content-type-options": [
+ "nosniff"
+ ],
+ "date": [
+ "Wed, 08 Apr 2020 21:13:32 GMT"
+ ]
+ },
+ "body": {
+ "string": "{\"result\":{\"errors\":[],\"fields\":{\"title\":\"3: a commit with no detailed message\",\"revisionID\":8388},\"revisionIDFieldInfo\":{\"value\":8388,\"validDomain\":\"https:\\/\\/phab.mercurial-scm.org\"},\"transactions\":[{\"type\":\"title\",\"value\":\"3: a commit with no detailed message\"}]},\"error_code\":null,\"error_info\":null}"
+ },
+ "status": {
+ "code": 200,
+ "message": "OK"
+ }
+ }
+ },
+ {
+ "request": {
+ "headers": {
+ "content-length": [
+ "251"
+ ],
+ "accept": [
+ "application/mercurial-0.1"
+ ],
+ "content-type": [
+ "application/x-www-form-urlencoded"
+ ],
+ "host": [
+ "phab.mercurial-scm.org"
+ ],
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.3.2+463-d35fbae9dd33+20200408)"
+ ]
+ },
+ "body": "__conduit__=1¶ms=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22corpus%22%3A+%22four%3A+extend+the+fold+range%5Cn%5CnDifferential+Revision%3A+https%3A%2F%2Fphab.mercurial-scm.org%2FD8388%22%7D&output=json",
+ "uri": "https://phab.mercurial-scm.org//api/differential.parsecommitmessage",
+ "method": "POST"
+ },
+ "response": {
+ "headers": {
+ "x-xss-protection": [
+ "1; mode=block"
+ ],
+ "strict-transport-security": [
+ "max-age=0; includeSubdomains; preload"
+ ],
+ "transfer-encoding": [
+ "chunked"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
+ ],
+ "content-type": [
+ "application/json"
+ ],
+ "referrer-policy": [
+ "no-referrer"
+ ],
+ "server": [
+ "Apache/2.4.10 (Debian)"
+ ],
+ "cache-control": [
+ "no-store"
+ ],
+ "x-content-type-options": [
+ "nosniff"
+ ],
+ "date": [
+ "Wed, 08 Apr 2020 21:13:33 GMT"
+ ]
+ },
+ "body": {
+ "string": "{\"result\":{\"errors\":[],\"fields\":{\"title\":\"four: extend the fold range\",\"revisionID\":8388},\"revisionIDFieldInfo\":{\"value\":8388,\"validDomain\":\"https:\\/\\/phab.mercurial-scm.org\"},\"transactions\":[{\"type\":\"title\",\"value\":\"four: extend the fold range\"}]},\"error_code\":null,\"error_info\":null}"
+ },
+ "status": {
+ "code": 200,
+ "message": "OK"
+ }
+ }
+ },
+ {
+ "request": {
+ "headers": {
+ "content-length": [
+ "251"
+ ],
+ "accept": [
+ "application/mercurial-0.1"
+ ],
+ "content-type": [
+ "application/x-www-form-urlencoded"
+ ],
+ "host": [
+ "phab.mercurial-scm.org"
+ ],
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.3.2+463-d35fbae9dd33+20200408)"
+ ]
+ },
+ "body": "__conduit__=1¶ms=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22corpus%22%3A+%22four%3A+extend+the+fold+range%5Cn%5CnDifferential+Revision%3A+https%3A%2F%2Fphab.mercurial-scm.org%2FD8388%22%7D&output=json",
+ "uri": "https://phab.mercurial-scm.org//api/differential.parsecommitmessage",
+ "method": "POST"
+ },
+ "response": {
+ "headers": {
+ "x-xss-protection": [
+ "1; mode=block"
+ ],
+ "strict-transport-security": [
+ "max-age=0; includeSubdomains; preload"
+ ],
+ "transfer-encoding": [
+ "chunked"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
+ ],
+ "content-type": [
+ "application/json"
+ ],
+ "referrer-policy": [
+ "no-referrer"
+ ],
+ "server": [
+ "Apache/2.4.10 (Debian)"
+ ],
+ "cache-control": [
+ "no-store"
+ ],
+ "x-content-type-options": [
+ "nosniff"
+ ],
+ "date": [
+ "Wed, 08 Apr 2020 21:13:33 GMT"
+ ]
+ },
+ "body": {
+ "string": "{\"result\":{\"errors\":[],\"fields\":{\"title\":\"four: extend the fold range\",\"revisionID\":8388},\"revisionIDFieldInfo\":{\"value\":8388,\"validDomain\":\"https:\\/\\/phab.mercurial-scm.org\"},\"transactions\":[{\"type\":\"title\",\"value\":\"four: extend the fold range\"}]},\"error_code\":null,\"error_info\":null}"
+ },
+ "status": {
+ "code": 200,
+ "message": "OK"
+ }
+ }
+ },
+ {
+ "request": {
+ "headers": {
+ "content-length": [
+ "836"
+ ],
+ "accept": [
+ "application/mercurial-0.1"
+ ],
+ "content-type": [
+ "application/x-www-form-urlencoded"
+ ],
+ "host": [
+ "phab.mercurial-scm.org"
+ ],
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.3.2+463-d35fbae9dd33+20200408)"
+ ]
+ },
+ "body": "__conduit__=1¶ms=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22objectIdentifier%22%3A+8388%2C+%22transactions%22%3A+%5B%7B%22type%22%3A+%22update%22%2C+%22value%22%3A+%22PHID-DIFF-vyaqmcwywxowtnjrfkkw%22%7D%2C+%7B%22type%22%3A+%22title%22%2C+%22value%22%3A+%22added+file%22%7D%2C+%7B%22type%22%3A+%22testPlan%22%2C+%22value%22%3A+%22LOL%21++What+testing%3F%21%5Cn%5CnHaha%21+yeah%2C+right.%22%7D%2C+%7B%22type%22%3A+%22summary%22%2C+%22value%22%3A+%22one%3A+first+commit+to+review%5Cn%5CnThis+file+was+modified+with+%27mod1%27+as+its+contents.%5Cn%5Cntwo%3A+second+commit+to+review%5Cn%5CnThis+file+was+modified+with+%27mod2%27+as+its+contents.%5Cn%5Cn3%3A+a+commit+with+no+detailed+message%5Cn%5Cnfour%3A+extend+the+fold+range%5Cn%5Cnfour%3A+extend+the+fold+range%22%7D%5D%7D&output=json",
+ "uri": "https://phab.mercurial-scm.org//api/differential.revision.edit",
+ "method": "POST"
+ },
+ "response": {
+ "headers": {
+ "x-xss-protection": [
+ "1; mode=block"
+ ],
+ "strict-transport-security": [
+ "max-age=0; includeSubdomains; preload"
+ ],
+ "transfer-encoding": [
+ "chunked"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
+ ],
+ "content-type": [
+ "application/json"
+ ],
+ "referrer-policy": [
+ "no-referrer"
+ ],
+ "server": [
+ "Apache/2.4.10 (Debian)"
+ ],
+ "cache-control": [
+ "no-store"
+ ],
+ "x-content-type-options": [
+ "nosniff"
+ ],
+ "date": [
+ "Wed, 08 Apr 2020 21:13:33 GMT"
+ ]
+ },
+ "body": {
+ "string": "{\"result\":{\"object\":{\"id\":8388,\"phid\":\"PHID-DREV-v5iywrnzj4h4uwo57pob\"},\"transactions\":[{\"phid\":\"PHID-XACT-DREV-hnejfcaarddna5z\"},{\"phid\":\"PHID-XACT-DREV-7icktpjfcm77foq\"}]},\"error_code\":null,\"error_info\":null}"
+ },
+ "status": {
+ "code": 200,
+ "message": "OK"
+ }
+ }
+ },
+ {
+ "request": {
+ "headers": {
+ "content-length": [
+ "146"
+ ],
+ "accept": [
+ "application/mercurial-0.1"
+ ],
+ "content-type": [
+ "application/x-www-form-urlencoded"
+ ],
+ "host": [
+ "phab.mercurial-scm.org"
+ ],
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.3.2+463-d35fbae9dd33+20200408)"
+ ]
+ },
+ "body": "__conduit__=1¶ms=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22ids%22%3A+%5B8388%5D%7D&output=json",
+ "uri": "https://phab.mercurial-scm.org//api/differential.query",
+ "method": "POST"
+ },
+ "response": {
+ "headers": {
+ "x-xss-protection": [
+ "1; mode=block"
+ ],
+ "strict-transport-security": [
+ "max-age=0; includeSubdomains; preload"
+ ],
+ "transfer-encoding": [
+ "chunked"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
+ ],
+ "content-type": [
+ "application/json"
+ ],
+ "referrer-policy": [
+ "no-referrer"
+ ],
+ "server": [
+ "Apache/2.4.10 (Debian)"
+ ],
+ "cache-control": [
+ "no-store"
+ ],
+ "x-content-type-options": [
+ "nosniff"
+ ],
+ "date": [
+ "Wed, 08 Apr 2020 21:13:34 GMT"
+ ]
+ },
+ "body": {
+ "string": "{\"result\":[{\"id\":\"8388\",\"phid\":\"PHID-DREV-v5iywrnzj4h4uwo57pob\",\"title\":\"added file\",\"uri\":\"https:\\/\\/phab.mercurial-scm.org\\/D8388\",\"dateCreated\":\"1586380405\",\"dateModified\":\"1586380414\",\"authorPHID\":\"PHID-USER-tzhaient733lwrlbcag5\",\"status\":\"0\",\"statusName\":\"Needs Review\",\"properties\":{\"draft.broadcast\":true,\"lines.added\":3,\"lines.removed\":0},\"branch\":\"default\",\"summary\":\"one: first commit to review\\n\\nThis file was modified with 'mod1' as its contents.\\n\\ntwo: second commit to review\\n\\nThis file was modified with 'mod2' as its contents.\\n\\n3: a commit with no detailed message\\n\\nfour: extend the fold range\\n\\nfour: extend the fold range\",\"testPlan\":\"LOL! What testing?!\\n\\nHaha! yeah, right.\",\"lineCount\":\"3\",\"activeDiffPHID\":\"PHID-DIFF-vyaqmcwywxowtnjrfkkw\",\"diffs\":[\"21011\",\"21010\"],\"commits\":[],\"reviewers\":{\"PHID-PROJ-3dvcxzznrjru2xmmses3\":\"PHID-PROJ-3dvcxzznrjru2xmmses3\"},\"ccs\":[\"PHID-USER-q42dn7cc3donqriafhjx\"],\"hashes\":[[\"hgcm\",\"\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\"],[\"hgcm\",\"\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\"],[\"hgcm\",\"\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\"],[\"hgcm\",\"\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\
\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\"],[\"hgcm\",\"\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\"],[\"hgcm\",\"\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\"]],\"auxiliary\":{\"phabricator:projects\":[],\"phabricator:depends-on\":[]},\"repositoryPHID\":\"PHID-REPO-bvunnehri4u2isyr7bc3\",\"sourcePath\":\"\\/\"}],\"error_code\":null,\"error_info\":null}"
+ },
+ "status": {
+ "code": 200,
+ "message": "OK"
+ }
+ }
+ },
+ {
+ "request": {
+ "headers": {
+ "content-length": [
+ "482"
+ ],
+ "accept": [
+ "application/mercurial-0.1"
+ ],
+ "content-type": [
+ "application/x-www-form-urlencoded"
+ ],
+ "host": [
+ "phab.mercurial-scm.org"
+ ],
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.3.2+463-d35fbae9dd33+20200408)"
+ ]
+ },
+ "body": "__conduit__=1¶ms=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22data%22%3A+%22%7B%5C%22branch%5C%22%3A+%5C%22default%5C%22%2C+%5C%22date%5C%22%3A+%5C%220+0%5C%22%2C+%5C%22node%5C%22%3A+%5C%22b50946d5e4901d7f7801d572342ef90d06a85a85%5C%22%2C+%5C%22parent%5C%22%3A+%5C%220000000000000000000000000000000000000000%5C%22%2C+%5C%22user%5C%22%3A+%5C%22test%5C%22%7D%22%2C+%22diff_id%22%3A+21011%2C+%22name%22%3A+%22hg%3Ameta%22%7D&output=json",
+ "uri": "https://phab.mercurial-scm.org//api/differential.setdiffproperty",
+ "method": "POST"
+ },
+ "response": {
+ "headers": {
+ "x-xss-protection": [
+ "1; mode=block"
+ ],
+ "strict-transport-security": [
+ "max-age=0; includeSubdomains; preload"
+ ],
+ "transfer-encoding": [
+ "chunked"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
+ ],
+ "content-type": [
+ "application/json"
+ ],
+ "referrer-policy": [
+ "no-referrer"
+ ],
+ "server": [
+ "Apache/2.4.10 (Debian)"
+ ],
+ "cache-control": [
+ "no-store"
+ ],
+ "x-content-type-options": [
+ "nosniff"
+ ],
+ "date": [
+ "Wed, 08 Apr 2020 21:13:35 GMT"
+ ],
+ "connection": [
+ "close"
+ ]
+ },
+ "body": {
+ "string": "{\"result\":null,\"error_code\":null,\"error_info\":null}"
+ },
+ "status": {
+ "code": 200,
+ "message": "OK"
+ }
+ }
+ },
+ {
+ "request": {
+ "headers": {
+ "content-length": [
+ "2514"
+ ],
+ "accept": [
+ "application/mercurial-0.1"
+ ],
+ "content-type": [
+ "application/x-www-form-urlencoded"
+ ],
+ "host": [
+ "phab.mercurial-scm.org"
+ ],
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.3.2+463-d35fbae9dd33+20200408)"
+ ]
+ },
+ "body": "__conduit__=1¶ms=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22data%22%3A+%22%7B%5C%2215e9b14b4b4c37ccd18298d058a184068718765e%5C%22%3A+%7B%5C%22author%5C%22%3A+%5C%22test%5C%22%2C+%5C%22authorEmail%5C%22%3A+%5C%22test%5C%22%2C+%5C%22branch%5C%22%3A+%5C%22default%5C%22%2C+%5C%22commit%5C%22%3A+%5C%2215e9b14b4b4c37ccd18298d058a184068718765e%5C%22%2C+%5C%22parents%5C%22%3A+%5B%5C%220000000000000000000000000000000000000000%5C%22%5D%2C+%5C%22time%5C%22%3A+0%7D%2C+%5C%2230682b960804bd91823ea5ccb5d6a7d999f8d1ea%5C%22%3A+%7B%5C%22author%5C%22%3A+%5C%22test%5C%22%2C+%5C%22authorEmail%5C%22%3A+%5C%22test%5C%22%2C+%5C%22branch%5C%22%3A+%5C%22default%5C%22%2C+%5C%22commit%5C%22%3A+%5C%2230682b960804bd91823ea5ccb5d6a7d999f8d1ea%5C%22%2C+%5C%22parents%5C%22%3A+%5B%5C%223ee132d41dbc24bf5a4df3e97573d6e922446565%5C%22%5D%2C+%5C%22time%5C%22%3A+0%7D%2C+%5C%223ee132d41dbc24bf5a4df3e97573d6e922446565%5C%22%3A+%7B%5C%22author%5C%22%3A+%5C%22test%5C%22%2C+%5C%22authorEmail%5C%22%3A+%5C%22test%5C%22%2C+%5C%22branch%5C%22%3A+%5C%22default%5C%22%2C+%5C%22commit%5C%22%3A+%5C%223ee132d41dbc24bf5a4df3e97573d6e922446565%5C%22%2C+%5C%22parents%5C%22%3A+%5B%5C%226320b7d714cf2b2b71370eafeae29e2988d07872%5C%22%5D%2C+%5C%22time%5C%22%3A+0%7D%2C+%5C%226320b7d714cf2b2b71370eafeae29e2988d07872%5C%22%3A+%7B%5C%22author%5C%22%3A+%5C%22test%5C%22%2C+%5C%22authorEmail%5C%22%3A+%5C%22test%5C%22%2C+%5C%22branch%5C%22%3A+%5C%22default%5C%22%2C+%5C%22commit%5C%22%3A+%5C%226320b7d714cf2b2b71370eafeae29e2988d07872%5C%22%2C+%5C%22parents%5C%22%3A+%5B%5C%2215e9b14b4b4c37ccd18298d058a184068718765e%5C%22%5D%2C+%5C%22time%5C%22%3A+0%7D%2C+%5C%226bc15dc99efd596be5908c187f68d6810671121e%5C%22%3A+%7B%5C%22author%5C%22%3A+%5C%22test%5C%22%2C+%5C%22authorEmail%5C%22%3A+%5C%22test%5C%22%2C+%5C%22branch%5C%22%3A+%5C%22default%5C%22%2C+%5C%22commit%5C%22%3A+%5C%226bc15dc99efd596be5908c187f68d6810671121e%5C%22%2C+%5C%22parents%5C%22%3A+%5B%5C%2230682b960804bd91823ea5ccb5d6a7d999f8d1ea%5C
%22%5D%2C+%5C%22time%5C%22%3A+0%7D%2C+%5C%22b50946d5e4901d7f7801d572342ef90d06a85a85%5C%22%3A+%7B%5C%22author%5C%22%3A+%5C%22test%5C%22%2C+%5C%22authorEmail%5C%22%3A+%5C%22test%5C%22%2C+%5C%22branch%5C%22%3A+%5C%22default%5C%22%2C+%5C%22commit%5C%22%3A+%5C%22b50946d5e4901d7f7801d572342ef90d06a85a85%5C%22%2C+%5C%22parents%5C%22%3A+%5B%5C%226bc15dc99efd596be5908c187f68d6810671121e%5C%22%5D%2C+%5C%22time%5C%22%3A+0%7D%7D%22%2C+%22diff_id%22%3A+21011%2C+%22name%22%3A+%22local%3Acommits%22%7D&output=json",
+ "uri": "https://phab.mercurial-scm.org//api/differential.setdiffproperty",
+ "method": "POST"
+ },
+ "response": {
+ "headers": {
+ "x-xss-protection": [
+ "1; mode=block"
+ ],
+ "strict-transport-security": [
+ "max-age=0; includeSubdomains; preload"
+ ],
+ "transfer-encoding": [
+ "chunked"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
+ ],
+ "content-type": [
+ "application/json"
+ ],
+ "referrer-policy": [
+ "no-referrer"
+ ],
+ "server": [
+ "Apache/2.4.10 (Debian)"
+ ],
+ "cache-control": [
+ "no-store"
+ ],
+ "x-content-type-options": [
+ "nosniff"
+ ],
+ "date": [
+ "Wed, 08 Apr 2020 21:13:35 GMT"
+ ]
+ },
+ "body": {
+ "string": "{\"result\":null,\"error_code\":null,\"error_info\":null}"
+ },
+ "status": {
+ "code": 200,
+ "message": "OK"
+ }
+ }
+ }
+ ],
+ "version": 1
+}
\ No newline at end of file
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/phabricator/phabsend-fold-updated.json Thu Apr 16 22:51:09 2020 +0530
@@ -0,0 +1,824 @@
+{
+ "interactions": [
+ {
+ "request": {
+ "headers": {
+ "content-length": [
+ "170"
+ ],
+ "accept": [
+ "application/mercurial-0.1"
+ ],
+ "content-type": [
+ "application/x-www-form-urlencoded"
+ ],
+ "host": [
+ "phab.mercurial-scm.org"
+ ],
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.3.2+463-d35fbae9dd33+20200408)"
+ ]
+ },
+ "body": "__conduit__=1¶ms=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22revisionIDs%22%3A+%5B8387%2C+8387%2C+8387%5D%7D&output=json",
+ "uri": "https://phab.mercurial-scm.org//api/differential.querydiffs",
+ "method": "POST"
+ },
+ "response": {
+ "headers": {
+ "x-xss-protection": [
+ "1; mode=block"
+ ],
+ "strict-transport-security": [
+ "max-age=0; includeSubdomains; preload"
+ ],
+ "transfer-encoding": [
+ "chunked"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
+ ],
+ "content-type": [
+ "application/json"
+ ],
+ "referrer-policy": [
+ "no-referrer"
+ ],
+ "server": [
+ "Apache/2.4.10 (Debian)"
+ ],
+ "cache-control": [
+ "no-store"
+ ],
+ "x-content-type-options": [
+ "nosniff"
+ ],
+ "date": [
+ "Wed, 08 Apr 2020 21:13:01 GMT"
+ ],
+ "connection": [
+ "close"
+ ]
+ },
+ "body": {
+ "string": "{\"result\":{\"21006\":{\"id\":\"21006\",\"revisionID\":\"8387\",\"dateCreated\":\"1586380375\",\"dateModified\":\"1586380377\",\"sourceControlBaseRevision\":\"98d480e0d494906d9758c44c622951b429afd862\",\"sourceControlPath\":\"\\/\",\"sourceControlSystem\":\"hg\",\"branch\":\"default\",\"bookmark\":null,\"creationMethod\":\"phabsend\",\"description\":null,\"unitStatus\":\"0\",\"lintStatus\":\"0\",\"changes\":[{\"id\":\"56823\",\"metadata\":{\"line:first\":1,\"hash.effect\":\"cAKuVP31KNrx\"},\"oldPath\":\"file.txt\",\"currentPath\":\"file.txt\",\"awayPaths\":[],\"oldProperties\":[],\"newProperties\":[],\"type\":\"2\",\"fileType\":\"1\",\"commitHash\":null,\"addLines\":\"1\",\"delLines\":\"1\",\"hunks\":[{\"oldOffset\":\"1\",\"newOffset\":\"1\",\"oldLength\":\"1\",\"newLength\":\"1\",\"addLines\":null,\"delLines\":null,\"isMissingOldNewline\":null,\"isMissingNewNewline\":null,\"corpus\":\"-added\\n+mod3\\n\"}]}],\"properties\":{\"hg:meta\":{\"branch\":\"default\",\"date\":\"0 0\",\"node\":\"921f8265efbd92e92bfa5d7a0e047908de9844a5\",\"parent\":\"98d480e0d494906d9758c44c622951b429afd862\",\"user\":\"test\"},\"local:commits\":{\"602c4e7382436988e46c0d56fb3883968302b3f6\":{\"author\":\"test\",\"authorEmail\":\"test\",\"branch\":\"default\",\"commit\":\"602c4e7382436988e46c0d56fb3883968302b3f6\",\"parents\":[\"98d480e0d494906d9758c44c622951b429afd862\"],\"time\":0},\"832553266fe8c3330d968e6987df4ae793483b2b\":{\"author\":\"test\",\"authorEmail\":\"test\",\"branch\":\"default\",\"commit\":\"832553266fe8c3330d968e6987df4ae793483b2b\",\"parents\":[\"602c4e7382436988e46c0d56fb3883968302b3f6\"],\"time\":0},\"921f8265efbd92e92bfa5d7a0e047908de9844a5\":{\"author\":\"test\",\"authorEmail\":\"test\",\"branch\":\"default\",\"commit\":\"921f8265efbd92e92bfa5d7a0e047908de9844a5\",\"parents\":[\"832553266fe8c3330d968e6987df4ae793483b2b\"],\"time\":0}}},\"authorName\":\"test\",\"authorEmail\":\"test\"}},\"error_code\":null,\"error_info\":null}"
+ },
+ "status": {
+ "code": 200,
+ "message": "OK"
+ }
+ }
+ },
+ {
+ "request": {
+ "headers": {
+ "content-length": [
+ "183"
+ ],
+ "accept": [
+ "application/mercurial-0.1"
+ ],
+ "content-type": [
+ "application/x-www-form-urlencoded"
+ ],
+ "host": [
+ "phab.mercurial-scm.org"
+ ],
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.3.2+463-d35fbae9dd33+20200408)"
+ ]
+ },
+ "body": "__conduit__=1¶ms=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22constraints%22%3A+%7B%22callsigns%22%3A+%5B%22HG%22%5D%7D%7D&output=json",
+ "uri": "https://phab.mercurial-scm.org//api/diffusion.repository.search",
+ "method": "POST"
+ },
+ "response": {
+ "headers": {
+ "x-xss-protection": [
+ "1; mode=block"
+ ],
+ "strict-transport-security": [
+ "max-age=0; includeSubdomains; preload"
+ ],
+ "transfer-encoding": [
+ "chunked"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
+ ],
+ "content-type": [
+ "application/json"
+ ],
+ "referrer-policy": [
+ "no-referrer"
+ ],
+ "server": [
+ "Apache/2.4.10 (Debian)"
+ ],
+ "cache-control": [
+ "no-store"
+ ],
+ "x-content-type-options": [
+ "nosniff"
+ ],
+ "date": [
+ "Wed, 08 Apr 2020 21:13:02 GMT"
+ ]
+ },
+ "body": {
+ "string": "{\"result\":{\"data\":[{\"id\":2,\"type\":\"REPO\",\"phid\":\"PHID-REPO-bvunnehri4u2isyr7bc3\",\"fields\":{\"name\":\"Mercurial\",\"vcs\":\"hg\",\"callsign\":\"HG\",\"shortName\":null,\"status\":\"active\",\"isImporting\":false,\"almanacServicePHID\":null,\"refRules\":{\"fetchRules\":[],\"trackRules\":[],\"permanentRefRules\":[]},\"spacePHID\":null,\"dateCreated\":1498761653,\"dateModified\":1500403184,\"policy\":{\"view\":\"public\",\"edit\":\"admin\",\"diffusion.push\":\"users\"}},\"attachments\":{}}],\"maps\":{},\"query\":{\"queryKey\":null},\"cursor\":{\"limit\":100,\"after\":null,\"before\":null,\"order\":null}},\"error_code\":null,\"error_info\":null}"
+ },
+ "status": {
+ "code": 200,
+ "message": "OK"
+ }
+ }
+ },
+ {
+ "request": {
+ "headers": {
+ "content-length": [
+ "1736"
+ ],
+ "accept": [
+ "application/mercurial-0.1"
+ ],
+ "content-type": [
+ "application/x-www-form-urlencoded"
+ ],
+ "host": [
+ "phab.mercurial-scm.org"
+ ],
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.3.2+463-d35fbae9dd33+20200408)"
+ ]
+ },
+ "body": "__conduit__=1¶ms=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22bookmark%22%3A+null%2C+%22branch%22%3A+%22default%22%2C+%22changes%22%3A+%7B%22file.txt%22%3A+%7B%22addLines%22%3A+1%2C+%22awayPaths%22%3A+%5B%5D%2C+%22commitHash%22%3A+null%2C+%22currentPath%22%3A+%22file.txt%22%2C+%22delLines%22%3A+1%2C+%22fileType%22%3A+1%2C+%22hunks%22%3A+%5B%7B%22addLines%22%3A+1%2C+%22corpus%22%3A+%22-added%5Cn%2Bmod3%5Cn%22%2C+%22delLines%22%3A+1%2C+%22newLength%22%3A+1%2C+%22newOffset%22%3A+1%2C+%22oldLength%22%3A+1%2C+%22oldOffset%22%3A+1%7D%5D%2C+%22metadata%22%3A+%7B%7D%2C+%22newProperties%22%3A+%7B%7D%2C+%22oldPath%22%3A+%22file.txt%22%2C+%22oldProperties%22%3A+%7B%7D%2C+%22type%22%3A+2%7D%2C+%22file2.txt%22%3A+%7B%22addLines%22%3A+1%2C+%22awayPaths%22%3A+%5B%5D%2C+%22commitHash%22%3A+null%2C+%22currentPath%22%3A+%22file2.txt%22%2C+%22delLines%22%3A+0%2C+%22fileType%22%3A+1%2C+%22hunks%22%3A+%5B%7B%22addLines%22%3A+1%2C+%22corpus%22%3A+%22%2Bmodified%5Cn%22%2C+%22delLines%22%3A+0%2C+%22newLength%22%3A+1%2C+%22newOffset%22%3A+1%2C+%22oldLength%22%3A+0%2C+%22oldOffset%22%3A+0%7D%5D%2C+%22metadata%22%3A+%7B%7D%2C+%22newProperties%22%3A+%7B%22unix%3Afilemode%22%3A+%22100644%22%7D%2C+%22oldPath%22%3A+null%2C+%22oldProperties%22%3A+%7B%7D%2C+%22type%22%3A+1%7D%7D%2C+%22creationMethod%22%3A+%22phabsend%22%2C+%22lintStatus%22%3A+%22none%22%2C+%22repositoryPHID%22%3A+%22PHID-REPO-bvunnehri4u2isyr7bc3%22%2C+%22sourceControlBaseRevision%22%3A+%2298d480e0d494906d9758c44c622951b429afd862%22%2C+%22sourceControlPath%22%3A+%22%2F%22%2C+%22sourceControlSystem%22%3A+%22hg%22%2C+%22sourceMachine%22%3A+%22%22%2C+%22sourcePath%22%3A+%22%2F%22%2C+%22unitStatus%22%3A+%22none%22%7D&output=json",
+ "uri": "https://phab.mercurial-scm.org//api/differential.creatediff",
+ "method": "POST"
+ },
+ "response": {
+ "headers": {
+ "x-xss-protection": [
+ "1; mode=block"
+ ],
+ "strict-transport-security": [
+ "max-age=0; includeSubdomains; preload"
+ ],
+ "transfer-encoding": [
+ "chunked"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
+ ],
+ "content-type": [
+ "application/json"
+ ],
+ "referrer-policy": [
+ "no-referrer"
+ ],
+ "server": [
+ "Apache/2.4.10 (Debian)"
+ ],
+ "cache-control": [
+ "no-store"
+ ],
+ "x-content-type-options": [
+ "nosniff"
+ ],
+ "date": [
+ "Wed, 08 Apr 2020 21:13:02 GMT"
+ ]
+ },
+ "body": {
+ "string": "{\"result\":{\"diffid\":21007,\"phid\":\"PHID-DIFF-zkluur4ou3j4pswpif6e\",\"uri\":\"https:\\/\\/phab.mercurial-scm.org\\/differential\\/diff\\/21007\\/\"},\"error_code\":null,\"error_info\":null}"
+ },
+ "status": {
+ "code": 200,
+ "message": "OK"
+ }
+ }
+ },
+ {
+ "request": {
+ "headers": {
+ "content-length": [
+ "482"
+ ],
+ "accept": [
+ "application/mercurial-0.1"
+ ],
+ "content-type": [
+ "application/x-www-form-urlencoded"
+ ],
+ "host": [
+ "phab.mercurial-scm.org"
+ ],
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.3.2+463-d35fbae9dd33+20200408)"
+ ]
+ },
+ "body": "__conduit__=1¶ms=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22data%22%3A+%22%7B%5C%22branch%5C%22%3A+%5C%22default%5C%22%2C+%5C%22date%5C%22%3A+%5C%220+0%5C%22%2C+%5C%22node%5C%22%3A+%5C%22e4edb1fe3565eaaecc1787ada4fcc2b7e9018fac%5C%22%2C+%5C%22parent%5C%22%3A+%5C%2298d480e0d494906d9758c44c622951b429afd862%5C%22%2C+%5C%22user%5C%22%3A+%5C%22test%5C%22%7D%22%2C+%22diff_id%22%3A+21007%2C+%22name%22%3A+%22hg%3Ameta%22%7D&output=json",
+ "uri": "https://phab.mercurial-scm.org//api/differential.setdiffproperty",
+ "method": "POST"
+ },
+ "response": {
+ "headers": {
+ "x-xss-protection": [
+ "1; mode=block"
+ ],
+ "strict-transport-security": [
+ "max-age=0; includeSubdomains; preload"
+ ],
+ "transfer-encoding": [
+ "chunked"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
+ ],
+ "content-type": [
+ "application/json"
+ ],
+ "referrer-policy": [
+ "no-referrer"
+ ],
+ "server": [
+ "Apache/2.4.10 (Debian)"
+ ],
+ "cache-control": [
+ "no-store"
+ ],
+ "x-content-type-options": [
+ "nosniff"
+ ],
+ "date": [
+ "Wed, 08 Apr 2020 21:13:03 GMT"
+ ]
+ },
+ "body": {
+ "string": "{\"result\":null,\"error_code\":null,\"error_info\":null}"
+ },
+ "status": {
+ "code": 200,
+ "message": "OK"
+ }
+ }
+ },
+ {
+ "request": {
+ "headers": {
+ "content-length": [
+ "1362"
+ ],
+ "accept": [
+ "application/mercurial-0.1"
+ ],
+ "content-type": [
+ "application/x-www-form-urlencoded"
+ ],
+ "host": [
+ "phab.mercurial-scm.org"
+ ],
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.3.2+463-d35fbae9dd33+20200408)"
+ ]
+ },
+ "body": "__conduit__=1¶ms=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22data%22%3A+%22%7B%5C%220124e5474c880e4fb40c8326ad2b75ae3e57ee5f%5C%22%3A+%7B%5C%22author%5C%22%3A+%5C%22test%5C%22%2C+%5C%22authorEmail%5C%22%3A+%5C%22test%5C%22%2C+%5C%22branch%5C%22%3A+%5C%22default%5C%22%2C+%5C%22commit%5C%22%3A+%5C%220124e5474c880e4fb40c8326ad2b75ae3e57ee5f%5C%22%2C+%5C%22parents%5C%22%3A+%5B%5C%22602c4e7382436988e46c0d56fb3883968302b3f6%5C%22%5D%2C+%5C%22time%5C%22%3A+0%7D%2C+%5C%22602c4e7382436988e46c0d56fb3883968302b3f6%5C%22%3A+%7B%5C%22author%5C%22%3A+%5C%22test%5C%22%2C+%5C%22authorEmail%5C%22%3A+%5C%22test%5C%22%2C+%5C%22branch%5C%22%3A+%5C%22default%5C%22%2C+%5C%22commit%5C%22%3A+%5C%22602c4e7382436988e46c0d56fb3883968302b3f6%5C%22%2C+%5C%22parents%5C%22%3A+%5B%5C%2298d480e0d494906d9758c44c622951b429afd862%5C%22%5D%2C+%5C%22time%5C%22%3A+0%7D%2C+%5C%22e4edb1fe3565eaaecc1787ada4fcc2b7e9018fac%5C%22%3A+%7B%5C%22author%5C%22%3A+%5C%22test%5C%22%2C+%5C%22authorEmail%5C%22%3A+%5C%22test%5C%22%2C+%5C%22branch%5C%22%3A+%5C%22default%5C%22%2C+%5C%22commit%5C%22%3A+%5C%22e4edb1fe3565eaaecc1787ada4fcc2b7e9018fac%5C%22%2C+%5C%22parents%5C%22%3A+%5B%5C%220124e5474c880e4fb40c8326ad2b75ae3e57ee5f%5C%22%5D%2C+%5C%22time%5C%22%3A+0%7D%7D%22%2C+%22diff_id%22%3A+21007%2C+%22name%22%3A+%22local%3Acommits%22%7D&output=json",
+ "uri": "https://phab.mercurial-scm.org//api/differential.setdiffproperty",
+ "method": "POST"
+ },
+ "response": {
+ "headers": {
+ "x-xss-protection": [
+ "1; mode=block"
+ ],
+ "strict-transport-security": [
+ "max-age=0; includeSubdomains; preload"
+ ],
+ "transfer-encoding": [
+ "chunked"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
+ ],
+ "content-type": [
+ "application/json"
+ ],
+ "referrer-policy": [
+ "no-referrer"
+ ],
+ "server": [
+ "Apache/2.4.10 (Debian)"
+ ],
+ "cache-control": [
+ "no-store"
+ ],
+ "x-content-type-options": [
+ "nosniff"
+ ],
+ "date": [
+ "Wed, 08 Apr 2020 21:13:03 GMT"
+ ]
+ },
+ "body": {
+ "string": "{\"result\":null,\"error_code\":null,\"error_info\":null}"
+ },
+ "status": {
+ "code": 200,
+ "message": "OK"
+ }
+ }
+ },
+ {
+ "request": {
+ "headers": {
+ "content-length": [
+ "364"
+ ],
+ "accept": [
+ "application/mercurial-0.1"
+ ],
+ "content-type": [
+ "application/x-www-form-urlencoded"
+ ],
+ "host": [
+ "phab.mercurial-scm.org"
+ ],
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.3.2+463-d35fbae9dd33+20200408)"
+ ]
+ },
+ "body": "__conduit__=1¶ms=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22corpus%22%3A+%22one%3A+first+commit+to+review%5Cn%5CnThis+file+was+modified+with+%27mod1%27+as+its+contents.%5Cn%5CnTest+Plan%3A%5CnLOL%21++What+testing%3F%21%5Cn%5CnDifferential+Revision%3A+https%3A%2F%2Fphab.mercurial-scm.org%2FD8387%22%7D&output=json",
+ "uri": "https://phab.mercurial-scm.org//api/differential.parsecommitmessage",
+ "method": "POST"
+ },
+ "response": {
+ "headers": {
+ "x-xss-protection": [
+ "1; mode=block"
+ ],
+ "strict-transport-security": [
+ "max-age=0; includeSubdomains; preload"
+ ],
+ "transfer-encoding": [
+ "chunked"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
+ ],
+ "content-type": [
+ "application/json"
+ ],
+ "referrer-policy": [
+ "no-referrer"
+ ],
+ "server": [
+ "Apache/2.4.10 (Debian)"
+ ],
+ "cache-control": [
+ "no-store"
+ ],
+ "x-content-type-options": [
+ "nosniff"
+ ],
+ "date": [
+ "Wed, 08 Apr 2020 21:13:04 GMT"
+ ]
+ },
+ "body": {
+ "string": "{\"result\":{\"errors\":[],\"fields\":{\"title\":\"one: first commit to review\",\"testPlan\":\"LOL! What testing?!\",\"revisionID\":8387,\"summary\":\"This file was modified with 'mod1' as its contents.\"},\"revisionIDFieldInfo\":{\"value\":8387,\"validDomain\":\"https:\\/\\/phab.mercurial-scm.org\"},\"transactions\":[{\"type\":\"title\",\"value\":\"one: first commit to review\"},{\"type\":\"testPlan\",\"value\":\"LOL! What testing?!\"},{\"type\":\"summary\",\"value\":\"This file was modified with 'mod1' as its contents.\"}]},\"error_code\":null,\"error_info\":null}"
+ },
+ "status": {
+ "code": 200,
+ "message": "OK"
+ }
+ }
+ },
+ {
+ "request": {
+ "headers": {
+ "content-length": [
+ "361"
+ ],
+ "accept": [
+ "application/mercurial-0.1"
+ ],
+ "content-type": [
+ "application/x-www-form-urlencoded"
+ ],
+ "host": [
+ "phab.mercurial-scm.org"
+ ],
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.3.2+463-d35fbae9dd33+20200408)"
+ ]
+ },
+ "body": "__conduit__=1¶ms=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22corpus%22%3A+%22two%3A+second+commit+to+review%5Cn%5CnThis+file+was+modified+with+%27mod2%27+as+its+contents.%5Cn%5CnTest+Plan%3A%5CnHaha%21+yeah%2C+right.%5Cn%5CnDifferential+Revision%3A+https%3A%2F%2Fphab.mercurial-scm.org%2FD8387%22%7D&output=json",
+ "uri": "https://phab.mercurial-scm.org//api/differential.parsecommitmessage",
+ "method": "POST"
+ },
+ "response": {
+ "headers": {
+ "x-xss-protection": [
+ "1; mode=block"
+ ],
+ "strict-transport-security": [
+ "max-age=0; includeSubdomains; preload"
+ ],
+ "transfer-encoding": [
+ "chunked"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
+ ],
+ "content-type": [
+ "application/json"
+ ],
+ "referrer-policy": [
+ "no-referrer"
+ ],
+ "server": [
+ "Apache/2.4.10 (Debian)"
+ ],
+ "cache-control": [
+ "no-store"
+ ],
+ "x-content-type-options": [
+ "nosniff"
+ ],
+ "date": [
+ "Wed, 08 Apr 2020 21:13:04 GMT"
+ ]
+ },
+ "body": {
+ "string": "{\"result\":{\"errors\":[],\"fields\":{\"title\":\"two: second commit to review\",\"testPlan\":\"Haha! yeah, right.\",\"revisionID\":8387,\"summary\":\"This file was modified with 'mod2' as its contents.\"},\"revisionIDFieldInfo\":{\"value\":8387,\"validDomain\":\"https:\\/\\/phab.mercurial-scm.org\"},\"transactions\":[{\"type\":\"title\",\"value\":\"two: second commit to review\"},{\"type\":\"testPlan\",\"value\":\"Haha! yeah, right.\"},{\"type\":\"summary\",\"value\":\"This file was modified with 'mod2' as its contents.\"}]},\"error_code\":null,\"error_info\":null}"
+ },
+ "status": {
+ "code": 200,
+ "message": "OK"
+ }
+ }
+ },
+ {
+ "request": {
+ "headers": {
+ "content-length": [
+ "260"
+ ],
+ "accept": [
+ "application/mercurial-0.1"
+ ],
+ "content-type": [
+ "application/x-www-form-urlencoded"
+ ],
+ "host": [
+ "phab.mercurial-scm.org"
+ ],
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.3.2+463-d35fbae9dd33+20200408)"
+ ]
+ },
+ "body": "__conduit__=1¶ms=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22corpus%22%3A+%223%3A+a+commit+with+no+detailed+message%5Cn%5CnDifferential+Revision%3A+https%3A%2F%2Fphab.mercurial-scm.org%2FD8387%22%7D&output=json",
+ "uri": "https://phab.mercurial-scm.org//api/differential.parsecommitmessage",
+ "method": "POST"
+ },
+ "response": {
+ "headers": {
+ "x-xss-protection": [
+ "1; mode=block"
+ ],
+ "strict-transport-security": [
+ "max-age=0; includeSubdomains; preload"
+ ],
+ "transfer-encoding": [
+ "chunked"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
+ ],
+ "content-type": [
+ "application/json"
+ ],
+ "referrer-policy": [
+ "no-referrer"
+ ],
+ "server": [
+ "Apache/2.4.10 (Debian)"
+ ],
+ "cache-control": [
+ "no-store"
+ ],
+ "x-content-type-options": [
+ "nosniff"
+ ],
+ "date": [
+ "Wed, 08 Apr 2020 21:13:04 GMT"
+ ]
+ },
+ "body": {
+ "string": "{\"result\":{\"errors\":[],\"fields\":{\"title\":\"3: a commit with no detailed message\",\"revisionID\":8387},\"revisionIDFieldInfo\":{\"value\":8387,\"validDomain\":\"https:\\/\\/phab.mercurial-scm.org\"},\"transactions\":[{\"type\":\"title\",\"value\":\"3: a commit with no detailed message\"}]},\"error_code\":null,\"error_info\":null}"
+ },
+ "status": {
+ "code": 200,
+ "message": "OK"
+ }
+ }
+ },
+ {
+ "request": {
+ "headers": {
+ "content-length": [
+ "744"
+ ],
+ "accept": [
+ "application/mercurial-0.1"
+ ],
+ "content-type": [
+ "application/x-www-form-urlencoded"
+ ],
+ "host": [
+ "phab.mercurial-scm.org"
+ ],
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.3.2+463-d35fbae9dd33+20200408)"
+ ]
+ },
+ "body": "__conduit__=1¶ms=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22objectIdentifier%22%3A+8387%2C+%22transactions%22%3A+%5B%7B%22type%22%3A+%22update%22%2C+%22value%22%3A+%22PHID-DIFF-zkluur4ou3j4pswpif6e%22%7D%2C+%7B%22type%22%3A+%22title%22%2C+%22value%22%3A+%22one%3A+first+commit+to+review%22%7D%2C+%7B%22type%22%3A+%22testPlan%22%2C+%22value%22%3A+%22LOL%21++What+testing%3F%21%5Cn%5CnHaha%21+yeah%2C+right.%22%7D%2C+%7B%22type%22%3A+%22summary%22%2C+%22value%22%3A+%22This+file+was+modified+with+%27mod1%27+as+its+contents.%5Cn%5Cntwo%3A+second+commit+to+review%5Cn%5CnThis+file+was+modified+with+%27mod2%27+as+its+contents.%5Cn%5Cn3%3A+a+commit+with+no+detailed+message%22%7D%5D%7D&output=json",
+ "uri": "https://phab.mercurial-scm.org//api/differential.revision.edit",
+ "method": "POST"
+ },
+ "response": {
+ "headers": {
+ "x-xss-protection": [
+ "1; mode=block"
+ ],
+ "strict-transport-security": [
+ "max-age=0; includeSubdomains; preload"
+ ],
+ "transfer-encoding": [
+ "chunked"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
+ ],
+ "content-type": [
+ "application/json"
+ ],
+ "referrer-policy": [
+ "no-referrer"
+ ],
+ "server": [
+ "Apache/2.4.10 (Debian)"
+ ],
+ "cache-control": [
+ "no-store"
+ ],
+ "x-content-type-options": [
+ "nosniff"
+ ],
+ "date": [
+ "Wed, 08 Apr 2020 21:13:05 GMT"
+ ]
+ },
+ "body": {
+ "string": "{\"result\":{\"object\":{\"id\":8387,\"phid\":\"PHID-DREV-6ov3pvjim4txejzekw2t\"},\"transactions\":[{\"phid\":\"PHID-XACT-DREV-thuey6pboatact4\"}]},\"error_code\":null,\"error_info\":null}"
+ },
+ "status": {
+ "code": 200,
+ "message": "OK"
+ }
+ }
+ },
+ {
+ "request": {
+ "headers": {
+ "content-length": [
+ "146"
+ ],
+ "accept": [
+ "application/mercurial-0.1"
+ ],
+ "content-type": [
+ "application/x-www-form-urlencoded"
+ ],
+ "host": [
+ "phab.mercurial-scm.org"
+ ],
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.3.2+463-d35fbae9dd33+20200408)"
+ ]
+ },
+ "body": "__conduit__=1¶ms=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22ids%22%3A+%5B8387%5D%7D&output=json",
+ "uri": "https://phab.mercurial-scm.org//api/differential.query",
+ "method": "POST"
+ },
+ "response": {
+ "headers": {
+ "x-xss-protection": [
+ "1; mode=block"
+ ],
+ "strict-transport-security": [
+ "max-age=0; includeSubdomains; preload"
+ ],
+ "transfer-encoding": [
+ "chunked"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
+ ],
+ "content-type": [
+ "application/json"
+ ],
+ "referrer-policy": [
+ "no-referrer"
+ ],
+ "server": [
+ "Apache/2.4.10 (Debian)"
+ ],
+ "cache-control": [
+ "no-store"
+ ],
+ "x-content-type-options": [
+ "nosniff"
+ ],
+ "date": [
+ "Wed, 08 Apr 2020 21:13:06 GMT"
+ ]
+ },
+ "body": {
+ "string": "{\"result\":[{\"id\":\"8387\",\"phid\":\"PHID-DREV-6ov3pvjim4txejzekw2t\",\"title\":\"one: first commit to review\",\"uri\":\"https:\\/\\/phab.mercurial-scm.org\\/D8387\",\"dateCreated\":\"1586380377\",\"dateModified\":\"1586380385\",\"authorPHID\":\"PHID-USER-tzhaient733lwrlbcag5\",\"status\":\"0\",\"statusName\":\"Needs Review\",\"properties\":{\"draft.broadcast\":true,\"lines.added\":2,\"lines.removed\":1},\"branch\":\"default\",\"summary\":\"This file was modified with 'mod1' as its contents.\\n\\ntwo: second commit to review\\n\\nThis file was modified with 'mod2' as its contents.\\n\\n3: a commit with no detailed message\",\"testPlan\":\"LOL! What testing?!\\n\\nHaha! yeah, right.\",\"lineCount\":\"3\",\"activeDiffPHID\":\"PHID-DIFF-zkluur4ou3j4pswpif6e\",\"diffs\":[\"21007\",\"21006\"],\"commits\":[],\"reviewers\":{\"PHID-PROJ-3dvcxzznrjru2xmmses3\":\"PHID-PROJ-3dvcxzznrjru2xmmses3\"},\"ccs\":[\"PHID-USER-q42dn7cc3donqriafhjx\"],\"hashes\":[[\"hgcm\",\"\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\"],[\"hgcm\",\"\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\"],[\"hgcm\",\"\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\"]],\"auxiliary\":{\"phabricator:projects\":[],\"phabricator:depends-on\":[]},\"repositoryPHID\":\"PHID-REPO-bvunnehri4u2isyr7bc3\",\"source
Path\":\"\\/\"}],\"error_code\":null,\"error_info\":null}"
+ },
+ "status": {
+ "code": 200,
+ "message": "OK"
+ }
+ }
+ },
+ {
+ "request": {
+ "headers": {
+ "content-length": [
+ "482"
+ ],
+ "accept": [
+ "application/mercurial-0.1"
+ ],
+ "content-type": [
+ "application/x-www-form-urlencoded"
+ ],
+ "host": [
+ "phab.mercurial-scm.org"
+ ],
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.3.2+463-d35fbae9dd33+20200408)"
+ ]
+ },
+ "body": "__conduit__=1¶ms=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22data%22%3A+%22%7B%5C%22branch%5C%22%3A+%5C%22default%5C%22%2C+%5C%22date%5C%22%3A+%5C%220+0%5C%22%2C+%5C%22node%5C%22%3A+%5C%22e4edb1fe3565eaaecc1787ada4fcc2b7e9018fac%5C%22%2C+%5C%22parent%5C%22%3A+%5C%2298d480e0d494906d9758c44c622951b429afd862%5C%22%2C+%5C%22user%5C%22%3A+%5C%22test%5C%22%7D%22%2C+%22diff_id%22%3A+21007%2C+%22name%22%3A+%22hg%3Ameta%22%7D&output=json",
+ "uri": "https://phab.mercurial-scm.org//api/differential.setdiffproperty",
+ "method": "POST"
+ },
+ "response": {
+ "headers": {
+ "x-xss-protection": [
+ "1; mode=block"
+ ],
+ "strict-transport-security": [
+ "max-age=0; includeSubdomains; preload"
+ ],
+ "transfer-encoding": [
+ "chunked"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
+ ],
+ "content-type": [
+ "application/json"
+ ],
+ "referrer-policy": [
+ "no-referrer"
+ ],
+ "server": [
+ "Apache/2.4.10 (Debian)"
+ ],
+ "cache-control": [
+ "no-store"
+ ],
+ "x-content-type-options": [
+ "nosniff"
+ ],
+ "date": [
+ "Wed, 08 Apr 2020 21:13:06 GMT"
+ ]
+ },
+ "body": {
+ "string": "{\"result\":null,\"error_code\":null,\"error_info\":null}"
+ },
+ "status": {
+ "code": 200,
+ "message": "OK"
+ }
+ }
+ },
+ {
+ "request": {
+ "headers": {
+ "content-length": [
+ "1362"
+ ],
+ "accept": [
+ "application/mercurial-0.1"
+ ],
+ "content-type": [
+ "application/x-www-form-urlencoded"
+ ],
+ "host": [
+ "phab.mercurial-scm.org"
+ ],
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.3.2+463-d35fbae9dd33+20200408)"
+ ]
+ },
+ "body": "__conduit__=1¶ms=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22data%22%3A+%22%7B%5C%220124e5474c880e4fb40c8326ad2b75ae3e57ee5f%5C%22%3A+%7B%5C%22author%5C%22%3A+%5C%22test%5C%22%2C+%5C%22authorEmail%5C%22%3A+%5C%22test%5C%22%2C+%5C%22branch%5C%22%3A+%5C%22default%5C%22%2C+%5C%22commit%5C%22%3A+%5C%220124e5474c880e4fb40c8326ad2b75ae3e57ee5f%5C%22%2C+%5C%22parents%5C%22%3A+%5B%5C%22602c4e7382436988e46c0d56fb3883968302b3f6%5C%22%5D%2C+%5C%22time%5C%22%3A+0%7D%2C+%5C%22602c4e7382436988e46c0d56fb3883968302b3f6%5C%22%3A+%7B%5C%22author%5C%22%3A+%5C%22test%5C%22%2C+%5C%22authorEmail%5C%22%3A+%5C%22test%5C%22%2C+%5C%22branch%5C%22%3A+%5C%22default%5C%22%2C+%5C%22commit%5C%22%3A+%5C%22602c4e7382436988e46c0d56fb3883968302b3f6%5C%22%2C+%5C%22parents%5C%22%3A+%5B%5C%2298d480e0d494906d9758c44c622951b429afd862%5C%22%5D%2C+%5C%22time%5C%22%3A+0%7D%2C+%5C%22e4edb1fe3565eaaecc1787ada4fcc2b7e9018fac%5C%22%3A+%7B%5C%22author%5C%22%3A+%5C%22test%5C%22%2C+%5C%22authorEmail%5C%22%3A+%5C%22test%5C%22%2C+%5C%22branch%5C%22%3A+%5C%22default%5C%22%2C+%5C%22commit%5C%22%3A+%5C%22e4edb1fe3565eaaecc1787ada4fcc2b7e9018fac%5C%22%2C+%5C%22parents%5C%22%3A+%5B%5C%220124e5474c880e4fb40c8326ad2b75ae3e57ee5f%5C%22%5D%2C+%5C%22time%5C%22%3A+0%7D%7D%22%2C+%22diff_id%22%3A+21007%2C+%22name%22%3A+%22local%3Acommits%22%7D&output=json",
+ "uri": "https://phab.mercurial-scm.org//api/differential.setdiffproperty",
+ "method": "POST"
+ },
+ "response": {
+ "headers": {
+ "x-xss-protection": [
+ "1; mode=block"
+ ],
+ "strict-transport-security": [
+ "max-age=0; includeSubdomains; preload"
+ ],
+ "transfer-encoding": [
+ "chunked"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
+ ],
+ "content-type": [
+ "application/json"
+ ],
+ "referrer-policy": [
+ "no-referrer"
+ ],
+ "server": [
+ "Apache/2.4.10 (Debian)"
+ ],
+ "cache-control": [
+ "no-store"
+ ],
+ "x-content-type-options": [
+ "nosniff"
+ ],
+ "date": [
+ "Wed, 08 Apr 2020 21:13:06 GMT"
+ ]
+ },
+ "body": {
+ "string": "{\"result\":null,\"error_code\":null,\"error_info\":null}"
+ },
+ "status": {
+ "code": 200,
+ "message": "OK"
+ }
+ }
+ }
+ ],
+ "version": 1
+}
\ No newline at end of file
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/phabricator/phabsend-no-restack-orphan.json Thu Apr 16 22:51:09 2020 +0530
@@ -0,0 +1,1229 @@
+{
+ "version": 1,
+ "interactions": [
+ {
+ "response": {
+ "headers": {
+ "referrer-policy": [
+ "no-referrer"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "date": [
+ "Wed, 15 Apr 2020 23:43:54 GMT"
+ ],
+ "server": [
+ "Apache/2.4.10 (Debian)"
+ ],
+ "content-type": [
+ "application/json"
+ ],
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
+ ],
+ "transfer-encoding": [
+ "chunked"
+ ],
+ "x-xss-protection": [
+ "1; mode=block"
+ ],
+ "strict-transport-security": [
+ "max-age=0; includeSubdomains; preload"
+ ],
+ "cache-control": [
+ "no-store"
+ ],
+ "x-content-type-options": [
+ "nosniff"
+ ]
+ },
+ "status": {
+ "code": 200,
+ "message": "OK"
+ },
+ "body": {
+ "string": "{\"result\":{\"21111\":{\"id\":\"21111\",\"revisionID\":\"8434\",\"dateCreated\":\"1586994225\",\"dateModified\":\"1586994227\",\"sourceControlBaseRevision\":\"5cbade24e0fae40d67c568e86a978a2a946b9aed\",\"sourceControlPath\":\"\\/\",\"sourceControlSystem\":\"hg\",\"branch\":\"default\",\"bookmark\":null,\"creationMethod\":\"phabsend\",\"description\":null,\"unitStatus\":\"0\",\"lintStatus\":\"0\",\"changes\":[{\"id\":\"57079\",\"metadata\":{\"line:first\":1,\"hash.effect\":\"xfwwSr8O83OT\"},\"oldPath\":\"file1.txt\",\"currentPath\":\"file1.txt\",\"awayPaths\":[],\"oldProperties\":[],\"newProperties\":[],\"type\":\"2\",\"fileType\":\"1\",\"commitHash\":null,\"addLines\":\"1\",\"delLines\":\"1\",\"hunks\":[{\"oldOffset\":\"1\",\"newOffset\":\"1\",\"oldLength\":\"1\",\"newLength\":\"1\",\"addLines\":null,\"delLines\":null,\"isMissingOldNewline\":null,\"isMissingNewNewline\":null,\"corpus\":\"-add\\n+mod1\\n\"}]}],\"properties\":{\"hg:meta\":{\"branch\":\"default\",\"date\":\"0 
0\",\"node\":\"876a60d024de493e35a1c6f963f2604056cdc0b9\",\"parent\":\"5cbade24e0fae40d67c568e86a978a2a946b9aed\",\"user\":\"test\"},\"local:commits\":{\"876a60d024de493e35a1c6f963f2604056cdc0b9\":{\"author\":\"test\",\"authorEmail\":\"test\",\"branch\":\"default\",\"commit\":\"876a60d024de493e35a1c6f963f2604056cdc0b9\",\"parents\":[\"5cbade24e0fae40d67c568e86a978a2a946b9aed\"],\"time\":0}}},\"authorName\":\"test\",\"authorEmail\":\"test\"},\"21110\":{\"id\":\"21110\",\"revisionID\":\"8433\",\"dateCreated\":\"1586994219\",\"dateModified\":\"1586994221\",\"sourceControlBaseRevision\":\"d549263bcb2db54042adf048047b368f1ed246df\",\"sourceControlPath\":\"\\/\",\"sourceControlSystem\":\"hg\",\"branch\":\"default\",\"bookmark\":null,\"creationMethod\":\"phabsend\",\"description\":null,\"unitStatus\":\"0\",\"lintStatus\":\"0\",\"changes\":[{\"id\":\"57078\",\"metadata\":{\"line:first\":1,\"hash.effect\":\"ftEQkHimiyJo\"},\"oldPath\":\"file1.txt\",\"currentPath\":\"file1.txt\",\"awayPaths\":[],\"oldProperties\":[],\"newProperties\":[],\"type\":\"2\",\"fileType\":\"1\",\"commitHash\":null,\"addLines\":\"1\",\"delLines\":\"1\",\"hunks\":[{\"oldOffset\":\"1\",\"newOffset\":\"1\",\"oldLength\":\"1\",\"newLength\":\"1\",\"addLines\":null,\"delLines\":null,\"isMissingOldNewline\":null,\"isMissingNewNewline\":null,\"corpus\":\"-mod1\\n+mod2\\n\"}]}],\"properties\":{\"hg:meta\":{\"branch\":\"default\",\"date\":\"0 0\",\"node\":\"0c6523cb1d0f560a958bcc0f4f938c91cb1141dc\",\"parent\":\"876a60d024de493e35a1c6f963f2604056cdc0b9\",\"user\":\"test\"},\"local:commits\":{\"0c6523cb1d0f560a958bcc0f4f938c91cb1141dc\":{\"author\":\"test\",\"authorEmail\":\"test\",\"branch\":\"default\",\"commit\":\"0c6523cb1d0f560a958bcc0f4f938c91cb1141dc\",\"parents\":[\"876a60d024de493e35a1c6f963f2604056cdc0b9\"],\"time\":0}}},\"authorName\":\"test\",\"authorEmail\":\"test\"}},\"error_code\":null,\"error_info\":null}"
+ }
+ },
+ "request": {
+ "headers": {
+ "content-length": [
+ "162"
+ ],
+ "content-type": [
+ "application/x-www-form-urlencoded"
+ ],
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.3.2+497-112d1ad30f88)"
+ ],
+ "accept": [
+ "application/mercurial-0.1"
+ ],
+ "host": [
+ "phab.mercurial-scm.org"
+ ]
+ },
+ "body": "output=json&__conduit__=1¶ms=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22revisionIDs%22%3A+%5B8434%2C+8433%5D%7D",
+ "uri": "https://phab.mercurial-scm.org//api/differential.querydiffs",
+ "method": "POST"
+ }
+ },
+ {
+ "response": {
+ "headers": {
+ "referrer-policy": [
+ "no-referrer"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "date": [
+ "Wed, 15 Apr 2020 23:43:55 GMT"
+ ],
+ "server": [
+ "Apache/2.4.10 (Debian)"
+ ],
+ "content-type": [
+ "application/json"
+ ],
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
+ ],
+ "transfer-encoding": [
+ "chunked"
+ ],
+ "x-xss-protection": [
+ "1; mode=block"
+ ],
+ "strict-transport-security": [
+ "max-age=0; includeSubdomains; preload"
+ ],
+ "cache-control": [
+ "no-store"
+ ],
+ "x-content-type-options": [
+ "nosniff"
+ ]
+ },
+ "status": {
+ "code": 200,
+ "message": "OK"
+ },
+ "body": {
+ "string": "{\"result\":null,\"error_code\":null,\"error_info\":null}"
+ }
+ },
+ "request": {
+ "headers": {
+ "content-length": [
+ "488"
+ ],
+ "content-type": [
+ "application/x-www-form-urlencoded"
+ ],
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.3.2+497-112d1ad30f88)"
+ ],
+ "accept": [
+ "application/mercurial-0.1"
+ ],
+ "host": [
+ "phab.mercurial-scm.org"
+ ]
+ },
+ "body": "output=json&__conduit__=1¶ms=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22data%22%3A+%22%7B%5C%22branch%5C%22%3A+%5C%22default%5C%22%2C+%5C%22date%5C%22%3A+%5C%220+0%5C%22%2C+%5C%22node%5C%22%3A+%5C%22876a60d024de493e35a1c6f963f2604056cdc0b9%5C%22%2C+%5C%22parent%5C%22%3A+%5C%225cbade24e0fae40d67c568e86a978a2a946b9aed%5C%22%2C+%5C%22user%5C%22%3A+%5C%22test%5C%22%7D%22%2C+%22diff_id%22%3A+%2221111%22%2C+%22name%22%3A+%22hg%3Ameta%22%7D",
+ "uri": "https://phab.mercurial-scm.org//api/differential.setdiffproperty",
+ "method": "POST"
+ }
+ },
+ {
+ "response": {
+ "headers": {
+ "referrer-policy": [
+ "no-referrer"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "date": [
+ "Wed, 15 Apr 2020 23:43:55 GMT"
+ ],
+ "server": [
+ "Apache/2.4.10 (Debian)"
+ ],
+ "content-type": [
+ "application/json"
+ ],
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
+ ],
+ "transfer-encoding": [
+ "chunked"
+ ],
+ "x-xss-protection": [
+ "1; mode=block"
+ ],
+ "strict-transport-security": [
+ "max-age=0; includeSubdomains; preload"
+ ],
+ "cache-control": [
+ "no-store"
+ ],
+ "x-content-type-options": [
+ "nosniff"
+ ]
+ },
+ "status": {
+ "code": 200,
+ "message": "OK"
+ },
+ "body": {
+ "string": "{\"result\":null,\"error_code\":null,\"error_info\":null}"
+ }
+ },
+ "request": {
+ "headers": {
+ "content-length": [
+ "600"
+ ],
+ "content-type": [
+ "application/x-www-form-urlencoded"
+ ],
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.3.2+497-112d1ad30f88)"
+ ],
+ "accept": [
+ "application/mercurial-0.1"
+ ],
+ "host": [
+ "phab.mercurial-scm.org"
+ ]
+ },
+ "body": "output=json&__conduit__=1¶ms=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22data%22%3A+%22%7B%5C%22876a60d024de493e35a1c6f963f2604056cdc0b9%5C%22%3A+%7B%5C%22author%5C%22%3A+%5C%22test%5C%22%2C+%5C%22authorEmail%5C%22%3A+%5C%22test%5C%22%2C+%5C%22branch%5C%22%3A+%5C%22default%5C%22%2C+%5C%22commit%5C%22%3A+%5C%22876a60d024de493e35a1c6f963f2604056cdc0b9%5C%22%2C+%5C%22parents%5C%22%3A+%5B%5C%225cbade24e0fae40d67c568e86a978a2a946b9aed%5C%22%5D%2C+%5C%22time%5C%22%3A+0%7D%7D%22%2C+%22diff_id%22%3A+%2221111%22%2C+%22name%22%3A+%22local%3Acommits%22%7D",
+ "uri": "https://phab.mercurial-scm.org//api/differential.setdiffproperty",
+ "method": "POST"
+ }
+ },
+ {
+ "response": {
+ "headers": {
+ "referrer-policy": [
+ "no-referrer"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "date": [
+ "Wed, 15 Apr 2020 23:43:56 GMT"
+ ],
+ "server": [
+ "Apache/2.4.10 (Debian)"
+ ],
+ "content-type": [
+ "application/json"
+ ],
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
+ ],
+ "transfer-encoding": [
+ "chunked"
+ ],
+ "x-xss-protection": [
+ "1; mode=block"
+ ],
+ "strict-transport-security": [
+ "max-age=0; includeSubdomains; preload"
+ ],
+ "cache-control": [
+ "no-store"
+ ],
+ "x-content-type-options": [
+ "nosniff"
+ ]
+ },
+ "status": {
+ "code": 200,
+ "message": "OK"
+ },
+ "body": {
+ "string": "{\"result\":{\"errors\":[],\"fields\":{\"title\":\"modified 1\",\"revisionID\":8434},\"revisionIDFieldInfo\":{\"value\":8434,\"validDomain\":\"https:\\/\\/phab.mercurial-scm.org\"},\"transactions\":[{\"type\":\"title\",\"value\":\"modified 1\"}]},\"error_code\":null,\"error_info\":null}"
+ }
+ },
+ "request": {
+ "headers": {
+ "content-length": [
+ "232"
+ ],
+ "content-type": [
+ "application/x-www-form-urlencoded"
+ ],
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.3.2+497-112d1ad30f88)"
+ ],
+ "accept": [
+ "application/mercurial-0.1"
+ ],
+ "host": [
+ "phab.mercurial-scm.org"
+ ]
+ },
+ "body": "output=json&__conduit__=1¶ms=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22corpus%22%3A+%22modified+1%5Cn%5CnDifferential+Revision%3A+https%3A%2F%2Fphab.mercurial-scm.org%2FD8434%22%7D",
+ "uri": "https://phab.mercurial-scm.org//api/differential.parsecommitmessage",
+ "method": "POST"
+ }
+ },
+ {
+ "response": {
+ "headers": {
+ "referrer-policy": [
+ "no-referrer"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "date": [
+ "Wed, 15 Apr 2020 23:43:56 GMT"
+ ],
+ "server": [
+ "Apache/2.4.10 (Debian)"
+ ],
+ "content-type": [
+ "application/json"
+ ],
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
+ ],
+ "transfer-encoding": [
+ "chunked"
+ ],
+ "x-xss-protection": [
+ "1; mode=block"
+ ],
+ "strict-transport-security": [
+ "max-age=0; includeSubdomains; preload"
+ ],
+ "cache-control": [
+ "no-store"
+ ],
+ "x-content-type-options": [
+ "nosniff"
+ ]
+ },
+ "status": {
+ "code": 200,
+ "message": "OK"
+ },
+ "body": {
+ "string": "{\"result\":{\"object\":{\"id\":8434,\"phid\":\"PHID-DREV-l5ocnglddqa4hwbdzcky\"},\"transactions\":[]},\"error_code\":null,\"error_info\":null}"
+ }
+ },
+ "request": {
+ "headers": {
+ "content-length": [
+ "251"
+ ],
+ "content-type": [
+ "application/x-www-form-urlencoded"
+ ],
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.3.2+497-112d1ad30f88)"
+ ],
+ "accept": [
+ "application/mercurial-0.1"
+ ],
+ "host": [
+ "phab.mercurial-scm.org"
+ ]
+ },
+ "body": "output=json&__conduit__=1¶ms=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22objectIdentifier%22%3A+8434%2C+%22transactions%22%3A+%5B%7B%22type%22%3A+%22title%22%2C+%22value%22%3A+%22modified+1%22%7D%5D%7D",
+ "uri": "https://phab.mercurial-scm.org//api/differential.revision.edit",
+ "method": "POST"
+ }
+ },
+ {
+ "response": {
+ "headers": {
+ "referrer-policy": [
+ "no-referrer"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "date": [
+ "Wed, 15 Apr 2020 23:43:57 GMT"
+ ],
+ "server": [
+ "Apache/2.4.10 (Debian)"
+ ],
+ "content-type": [
+ "application/json"
+ ],
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
+ ],
+ "transfer-encoding": [
+ "chunked"
+ ],
+ "x-xss-protection": [
+ "1; mode=block"
+ ],
+ "strict-transport-security": [
+ "max-age=0; includeSubdomains; preload"
+ ],
+ "cache-control": [
+ "no-store"
+ ],
+ "x-content-type-options": [
+ "nosniff"
+ ]
+ },
+ "status": {
+ "code": 200,
+ "message": "OK"
+ },
+ "body": {
+ "string": "{\"result\":null,\"error_code\":null,\"error_info\":null}"
+ }
+ },
+ "request": {
+ "headers": {
+ "content-length": [
+ "488"
+ ],
+ "content-type": [
+ "application/x-www-form-urlencoded"
+ ],
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.3.2+497-112d1ad30f88)"
+ ],
+ "accept": [
+ "application/mercurial-0.1"
+ ],
+ "host": [
+ "phab.mercurial-scm.org"
+ ]
+ },
+ "body": "output=json&__conduit__=1¶ms=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22data%22%3A+%22%7B%5C%22branch%5C%22%3A+%5C%22default%5C%22%2C+%5C%22date%5C%22%3A+%5C%220+0%5C%22%2C+%5C%22node%5C%22%3A+%5C%220c6523cb1d0f560a958bcc0f4f938c91cb1141dc%5C%22%2C+%5C%22parent%5C%22%3A+%5C%22876a60d024de493e35a1c6f963f2604056cdc0b9%5C%22%2C+%5C%22user%5C%22%3A+%5C%22test%5C%22%7D%22%2C+%22diff_id%22%3A+%2221110%22%2C+%22name%22%3A+%22hg%3Ameta%22%7D",
+ "uri": "https://phab.mercurial-scm.org//api/differential.setdiffproperty",
+ "method": "POST"
+ }
+ },
+ {
+ "response": {
+ "headers": {
+ "referrer-policy": [
+ "no-referrer"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "date": [
+ "Wed, 15 Apr 2020 23:43:57 GMT"
+ ],
+ "server": [
+ "Apache/2.4.10 (Debian)"
+ ],
+ "content-type": [
+ "application/json"
+ ],
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
+ ],
+ "transfer-encoding": [
+ "chunked"
+ ],
+ "x-xss-protection": [
+ "1; mode=block"
+ ],
+ "strict-transport-security": [
+ "max-age=0; includeSubdomains; preload"
+ ],
+ "cache-control": [
+ "no-store"
+ ],
+ "x-content-type-options": [
+ "nosniff"
+ ]
+ },
+ "status": {
+ "code": 200,
+ "message": "OK"
+ },
+ "body": {
+ "string": "{\"result\":null,\"error_code\":null,\"error_info\":null}"
+ }
+ },
+ "request": {
+ "headers": {
+ "content-length": [
+ "600"
+ ],
+ "content-type": [
+ "application/x-www-form-urlencoded"
+ ],
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.3.2+497-112d1ad30f88)"
+ ],
+ "accept": [
+ "application/mercurial-0.1"
+ ],
+ "host": [
+ "phab.mercurial-scm.org"
+ ]
+ },
+ "body": "output=json&__conduit__=1¶ms=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22data%22%3A+%22%7B%5C%220c6523cb1d0f560a958bcc0f4f938c91cb1141dc%5C%22%3A+%7B%5C%22author%5C%22%3A+%5C%22test%5C%22%2C+%5C%22authorEmail%5C%22%3A+%5C%22test%5C%22%2C+%5C%22branch%5C%22%3A+%5C%22default%5C%22%2C+%5C%22commit%5C%22%3A+%5C%220c6523cb1d0f560a958bcc0f4f938c91cb1141dc%5C%22%2C+%5C%22parents%5C%22%3A+%5B%5C%22876a60d024de493e35a1c6f963f2604056cdc0b9%5C%22%5D%2C+%5C%22time%5C%22%3A+0%7D%7D%22%2C+%22diff_id%22%3A+%2221110%22%2C+%22name%22%3A+%22local%3Acommits%22%7D",
+ "uri": "https://phab.mercurial-scm.org//api/differential.setdiffproperty",
+ "method": "POST"
+ }
+ },
+ {
+ "response": {
+ "headers": {
+ "referrer-policy": [
+ "no-referrer"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "date": [
+ "Wed, 15 Apr 2020 23:43:58 GMT"
+ ],
+ "server": [
+ "Apache/2.4.10 (Debian)"
+ ],
+ "content-type": [
+ "application/json"
+ ],
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
+ ],
+ "transfer-encoding": [
+ "chunked"
+ ],
+ "x-xss-protection": [
+ "1; mode=block"
+ ],
+ "strict-transport-security": [
+ "max-age=0; includeSubdomains; preload"
+ ],
+ "cache-control": [
+ "no-store"
+ ],
+ "x-content-type-options": [
+ "nosniff"
+ ]
+ },
+ "status": {
+ "code": 200,
+ "message": "OK"
+ },
+ "body": {
+ "string": "{\"result\":{\"errors\":[],\"fields\":{\"title\":\"modified 2\",\"revisionID\":8433},\"revisionIDFieldInfo\":{\"value\":8433,\"validDomain\":\"https:\\/\\/phab.mercurial-scm.org\"},\"transactions\":[{\"type\":\"title\",\"value\":\"modified 2\"}]},\"error_code\":null,\"error_info\":null}"
+ }
+ },
+ "request": {
+ "headers": {
+ "content-length": [
+ "232"
+ ],
+ "content-type": [
+ "application/x-www-form-urlencoded"
+ ],
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.3.2+497-112d1ad30f88)"
+ ],
+ "accept": [
+ "application/mercurial-0.1"
+ ],
+ "host": [
+ "phab.mercurial-scm.org"
+ ]
+ },
+ "body": "output=json&__conduit__=1¶ms=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22corpus%22%3A+%22modified+2%5Cn%5CnDifferential+Revision%3A+https%3A%2F%2Fphab.mercurial-scm.org%2FD8433%22%7D",
+ "uri": "https://phab.mercurial-scm.org//api/differential.parsecommitmessage",
+ "method": "POST"
+ }
+ },
+ {
+ "response": {
+ "headers": {
+ "referrer-policy": [
+ "no-referrer"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "date": [
+ "Wed, 15 Apr 2020 23:43:58 GMT"
+ ],
+ "server": [
+ "Apache/2.4.10 (Debian)"
+ ],
+ "content-type": [
+ "application/json"
+ ],
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
+ ],
+ "transfer-encoding": [
+ "chunked"
+ ],
+ "x-xss-protection": [
+ "1; mode=block"
+ ],
+ "strict-transport-security": [
+ "max-age=0; includeSubdomains; preload"
+ ],
+ "cache-control": [
+ "no-store"
+ ],
+ "x-content-type-options": [
+ "nosniff"
+ ]
+ },
+ "status": {
+ "code": 200,
+ "message": "OK"
+ },
+ "body": {
+ "string": "{\"result\":{\"object\":{\"id\":8433,\"phid\":\"PHID-DREV-kpkwhtylyxrzikfspl5r\"},\"transactions\":[]},\"error_code\":null,\"error_info\":null}"
+ }
+ },
+ "request": {
+ "headers": {
+ "content-length": [
+ "353"
+ ],
+ "content-type": [
+ "application/x-www-form-urlencoded"
+ ],
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.3.2+497-112d1ad30f88)"
+ ],
+ "accept": [
+ "application/mercurial-0.1"
+ ],
+ "host": [
+ "phab.mercurial-scm.org"
+ ]
+ },
+ "body": "output=json&__conduit__=1¶ms=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22objectIdentifier%22%3A+8433%2C+%22transactions%22%3A+%5B%7B%22type%22%3A+%22parents.set%22%2C+%22value%22%3A+%5B%22PHID-DREV-l5ocnglddqa4hwbdzcky%22%5D%7D%2C+%7B%22type%22%3A+%22title%22%2C+%22value%22%3A+%22modified+2%22%7D%5D%7D",
+ "uri": "https://phab.mercurial-scm.org//api/differential.revision.edit",
+ "method": "POST"
+ }
+ },
+ {
+ "response": {
+ "headers": {
+ "referrer-policy": [
+ "no-referrer"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "date": [
+ "Wed, 15 Apr 2020 23:43:59 GMT"
+ ],
+ "server": [
+ "Apache/2.4.10 (Debian)"
+ ],
+ "content-type": [
+ "application/json"
+ ],
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
+ ],
+ "transfer-encoding": [
+ "chunked"
+ ],
+ "x-xss-protection": [
+ "1; mode=block"
+ ],
+ "strict-transport-security": [
+ "max-age=0; includeSubdomains; preload"
+ ],
+ "cache-control": [
+ "no-store"
+ ],
+ "x-content-type-options": [
+ "nosniff"
+ ]
+ },
+ "status": {
+ "code": 200,
+ "message": "OK"
+ },
+ "body": {
+ "string": "{\"result\":{\"data\":[{\"id\":2,\"type\":\"REPO\",\"phid\":\"PHID-REPO-bvunnehri4u2isyr7bc3\",\"fields\":{\"name\":\"Mercurial\",\"vcs\":\"hg\",\"callsign\":\"HG\",\"shortName\":null,\"status\":\"active\",\"isImporting\":false,\"almanacServicePHID\":null,\"refRules\":{\"fetchRules\":[],\"trackRules\":[],\"permanentRefRules\":[]},\"spacePHID\":null,\"dateCreated\":1498761653,\"dateModified\":1500403184,\"policy\":{\"view\":\"public\",\"edit\":\"admin\",\"diffusion.push\":\"users\"}},\"attachments\":{}}],\"maps\":{},\"query\":{\"queryKey\":null},\"cursor\":{\"limit\":100,\"after\":null,\"before\":null,\"order\":null}},\"error_code\":null,\"error_info\":null}"
+ }
+ },
+ "request": {
+ "headers": {
+ "content-length": [
+ "183"
+ ],
+ "content-type": [
+ "application/x-www-form-urlencoded"
+ ],
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.3.2+497-112d1ad30f88)"
+ ],
+ "accept": [
+ "application/mercurial-0.1"
+ ],
+ "host": [
+ "phab.mercurial-scm.org"
+ ]
+ },
+ "body": "output=json&__conduit__=1¶ms=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22constraints%22%3A+%7B%22callsigns%22%3A+%5B%22HG%22%5D%7D%7D",
+ "uri": "https://phab.mercurial-scm.org//api/diffusion.repository.search",
+ "method": "POST"
+ }
+ },
+ {
+ "response": {
+ "headers": {
+ "referrer-policy": [
+ "no-referrer"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "date": [
+ "Wed, 15 Apr 2020 23:43:59 GMT"
+ ],
+ "server": [
+ "Apache/2.4.10 (Debian)"
+ ],
+ "content-type": [
+ "application/json"
+ ],
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
+ ],
+ "transfer-encoding": [
+ "chunked"
+ ],
+ "x-xss-protection": [
+ "1; mode=block"
+ ],
+ "strict-transport-security": [
+ "max-age=0; includeSubdomains; preload"
+ ],
+ "cache-control": [
+ "no-store"
+ ],
+ "x-content-type-options": [
+ "nosniff"
+ ]
+ },
+ "status": {
+ "code": 200,
+ "message": "OK"
+ },
+ "body": {
+ "string": "{\"result\":{\"diffid\":21112,\"phid\":\"PHID-DIFF-5hagl525ogjltlaimw2a\",\"uri\":\"https:\\/\\/phab.mercurial-scm.org\\/differential\\/diff\\/21112\\/\"},\"error_code\":null,\"error_info\":null}"
+ }
+ },
+ "request": {
+ "headers": {
+ "content-length": [
+ "1166"
+ ],
+ "content-type": [
+ "application/x-www-form-urlencoded"
+ ],
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.3.2+497-112d1ad30f88)"
+ ],
+ "accept": [
+ "application/mercurial-0.1"
+ ],
+ "host": [
+ "phab.mercurial-scm.org"
+ ]
+ },
+ "body": "output=json&__conduit__=1¶ms=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22bookmark%22%3A+null%2C+%22branch%22%3A+%22default%22%2C+%22changes%22%3A+%7B%22file1.txt%22%3A+%7B%22addLines%22%3A+1%2C+%22awayPaths%22%3A+%5B%5D%2C+%22commitHash%22%3A+null%2C+%22currentPath%22%3A+%22file1.txt%22%2C+%22delLines%22%3A+1%2C+%22fileType%22%3A+1%2C+%22hunks%22%3A+%5B%7B%22addLines%22%3A+1%2C+%22corpus%22%3A+%22-mod2%5Cn%2Bobsolete%5Cn%22%2C+%22delLines%22%3A+1%2C+%22newLength%22%3A+1%2C+%22newOffset%22%3A+1%2C+%22oldLength%22%3A+1%2C+%22oldOffset%22%3A+1%7D%5D%2C+%22metadata%22%3A+%7B%7D%2C+%22newProperties%22%3A+%7B%7D%2C+%22oldPath%22%3A+%22file1.txt%22%2C+%22oldProperties%22%3A+%7B%7D%2C+%22type%22%3A+2%7D%7D%2C+%22creationMethod%22%3A+%22phabsend%22%2C+%22lintStatus%22%3A+%22none%22%2C+%22repositoryPHID%22%3A+%22PHID-REPO-bvunnehri4u2isyr7bc3%22%2C+%22sourceControlBaseRevision%22%3A+%220c6523cb1d0f560a958bcc0f4f938c91cb1141dc%22%2C+%22sourceControlPath%22%3A+%22%2F%22%2C+%22sourceControlSystem%22%3A+%22hg%22%2C+%22sourceMachine%22%3A+%22%22%2C+%22sourcePath%22%3A+%22%2F%22%2C+%22unitStatus%22%3A+%22none%22%7D",
+ "uri": "https://phab.mercurial-scm.org//api/differential.creatediff",
+ "method": "POST"
+ }
+ },
+ {
+ "response": {
+ "headers": {
+ "referrer-policy": [
+ "no-referrer"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "date": [
+ "Wed, 15 Apr 2020 23:44:00 GMT"
+ ],
+ "server": [
+ "Apache/2.4.10 (Debian)"
+ ],
+ "content-type": [
+ "application/json"
+ ],
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
+ ],
+ "transfer-encoding": [
+ "chunked"
+ ],
+ "x-xss-protection": [
+ "1; mode=block"
+ ],
+ "strict-transport-security": [
+ "max-age=0; includeSubdomains; preload"
+ ],
+ "cache-control": [
+ "no-store"
+ ],
+ "x-content-type-options": [
+ "nosniff"
+ ]
+ },
+ "status": {
+ "code": 200,
+ "message": "OK"
+ },
+ "body": {
+ "string": "{\"result\":null,\"error_code\":null,\"error_info\":null}"
+ }
+ },
+ "request": {
+ "headers": {
+ "content-length": [
+ "482"
+ ],
+ "content-type": [
+ "application/x-www-form-urlencoded"
+ ],
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.3.2+497-112d1ad30f88)"
+ ],
+ "accept": [
+ "application/mercurial-0.1"
+ ],
+ "host": [
+ "phab.mercurial-scm.org"
+ ]
+ },
+ "body": "output=json&__conduit__=1¶ms=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22data%22%3A+%22%7B%5C%22branch%5C%22%3A+%5C%22default%5C%22%2C+%5C%22date%5C%22%3A+%5C%220+0%5C%22%2C+%5C%22node%5C%22%3A+%5C%22082be6c9415073eee91d1f5b330b93e1fefd7627%5C%22%2C+%5C%22parent%5C%22%3A+%5C%220c6523cb1d0f560a958bcc0f4f938c91cb1141dc%5C%22%2C+%5C%22user%5C%22%3A+%5C%22test%5C%22%7D%22%2C+%22diff_id%22%3A+21112%2C+%22name%22%3A+%22hg%3Ameta%22%7D",
+ "uri": "https://phab.mercurial-scm.org//api/differential.setdiffproperty",
+ "method": "POST"
+ }
+ },
+ {
+ "response": {
+ "headers": {
+ "referrer-policy": [
+ "no-referrer"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "date": [
+ "Wed, 15 Apr 2020 23:44:00 GMT"
+ ],
+ "server": [
+ "Apache/2.4.10 (Debian)"
+ ],
+ "content-type": [
+ "application/json"
+ ],
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
+ ],
+ "transfer-encoding": [
+ "chunked"
+ ],
+ "x-xss-protection": [
+ "1; mode=block"
+ ],
+ "strict-transport-security": [
+ "max-age=0; includeSubdomains; preload"
+ ],
+ "cache-control": [
+ "no-store"
+ ],
+ "x-content-type-options": [
+ "nosniff"
+ ]
+ },
+ "status": {
+ "code": 200,
+ "message": "OK"
+ },
+ "body": {
+ "string": "{\"result\":null,\"error_code\":null,\"error_info\":null}"
+ }
+ },
+ "request": {
+ "headers": {
+ "content-length": [
+ "594"
+ ],
+ "content-type": [
+ "application/x-www-form-urlencoded"
+ ],
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.3.2+497-112d1ad30f88)"
+ ],
+ "accept": [
+ "application/mercurial-0.1"
+ ],
+ "host": [
+ "phab.mercurial-scm.org"
+ ]
+ },
+ "body": "output=json&__conduit__=1¶ms=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22data%22%3A+%22%7B%5C%22082be6c9415073eee91d1f5b330b93e1fefd7627%5C%22%3A+%7B%5C%22author%5C%22%3A+%5C%22test%5C%22%2C+%5C%22authorEmail%5C%22%3A+%5C%22test%5C%22%2C+%5C%22branch%5C%22%3A+%5C%22default%5C%22%2C+%5C%22commit%5C%22%3A+%5C%22082be6c9415073eee91d1f5b330b93e1fefd7627%5C%22%2C+%5C%22parents%5C%22%3A+%5B%5C%220c6523cb1d0f560a958bcc0f4f938c91cb1141dc%5C%22%5D%2C+%5C%22time%5C%22%3A+0%7D%7D%22%2C+%22diff_id%22%3A+21112%2C+%22name%22%3A+%22local%3Acommits%22%7D",
+ "uri": "https://phab.mercurial-scm.org//api/differential.setdiffproperty",
+ "method": "POST"
+ }
+ },
+ {
+ "response": {
+ "headers": {
+ "referrer-policy": [
+ "no-referrer"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "date": [
+ "Wed, 15 Apr 2020 23:44:01 GMT"
+ ],
+ "server": [
+ "Apache/2.4.10 (Debian)"
+ ],
+ "content-type": [
+ "application/json"
+ ],
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
+ ],
+ "transfer-encoding": [
+ "chunked"
+ ],
+ "x-xss-protection": [
+ "1; mode=block"
+ ],
+ "strict-transport-security": [
+ "max-age=0; includeSubdomains; preload"
+ ],
+ "cache-control": [
+ "no-store"
+ ],
+ "x-content-type-options": [
+ "nosniff"
+ ]
+ },
+ "status": {
+ "code": 200,
+ "message": "OK"
+ },
+ "body": {
+ "string": "{\"result\":{\"errors\":[],\"fields\":{\"title\":\"modified A\"},\"revisionIDFieldInfo\":{\"value\":null,\"validDomain\":\"https:\\/\\/phab.mercurial-scm.org\"},\"transactions\":[{\"type\":\"title\",\"value\":\"modified A\"}]},\"error_code\":null,\"error_info\":null}"
+ }
+ },
+ "request": {
+ "headers": {
+ "content-length": [
+ "155"
+ ],
+ "content-type": [
+ "application/x-www-form-urlencoded"
+ ],
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.3.2+497-112d1ad30f88)"
+ ],
+ "accept": [
+ "application/mercurial-0.1"
+ ],
+ "host": [
+ "phab.mercurial-scm.org"
+ ]
+ },
+ "body": "output=json&__conduit__=1¶ms=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22corpus%22%3A+%22modified+A%22%7D",
+ "uri": "https://phab.mercurial-scm.org//api/differential.parsecommitmessage",
+ "method": "POST"
+ }
+ },
+ {
+ "response": {
+ "headers": {
+ "referrer-policy": [
+ "no-referrer"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "date": [
+ "Wed, 15 Apr 2020 23:44:01 GMT"
+ ],
+ "server": [
+ "Apache/2.4.10 (Debian)"
+ ],
+ "content-type": [
+ "application/json"
+ ],
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
+ ],
+ "transfer-encoding": [
+ "chunked"
+ ],
+ "x-xss-protection": [
+ "1; mode=block"
+ ],
+ "strict-transport-security": [
+ "max-age=0; includeSubdomains; preload"
+ ],
+ "cache-control": [
+ "no-store"
+ ],
+ "x-content-type-options": [
+ "nosniff"
+ ]
+ },
+ "status": {
+ "code": 200,
+ "message": "OK"
+ },
+ "body": {
+ "string": "{\"result\":{\"object\":{\"id\":8435,\"phid\":\"PHID-DREV-wn3f4ni4p5n6juwqeskr\"},\"transactions\":[{\"phid\":\"PHID-XACT-DREV-6pr73p6guxsdz4p\"},{\"phid\":\"PHID-XACT-DREV-qpigriq2bzc6xgf\"},{\"phid\":\"PHID-XACT-DREV-czslffarxz4pbmz\"},{\"phid\":\"PHID-XACT-DREV-4ahvaedacwbzzyv\"},{\"phid\":\"PHID-XACT-DREV-kmjjhdw47orongi\"},{\"phid\":\"PHID-XACT-DREV-65ahhu2fov5rwog\"}]},\"error_code\":null,\"error_info\":null}"
+ }
+ },
+ "request": {
+ "headers": {
+ "content-length": [
+ "410"
+ ],
+ "content-type": [
+ "application/x-www-form-urlencoded"
+ ],
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.3.2+497-112d1ad30f88)"
+ ],
+ "accept": [
+ "application/mercurial-0.1"
+ ],
+ "host": [
+ "phab.mercurial-scm.org"
+ ]
+ },
+ "body": "output=json&__conduit__=1¶ms=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22transactions%22%3A+%5B%7B%22type%22%3A+%22update%22%2C+%22value%22%3A+%22PHID-DIFF-5hagl525ogjltlaimw2a%22%7D%2C+%7B%22type%22%3A+%22parents.set%22%2C+%22value%22%3A+%5B%22PHID-DREV-kpkwhtylyxrzikfspl5r%22%5D%7D%2C+%7B%22type%22%3A+%22title%22%2C+%22value%22%3A+%22modified+A%22%7D%5D%7D",
+ "uri": "https://phab.mercurial-scm.org//api/differential.revision.edit",
+ "method": "POST"
+ }
+ },
+ {
+ "response": {
+ "headers": {
+ "referrer-policy": [
+ "no-referrer"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "date": [
+ "Wed, 15 Apr 2020 23:44:02 GMT"
+ ],
+ "server": [
+ "Apache/2.4.10 (Debian)"
+ ],
+ "content-type": [
+ "application/json"
+ ],
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
+ ],
+ "transfer-encoding": [
+ "chunked"
+ ],
+ "x-xss-protection": [
+ "1; mode=block"
+ ],
+ "strict-transport-security": [
+ "max-age=0; includeSubdomains; preload"
+ ],
+ "cache-control": [
+ "no-store"
+ ],
+ "x-content-type-options": [
+ "nosniff"
+ ]
+ },
+ "status": {
+ "code": 200,
+ "message": "OK"
+ },
+ "body": {
+ "string": "{\"result\":[{\"id\":\"8435\",\"phid\":\"PHID-DREV-wn3f4ni4p5n6juwqeskr\",\"title\":\"modified A\",\"uri\":\"https:\\/\\/phab.mercurial-scm.org\\/D8435\",\"dateCreated\":\"1586994241\",\"dateModified\":\"1586994241\",\"authorPHID\":\"PHID-USER-tzhaient733lwrlbcag5\",\"status\":\"0\",\"statusName\":\"Needs Review\",\"properties\":{\"draft.broadcast\":true,\"lines.added\":1,\"lines.removed\":1},\"branch\":\"default\",\"summary\":\"\",\"testPlan\":\"\",\"lineCount\":\"2\",\"activeDiffPHID\":\"PHID-DIFF-5hagl525ogjltlaimw2a\",\"diffs\":[\"21112\"],\"commits\":[],\"reviewers\":{\"PHID-PROJ-3dvcxzznrjru2xmmses3\":\"PHID-PROJ-3dvcxzznrjru2xmmses3\"},\"ccs\":[\"PHID-USER-q42dn7cc3donqriafhjx\"],\"hashes\":[[\"hgcm\",\"\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\"]],\"auxiliary\":{\"phabricator:projects\":[],\"phabricator:depends-on\":[\"PHID-DREV-kpkwhtylyxrzikfspl5r\"]},\"repositoryPHID\":\"PHID-REPO-bvunnehri4u2isyr7bc3\",\"sourcePath\":\"\\/\"},{\"id\":\"8433\",\"phid\":\"PHID-DREV-kpkwhtylyxrzikfspl5r\",\"title\":\"modified 2\",\"uri\":\"https:\\/\\/phab.mercurial-scm.org\\/D8433\",\"dateCreated\":\"1586994221\",\"dateModified\":\"1586994241\",\"authorPHID\":\"PHID-USER-tzhaient733lwrlbcag5\",\"status\":\"0\",\"statusName\":\"Needs 
Review\",\"properties\":{\"draft.broadcast\":true,\"lines.added\":1,\"lines.removed\":1},\"branch\":\"default\",\"summary\":\"\",\"testPlan\":\"\",\"lineCount\":\"2\",\"activeDiffPHID\":\"PHID-DIFF-g25jdc5b5khduwpp3p3b\",\"diffs\":[\"21110\"],\"commits\":[],\"reviewers\":{\"PHID-PROJ-3dvcxzznrjru2xmmses3\":\"PHID-PROJ-3dvcxzznrjru2xmmses3\"},\"ccs\":[\"PHID-USER-q42dn7cc3donqriafhjx\"],\"hashes\":[[\"hgcm\",\"\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\"]],\"auxiliary\":{\"phabricator:projects\":[],\"phabricator:depends-on\":[\"PHID-DREV-l5ocnglddqa4hwbdzcky\"]},\"repositoryPHID\":\"PHID-REPO-bvunnehri4u2isyr7bc3\",\"sourcePath\":\"\\/\"},{\"id\":\"8434\",\"phid\":\"PHID-DREV-l5ocnglddqa4hwbdzcky\",\"title\":\"modified 1\",\"uri\":\"https:\\/\\/phab.mercurial-scm.org\\/D8434\",\"dateCreated\":\"1586994227\",\"dateModified\":\"1586994236\",\"authorPHID\":\"PHID-USER-tzhaient733lwrlbcag5\",\"status\":\"0\",\"statusName\":\"Needs 
Review\",\"properties\":{\"draft.broadcast\":true,\"lines.added\":1,\"lines.removed\":1},\"branch\":\"default\",\"summary\":\"\",\"testPlan\":\"\",\"lineCount\":\"2\",\"activeDiffPHID\":\"PHID-DIFF-qat4sqpqqvytzhf7rpti\",\"diffs\":[\"21111\"],\"commits\":[],\"reviewers\":{\"PHID-PROJ-3dvcxzznrjru2xmmses3\":\"PHID-PROJ-3dvcxzznrjru2xmmses3\"},\"ccs\":[\"PHID-USER-q42dn7cc3donqriafhjx\"],\"hashes\":[[\"hgcm\",\"\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\"]],\"auxiliary\":{\"phabricator:projects\":[],\"phabricator:depends-on\":[]},\"repositoryPHID\":\"PHID-REPO-bvunnehri4u2isyr7bc3\",\"sourcePath\":\"\\/\"}],\"error_code\":null,\"error_info\":null}"
+ }
+ },
+ "request": {
+ "headers": {
+ "content-length": [
+ "162"
+ ],
+ "content-type": [
+ "application/x-www-form-urlencoded"
+ ],
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.3.2+497-112d1ad30f88)"
+ ],
+ "accept": [
+ "application/mercurial-0.1"
+ ],
+ "host": [
+ "phab.mercurial-scm.org"
+ ]
+ },
+ "body": "output=json&__conduit__=1¶ms=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22ids%22%3A+%5B8434%2C+8433%2C+8435%5D%7D",
+ "uri": "https://phab.mercurial-scm.org//api/differential.query",
+ "method": "POST"
+ }
+ },
+ {
+ "response": {
+ "headers": {
+ "referrer-policy": [
+ "no-referrer"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "date": [
+ "Wed, 15 Apr 2020 23:44:02 GMT"
+ ],
+ "server": [
+ "Apache/2.4.10 (Debian)"
+ ],
+ "content-type": [
+ "application/json"
+ ],
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
+ ],
+ "transfer-encoding": [
+ "chunked"
+ ],
+ "x-xss-protection": [
+ "1; mode=block"
+ ],
+ "strict-transport-security": [
+ "max-age=0; includeSubdomains; preload"
+ ],
+ "cache-control": [
+ "no-store"
+ ],
+ "x-content-type-options": [
+ "nosniff"
+ ]
+ },
+ "status": {
+ "code": 200,
+ "message": "OK"
+ },
+ "body": {
+ "string": "{\"result\":null,\"error_code\":null,\"error_info\":null}"
+ }
+ },
+ "request": {
+ "headers": {
+ "content-length": [
+ "482"
+ ],
+ "content-type": [
+ "application/x-www-form-urlencoded"
+ ],
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.3.2+497-112d1ad30f88)"
+ ],
+ "accept": [
+ "application/mercurial-0.1"
+ ],
+ "host": [
+ "phab.mercurial-scm.org"
+ ]
+ },
+ "body": "output=json&__conduit__=1¶ms=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22data%22%3A+%22%7B%5C%22branch%5C%22%3A+%5C%22default%5C%22%2C+%5C%22date%5C%22%3A+%5C%220+0%5C%22%2C+%5C%22node%5C%22%3A+%5C%22b5913193c805e46620181263d5175a36ee74c48a%5C%22%2C+%5C%22parent%5C%22%3A+%5C%220c6523cb1d0f560a958bcc0f4f938c91cb1141dc%5C%22%2C+%5C%22user%5C%22%3A+%5C%22test%5C%22%7D%22%2C+%22diff_id%22%3A+21112%2C+%22name%22%3A+%22hg%3Ameta%22%7D",
+ "uri": "https://phab.mercurial-scm.org//api/differential.setdiffproperty",
+ "method": "POST"
+ }
+ },
+ {
+ "response": {
+ "headers": {
+ "referrer-policy": [
+ "no-referrer"
+ ],
+ "x-frame-options": [
+ "Deny"
+ ],
+ "date": [
+ "Wed, 15 Apr 2020 23:44:03 GMT"
+ ],
+ "server": [
+ "Apache/2.4.10 (Debian)"
+ ],
+ "content-type": [
+ "application/json"
+ ],
+ "expires": [
+ "Sat, 01 Jan 2000 00:00:00 GMT"
+ ],
+ "transfer-encoding": [
+ "chunked"
+ ],
+ "x-xss-protection": [
+ "1; mode=block"
+ ],
+ "strict-transport-security": [
+ "max-age=0; includeSubdomains; preload"
+ ],
+ "cache-control": [
+ "no-store"
+ ],
+ "x-content-type-options": [
+ "nosniff"
+ ]
+ },
+ "status": {
+ "code": 200,
+ "message": "OK"
+ },
+ "body": {
+ "string": "{\"result\":null,\"error_code\":null,\"error_info\":null}"
+ }
+ },
+ "request": {
+ "headers": {
+ "content-length": [
+ "594"
+ ],
+ "content-type": [
+ "application/x-www-form-urlencoded"
+ ],
+ "user-agent": [
+ "mercurial/proto-1.0 (Mercurial 5.3.2+497-112d1ad30f88)"
+ ],
+ "accept": [
+ "application/mercurial-0.1"
+ ],
+ "host": [
+ "phab.mercurial-scm.org"
+ ]
+ },
+ "body": "output=json&__conduit__=1¶ms=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22data%22%3A+%22%7B%5C%22b5913193c805e46620181263d5175a36ee74c48a%5C%22%3A+%7B%5C%22author%5C%22%3A+%5C%22test%5C%22%2C+%5C%22authorEmail%5C%22%3A+%5C%22test%5C%22%2C+%5C%22branch%5C%22%3A+%5C%22default%5C%22%2C+%5C%22commit%5C%22%3A+%5C%22b5913193c805e46620181263d5175a36ee74c48a%5C%22%2C+%5C%22parents%5C%22%3A+%5B%5C%220c6523cb1d0f560a958bcc0f4f938c91cb1141dc%5C%22%5D%2C+%5C%22time%5C%22%3A+0%7D%7D%22%2C+%22diff_id%22%3A+21112%2C+%22name%22%3A+%22local%3Acommits%22%7D",
+ "uri": "https://phab.mercurial-scm.org//api/differential.setdiffproperty",
+ "method": "POST"
+ }
+ }
+ ]
+}
\ No newline at end of file
--- a/tests/run-tests.py Mon Apr 13 16:30:13 2020 +0300
+++ b/tests/run-tests.py Thu Apr 16 22:51:09 2020 +0530
@@ -53,6 +53,7 @@
import json
import multiprocessing
import os
+import platform
import random
import re
import shutil
@@ -143,12 +144,12 @@
PYTHON3 = True
xrange = range # we use xrange in one place, and we'd rather not use range
- def _bytespath(p):
+ def _sys2bytes(p):
if p is None:
return p
return p.encode('utf-8')
- def _strpath(p):
+ def _bytes2sys(p):
if p is None:
return p
return p.decode('utf-8')
@@ -165,34 +166,34 @@
self._strenv = strenv
def __getitem__(self, k):
- v = self._strenv.__getitem__(_strpath(k))
- return _bytespath(v)
+ v = self._strenv.__getitem__(_bytes2sys(k))
+ return _sys2bytes(v)
def __setitem__(self, k, v):
- self._strenv.__setitem__(_strpath(k), _strpath(v))
+ self._strenv.__setitem__(_bytes2sys(k), _bytes2sys(v))
def __delitem__(self, k):
- self._strenv.__delitem__(_strpath(k))
+ self._strenv.__delitem__(_bytes2sys(k))
def __contains__(self, k):
- return self._strenv.__contains__(_strpath(k))
+ return self._strenv.__contains__(_bytes2sys(k))
def __iter__(self):
- return iter([_bytespath(k) for k in iter(self._strenv)])
+ return iter([_sys2bytes(k) for k in iter(self._strenv)])
def get(self, k, default=None):
- v = self._strenv.get(_strpath(k), _strpath(default))
- return _bytespath(v)
+ v = self._strenv.get(_bytes2sys(k), _bytes2sys(default))
+ return _sys2bytes(v)
def pop(self, k, default=None):
- v = self._strenv.pop(_strpath(k), _strpath(default))
- return _bytespath(v)
+ v = self._strenv.pop(_bytes2sys(k), _bytes2sys(default))
+ return _sys2bytes(v)
osenvironb = environbytes(os.environ)
getcwdb = getattr(os, 'getcwdb')
if not getcwdb or os.name == 'nt':
- getcwdb = lambda: _bytespath(os.getcwd())
+ getcwdb = lambda: _sys2bytes(os.getcwd())
elif sys.version_info >= (3, 0, 0):
print(
@@ -207,10 +208,10 @@
# bytestrings by default, so we don't have to do any extra
# fiddling there. We define the wrapper functions anyway just to
# help keep code consistent between platforms.
- def _bytespath(p):
+ def _sys2bytes(p):
return p
- _strpath = _bytespath
+ _bytes2sys = _sys2bytes
osenvironb = os.environ
getcwdb = os.getcwd
@@ -274,10 +275,10 @@
def Popen4(cmd, wd, timeout, env=None):
processlock.acquire()
p = subprocess.Popen(
- _strpath(cmd),
+ _bytes2sys(cmd),
shell=True,
bufsize=-1,
- cwd=_strpath(wd),
+ cwd=_bytes2sys(wd),
env=env,
close_fds=closefds,
stdin=subprocess.PIPE,
@@ -315,12 +316,12 @@
else:
raise AssertionError('Could not find Python interpreter')
-PYTHON = _bytespath(sysexecutable.replace('\\', '/'))
+PYTHON = _sys2bytes(sysexecutable.replace('\\', '/'))
IMPL_PATH = b'PYTHONPATH'
if 'java' in sys.platform:
IMPL_PATH = b'JYTHONPATH'
-defaults = {
+default_defaults = {
'jobs': ('HGTEST_JOBS', multiprocessing.cpu_count()),
'timeout': ('HGTEST_TIMEOUT', 180),
'slowtimeout': ('HGTEST_SLOWTIMEOUT', 1500),
@@ -328,6 +329,8 @@
'shell': ('HGTEST_SHELL', 'sh'),
}
+defaults = default_defaults.copy()
+
def canonpath(path):
return os.path.realpath(os.path.expanduser(path))
@@ -530,6 +533,9 @@
action="store_true",
help="install and use chg wrapper in place of hg",
)
+ hgconf.add_argument(
+ "--chg-debug", action="store_true", help="show chg debug logs",
+ )
hgconf.add_argument("--compiler", help="compiler to build with")
hgconf.add_argument(
'--extra-config-opt',
@@ -555,10 +561,14 @@
help="use pure Python code instead of C extensions",
)
hgconf.add_argument(
- "-3",
- "--py3-warnings",
+ "--rust",
action="store_true",
- help="enable Py3k warnings on Python 2.7+",
+ help="use Rust code alongside C extensions",
+ )
+ hgconf.add_argument(
+ "--no-rust",
+ action="store_true",
+ help="do not use Rust code even if compiled",
)
hgconf.add_argument(
"--with-chg",
@@ -643,10 +653,19 @@
if 'java' in sys.platform or '__pypy__' in sys.modules:
options.pure = True
+ if platform.python_implementation() != 'CPython' and options.rust:
+ parser.error('Rust extensions are only available with CPython')
+
+ if options.pure and options.rust:
+ parser.error('--rust cannot be used with --pure')
+
+ if options.rust and options.no_rust:
+ parser.error('--rust cannot be used with --no-rust')
+
if options.local:
if options.with_hg or options.with_chg:
parser.error('--local cannot be used with --with-hg or --with-chg')
- testdir = os.path.dirname(_bytespath(canonpath(sys.argv[0])))
+ testdir = os.path.dirname(_sys2bytes(canonpath(sys.argv[0])))
reporootdir = os.path.dirname(testdir)
pathandattrs = [(b'hg', 'with_hg')]
if options.chg:
@@ -658,10 +677,10 @@
'--local specified, but %r not found or '
'not executable' % binpath
)
- setattr(options, attr, _strpath(binpath))
+ setattr(options, attr, _bytes2sys(binpath))
if options.with_hg:
- options.with_hg = canonpath(_bytespath(options.with_hg))
+ options.with_hg = canonpath(_sys2bytes(options.with_hg))
if not (
os.path.isfile(options.with_hg)
and os.access(options.with_hg, os.X_OK)
@@ -675,7 +694,7 @@
parser.error('chg does not work on %s' % os.name)
if options.with_chg:
options.chg = False # no installation to temporary location
- options.with_chg = canonpath(_bytespath(options.with_chg))
+ options.with_chg = canonpath(_sys2bytes(options.with_chg))
if not (
os.path.isfile(options.with_chg)
and os.access(options.with_chg, os.X_OK)
@@ -748,9 +767,6 @@
)
options.timeout = 0
options.slowtimeout = 0
- if options.py3_warnings:
- if PYTHON3:
- parser.error('--py3-warnings can only be used on Python 2.7')
if options.blacklist:
options.blacklist = parselistfiles(options.blacklist, 'blacklist')
@@ -909,11 +925,11 @@
timeout=None,
startport=None,
extraconfigopts=None,
- py3warnings=False,
shell=None,
hgcommand=None,
slowtimeout=None,
usechg=False,
+ chgdebug=False,
useipv6=False,
):
"""Create a test from parameters.
@@ -942,8 +958,6 @@
must have the form "key=value" (something understood by hgrc). Values
of the form "foo.key=value" will result in "[foo] key=value".
- py3warnings enables Py3k warnings.
-
shell is the shell to execute tests in.
"""
if timeout is None:
@@ -954,7 +968,7 @@
slowtimeout = defaults['slowtimeout']
self.path = path
self.bname = os.path.basename(path)
- self.name = _strpath(self.bname)
+ self.name = _bytes2sys(self.bname)
self._testdir = os.path.dirname(path)
self._outputdir = outputdir
self._tmpname = os.path.basename(path)
@@ -968,10 +982,10 @@
self._slowtimeout = slowtimeout
self._startport = startport
self._extraconfigopts = extraconfigopts or []
- self._py3warnings = py3warnings
- self._shell = _bytespath(shell)
+ self._shell = _sys2bytes(shell)
self._hgcommand = hgcommand or b'hg'
self._usechg = usechg
+ self._chgdebug = chgdebug
self._useipv6 = useipv6
self._aborted = False
@@ -1178,10 +1192,7 @@
if self._keeptmpdir:
log(
'\nKeeping testtmp dir: %s\nKeeping threadtmp dir: %s'
- % (
- self._testtmp.decode('utf-8'),
- self._threadtmp.decode('utf-8'),
- )
+ % (_bytes2sys(self._testtmp), _bytes2sys(self._threadtmp),)
)
else:
try:
@@ -1281,7 +1292,7 @@
environment."""
# Put the restoreenv script inside self._threadtmp
scriptpath = os.path.join(self._threadtmp, b'restoreenv.sh')
- testenv['HGTEST_RESTOREENV'] = _strpath(scriptpath)
+ testenv['HGTEST_RESTOREENV'] = _bytes2sys(scriptpath)
# Only restore environment variable names that the shell allows
# us to export.
@@ -1315,15 +1326,18 @@
env = os.environ.copy()
env['PYTHONUSERBASE'] = sysconfig.get_config_var('userbase') or ''
env['HGEMITWARNINGS'] = '1'
- env['TESTTMP'] = _strpath(self._testtmp)
+ env['TESTTMP'] = _bytes2sys(self._testtmp)
env['TESTNAME'] = self.name
- env['HOME'] = _strpath(self._testtmp)
+ env['HOME'] = _bytes2sys(self._testtmp)
+ formated_timeout = _bytes2sys(b"%d" % default_defaults['timeout'][1])
+ env['HGTEST_TIMEOUT_DEFAULT'] = formated_timeout
+ env['HGTEST_TIMEOUT'] = _bytes2sys(b"%d" % self._timeout)
# This number should match portneeded in _getport
for port in xrange(3):
# This list should be parallel to _portmap in _getreplacements
defineport(port)
- env["HGRCPATH"] = _strpath(os.path.join(self._threadtmp, b'.hgrc'))
- env["DAEMON_PIDS"] = _strpath(
+ env["HGRCPATH"] = _bytes2sys(os.path.join(self._threadtmp, b'.hgrc'))
+ env["DAEMON_PIDS"] = _bytes2sys(
os.path.join(self._threadtmp, b'daemon.pids')
)
env["HGEDITOR"] = (
@@ -1344,7 +1358,7 @@
extraextensions = []
for opt in self._extraconfigopts:
- section, key = opt.encode('utf-8').split(b'.', 1)
+ section, key = _sys2bytes(opt).split(b'.', 1)
if section != 'extensions':
continue
name = key.split(b'=', 1)[0]
@@ -1355,7 +1369,7 @@
# LOCALIP could be ::1 or 127.0.0.1. Useful for tests that require raw
# IP addresses.
- env['LOCALIP'] = _strpath(self._localip())
+ env['LOCALIP'] = _bytes2sys(self._localip())
# This has the same effect as Py_LegacyWindowsStdioFlag in exewrapper.c,
# but this is needed for testing python instances like dummyssh,
@@ -1413,6 +1427,8 @@
if self._usechg:
env['CHGSOCKNAME'] = os.path.join(self._chgsockdir, b'server')
+ if self._chgdebug:
+ env['CHGDEBUG'] = 'true'
return env
@@ -1441,11 +1457,11 @@
)
hgrc.write(b'[web]\n')
hgrc.write(b'address = localhost\n')
- hgrc.write(b'ipv6 = %s\n' % str(self._useipv6).encode('ascii'))
+ hgrc.write(b'ipv6 = %r\n' % self._useipv6)
hgrc.write(b'server-header = testing stub value\n')
for opt in self._extraconfigopts:
- section, key = opt.encode('utf-8').split(b'.', 1)
+ section, key = _sys2bytes(opt).split(b'.', 1)
assert b'=' in key, (
'extra config opt %s must ' 'have an = for assignment' % opt
)
@@ -1464,7 +1480,10 @@
"""
if self._debug:
proc = subprocess.Popen(
- _strpath(cmd), shell=True, cwd=_strpath(self._testtmp), env=env
+ _bytes2sys(cmd),
+ shell=True,
+ cwd=_bytes2sys(self._testtmp),
+ env=env,
)
ret = proc.wait()
return (ret, None)
@@ -1515,9 +1534,8 @@
return os.path.join(self._testdir, b'%s.out' % self.bname)
def _run(self, env):
- py3switch = self._py3warnings and b' -3' or b''
# Quote the python(3) executable for Windows
- cmd = b'"%s"%s "%s"' % (PYTHON, py3switch, self.path)
+ cmd = b'"%s" "%s"' % (PYTHON, self.path)
vlog("# Running", cmd.decode("utf-8"))
normalizenewlines = os.name == 'nt'
result = self._runcommand(cmd, env, normalizenewlines=normalizenewlines)
@@ -1564,7 +1582,7 @@
NEEDESCAPE = re.compile(br'[\x00-\x08\x0b-\x1f\x7f-\xff]').search
ESCAPESUB = re.compile(br'[\x00-\x08\x0b-\x1f\\\x7f-\xff]').sub
- ESCAPEMAP = dict((bchr(i), br'\x%02x' % i) for i in range(256))
+ ESCAPEMAP = {bchr(i): br'\x%02x' % i for i in range(256)}
ESCAPEMAP.update({b'\\': b'\\\\', b'\r': br'\r'})
def __init__(self, path, *args, **kwds):
@@ -1575,7 +1593,7 @@
super(TTest, self).__init__(path, *args, **kwds)
if case:
casepath = b'#'.join(case)
- self.name = '%s#%s' % (self.name, _strpath(casepath))
+ self.name = '%s#%s' % (self.name, _bytes2sys(casepath))
self.errpath = b'%s#%s.err' % (self.errpath[:-4], casepath)
self._tmpname += b'-%s' % casepath
self._have = {}
@@ -2081,12 +2099,10 @@
for line in lines:
if line.startswith(TTest.SKIPPED_PREFIX):
line = line.splitlines()[0]
- missing.append(
- line[len(TTest.SKIPPED_PREFIX) :].decode('utf-8')
- )
+ missing.append(_bytes2sys(line[len(TTest.SKIPPED_PREFIX) :]))
elif line.startswith(TTest.FAILED_PREFIX):
line = line.splitlines()[0]
- failed.append(line[len(TTest.FAILED_PREFIX) :].decode('utf-8'))
+ failed.append(_bytes2sys(line[len(TTest.FAILED_PREFIX) :]))
return missing, failed
@@ -2213,7 +2229,7 @@
v = self._options.view
subprocess.call(
r'"%s" "%s" "%s"'
- % (v, _strpath(test.refpath), _strpath(test.errpath)),
+ % (v, _bytes2sys(test.refpath), _bytes2sys(test.errpath)),
shell=True,
)
else:
@@ -2535,7 +2551,7 @@
def savetimes(outputdir, result):
saved = dict(loadtimes(outputdir))
maxruns = 5
- skipped = set([str(t[0]) for t in result.skipped])
+ skipped = {str(t[0]) for t in result.skipped}
for tdata in result.times:
test, real = tdata[0], tdata[3]
if test not in skipped:
@@ -2702,7 +2718,7 @@
opts = ''
withhg = self._runner.options.with_hg
if withhg:
- opts += ' --with-hg=%s ' % shellquote(_strpath(withhg))
+ opts += ' --with-hg=%s ' % shellquote(_bytes2sys(withhg))
rtc = '%s %s %s %s' % (sysexecutable, sys.argv[0], opts, test)
data = pread(bisectcmd + ['--command', rtc])
m = re.search(
@@ -2748,7 +2764,7 @@
@staticmethod
def _writexunit(result, outf):
# See http://llg.cubic.org/docs/junit/ for a reference.
- timesd = dict((t[0], t[3]) for t in result.times)
+ timesd = {t[0]: t[3] for t in result.times}
doc = minidom.Document()
s = doc.createElement('testsuite')
s.setAttribute('errors', "0") # TODO
@@ -2944,7 +2960,7 @@
try:
parser = parser or getparser()
options = parseargs(args, parser)
- tests = [_bytespath(a) for a in options.tests]
+ tests = [_sys2bytes(a) for a in options.tests]
if options.test_list is not None:
for listfile in options.test_list:
with open(listfile, 'rb') as f:
@@ -2976,7 +2992,7 @@
testdir = os.path.join(testdir, pathname)
self._testdir = osenvironb[b'TESTDIR'] = testdir
if self.options.outputdir:
- self._outputdir = canonpath(_bytespath(self.options.outputdir))
+ self._outputdir = canonpath(_sys2bytes(self.options.outputdir))
else:
self._outputdir = getcwdb()
if testdescs and pathname:
@@ -2991,9 +3007,15 @@
# we do the randomness ourself to know what seed is used
os.environ['PYTHONHASHSEED'] = str(random.getrandbits(32))
+ # Rayon (Rust crate for multi-threading) will use all logical CPU cores
+ # by default, causing thrashing on high-cpu-count systems.
+ # Setting its limit to 3 during tests should still let us uncover
+ # multi-threading bugs while keeping the thrashing reasonable.
+ os.environ.setdefault("RAYON_NUM_THREADS", "3")
+
if self.options.tmpdir:
self.options.keep_tmpdir = True
- tmpdir = _bytespath(self.options.tmpdir)
+ tmpdir = _sys2bytes(self.options.tmpdir)
if os.path.exists(tmpdir):
# Meaning of tmpdir has changed since 1.3: we used to create
# HGTMP inside tmpdir; now HGTMP is tmpdir. So fail if
@@ -3022,7 +3044,7 @@
os.makedirs(self._tmpbindir)
normbin = os.path.normpath(os.path.abspath(whg))
- normbin = normbin.replace(os.sep.encode('ascii'), b'/')
+ normbin = normbin.replace(_sys2bytes(os.sep), b'/')
# Other Python scripts in the test harness need to
# `import mercurial`. If `hg` is a Python script, we assume
@@ -3071,11 +3093,11 @@
osenvironb[b"BINDIR"] = self._bindir
osenvironb[b"PYTHON"] = PYTHON
- fileb = _bytespath(__file__)
+ fileb = _sys2bytes(__file__)
runtestdir = os.path.abspath(os.path.dirname(fileb))
osenvironb[b'RUNTESTDIR'] = runtestdir
if PYTHON3:
- sepb = _bytespath(os.pathsep)
+ sepb = _sys2bytes(os.pathsep)
else:
sepb = os.pathsep
path = [self._bindir, runtestdir] + osenvironb[b"PATH"].split(sepb)
@@ -3109,6 +3131,13 @@
if self.options.pure:
os.environ["HGTEST_RUN_TESTS_PURE"] = "--pure"
os.environ["HGMODULEPOLICY"] = "py"
+ if self.options.rust:
+ os.environ["HGMODULEPOLICY"] = "rust+c"
+ if self.options.no_rust:
+ current_policy = os.environ.get("HGMODULEPOLICY", "")
+ if current_policy.startswith("rust+"):
+ os.environ["HGMODULEPOLICY"] = current_policy[len("rust+") :]
+ os.environ.pop("HGWITHRUSTEXT", None)
if self.options.allow_slow_tests:
os.environ["HGTEST_SLOW"] = "slow"
@@ -3135,14 +3164,14 @@
'extensions.logexceptions=%s' % logexceptions.decode('utf-8')
)
- vlog("# Using TESTDIR", _strpath(self._testdir))
- vlog("# Using RUNTESTDIR", _strpath(osenvironb[b'RUNTESTDIR']))
- vlog("# Using HGTMP", _strpath(self._hgtmp))
+ vlog("# Using TESTDIR", _bytes2sys(self._testdir))
+ vlog("# Using RUNTESTDIR", _bytes2sys(osenvironb[b'RUNTESTDIR']))
+ vlog("# Using HGTMP", _bytes2sys(self._hgtmp))
vlog("# Using PATH", os.environ["PATH"])
vlog(
- "# Using", _strpath(IMPL_PATH), _strpath(osenvironb[IMPL_PATH]),
+ "# Using", _bytes2sys(IMPL_PATH), _bytes2sys(osenvironb[IMPL_PATH]),
)
- vlog("# Writing to directory", _strpath(self._outputdir))
+ vlog("# Writing to directory", _bytes2sys(self._outputdir))
try:
return self._runtests(testdescs) or 0
@@ -3160,7 +3189,7 @@
if self.options.changed:
proc = Popen4(
b'hg st --rev "%s" -man0 .'
- % _bytespath(self.options.changed),
+ % _sys2bytes(self.options.changed),
None,
0,
)
@@ -3354,7 +3383,7 @@
tmpdir = os.path.join(self._hgtmp, b'child%d' % count)
# extra keyword parameters. 'case' is used by .t tests
- kwds = dict((k, testdesc[k]) for k in ['case'] if k in testdesc)
+ kwds = {k: testdesc[k] for k in ['case'] if k in testdesc}
t = testcls(
refpath,
@@ -3366,10 +3395,10 @@
timeout=self.options.timeout,
startport=self._getport(count),
extraconfigopts=self.options.extra_config_opt,
- py3warnings=self.options.py3_warnings,
shell=self.options.shell,
hgcommand=self._hgcommand,
usechg=bool(self.options.with_chg or self.options.chg),
+ chgdebug=self.options.chg_debug,
useipv6=useipv6,
**kwds
)
@@ -3381,7 +3410,7 @@
if self.options.keep_tmpdir:
return
- vlog("# Cleaning up HGTMP", _strpath(self._hgtmp))
+ vlog("# Cleaning up HGTMP", _bytes2sys(self._hgtmp))
shutil.rmtree(self._hgtmp, True)
for f in self._createdfiles:
try:
@@ -3440,18 +3469,21 @@
compiler = ''
if self.options.compiler:
compiler = '--compiler ' + self.options.compiler
+ setup_opts = b""
if self.options.pure:
- pure = b"--pure"
- else:
- pure = b""
+ setup_opts = b"--pure"
+ elif self.options.rust:
+ setup_opts = b"--rust"
+ elif self.options.no_rust:
+ setup_opts = b"--no-rust"
# Run installer in hg root
script = os.path.realpath(sys.argv[0])
exe = sysexecutable
if PYTHON3:
- compiler = _bytespath(compiler)
- script = _bytespath(script)
- exe = _bytespath(exe)
+ compiler = _sys2bytes(compiler)
+ script = _sys2bytes(script)
+ exe = _sys2bytes(exe)
hgroot = os.path.dirname(os.path.dirname(script))
self._hgroot = hgroot
os.chdir(hgroot)
@@ -3463,14 +3495,14 @@
# when they happen.
nohome = b''
cmd = (
- b'"%(exe)s" setup.py %(pure)s clean --all'
+ b'"%(exe)s" setup.py %(setup_opts)s clean --all'
b' build %(compiler)s --build-base="%(base)s"'
b' install --force --prefix="%(prefix)s"'
b' --install-lib="%(libdir)s"'
b' --install-scripts="%(bindir)s" %(nohome)s >%(logfile)s 2>&1'
% {
b'exe': exe,
- b'pure': pure,
+ b'setup_opts': setup_opts,
b'compiler': compiler,
b'base': os.path.join(self._hgtmp, b"build"),
b'prefix': self._installdir,
@@ -3493,7 +3525,7 @@
makedirs(self._bindir)
vlog("# Running", cmd.decode("utf-8"))
- if subprocess.call(_strpath(cmd), shell=True) == 0:
+ if subprocess.call(_bytes2sys(cmd), shell=True) == 0:
if not self.options.verbose:
try:
os.remove(installerrs)
@@ -3512,15 +3544,6 @@
self._usecorrectpython()
- if self.options.py3_warnings and not self.options.anycoverage:
- vlog("# Updating hg command to enable Py3k Warnings switch")
- with open(os.path.join(self._bindir, 'hg'), 'rb') as f:
- lines = [line.rstrip() for line in f]
- lines[0] += ' -3'
- with open(os.path.join(self._bindir, 'hg'), 'wb') as f:
- for line in lines:
- f.write(line + '\n')
-
hgbat = os.path.join(self._bindir, b'hg.bat')
if os.path.isfile(hgbat):
# hg.bat expects to be put in bin/scripts while run-tests.py
@@ -3582,7 +3605,7 @@
cmd = b'"%s" -c "import mercurial; print (mercurial.__path__[0])"'
cmd = cmd % PYTHON
if PYTHON3:
- cmd = _strpath(cmd)
+ cmd = _bytes2sys(cmd)
p = subprocess.Popen(cmd, stdout=subprocess.PIPE, shell=True)
out, err = p.communicate()
@@ -3628,33 +3651,33 @@
# chdir is the easiest way to get short, relative paths in the
# output.
os.chdir(self._hgroot)
- covdir = os.path.join(_strpath(self._installdir), '..', 'coverage')
+ covdir = os.path.join(_bytes2sys(self._installdir), '..', 'coverage')
cov = coverage(data_file=os.path.join(covdir, 'cov'))
# Map install directory paths back to source directory.
- cov.config.paths['srcdir'] = ['.', _strpath(self._pythondir)]
+ cov.config.paths['srcdir'] = ['.', _bytes2sys(self._pythondir)]
cov.combine()
omit = [
- _strpath(os.path.join(x, b'*'))
+ _bytes2sys(os.path.join(x, b'*'))
for x in [self._bindir, self._testdir]
]
cov.report(ignore_errors=True, omit=omit)
if self.options.htmlcov:
- htmldir = os.path.join(_strpath(self._outputdir), 'htmlcov')
+ htmldir = os.path.join(_bytes2sys(self._outputdir), 'htmlcov')
cov.html_report(directory=htmldir, omit=omit)
if self.options.annotate:
- adir = os.path.join(_strpath(self._outputdir), 'annotated')
+ adir = os.path.join(_bytes2sys(self._outputdir), 'annotated')
if not os.path.isdir(adir):
os.mkdir(adir)
cov.annotate(directory=adir, omit=omit)
def _findprogram(self, program):
"""Search PATH for a executable program"""
- dpb = _bytespath(os.defpath)
- sepb = _bytespath(os.pathsep)
+ dpb = _sys2bytes(os.defpath)
+ sepb = _sys2bytes(os.pathsep)
for p in osenvironb.get(b'PATH', dpb).split(sepb):
name = os.path.join(p, program)
if os.name == 'nt' or os.access(name, os.X_OK):
@@ -3669,7 +3692,7 @@
found = self._findprogram(p)
p = p.decode("utf-8")
if found:
- vlog("# Found prerequisite", p, "at", _strpath(found))
+ vlog("# Found prerequisite", p, "at", _bytes2sys(found))
else:
print("WARNING: Did not find prerequisite tool: %s " % p)
--- a/tests/simplestorerepo.py Mon Apr 13 16:30:13 2020 +0300
+++ b/tests/simplestorerepo.py Thu Apr 16 22:51:09 2020 +0530
@@ -588,7 +588,7 @@
start = nullid
if stop is None:
stop = []
- stoprevs = set([self.rev(n) for n in stop])
+ stoprevs = {self.rev(n) for n in stop}
startrev = self.rev(start)
reachable = {startrev}
heads = {startrev}
--- a/tests/test-acl.t Mon Apr 13 16:30:13 2020 +0300
+++ b/tests/test-acl.t Thu Apr 16 22:51:09 2020 +0530
@@ -109,17 +109,17 @@
f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
911600dab2ae7a9baff75958b84fe606851ce955
bundle2-output-bundle: "HG20", 5 parts total
- bundle2-output-part: "replycaps" 205 bytes payload
+ bundle2-output-part: "replycaps" 224 bytes payload
bundle2-output-part: "check:phases" 24 bytes payload
- bundle2-output-part: "check:heads" streamed payload
+ bundle2-output-part: "check:updated-heads" streamed payload
bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload
bundle2-output-part: "phase-heads" 24 bytes payload
bundle2-input-bundle: with-transaction
bundle2-input-part: "replycaps" supported
- bundle2-input-part: total payload size 205
+ bundle2-input-part: total payload size 224
bundle2-input-part: "check:phases" supported
bundle2-input-part: total payload size 24
- bundle2-input-part: "check:heads" supported
+ bundle2-input-part: "check:updated-heads" supported
bundle2-input-part: total payload size 20
bundle2-input-part: "changegroup" (params: 1 mandatory) supported
adding changesets
@@ -167,6 +167,7 @@
listing keys for "phases"
checking for updated bookmarks
listing keys for "bookmarks"
+ invalid branch cache (served): tip differs
listing keys for "bookmarks"
3 changesets found
list of changesets:
@@ -174,18 +175,19 @@
f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
911600dab2ae7a9baff75958b84fe606851ce955
bundle2-output-bundle: "HG20", 5 parts total
- bundle2-output-part: "replycaps" 205 bytes payload
+ bundle2-output-part: "replycaps" 224 bytes payload
bundle2-output-part: "check:phases" 24 bytes payload
- bundle2-output-part: "check:heads" streamed payload
+ bundle2-output-part: "check:updated-heads" streamed payload
bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload
bundle2-output-part: "phase-heads" 24 bytes payload
bundle2-input-bundle: with-transaction
bundle2-input-part: "replycaps" supported
- bundle2-input-part: total payload size 205
+ bundle2-input-part: total payload size 224
bundle2-input-part: "check:phases" supported
bundle2-input-part: total payload size 24
- bundle2-input-part: "check:heads" supported
+ bundle2-input-part: "check:updated-heads" supported
bundle2-input-part: total payload size 20
+ invalid branch cache (served): tip differs
bundle2-input-part: "changegroup" (params: 1 mandatory) supported
adding changesets
add changeset ef1ea85a6374
@@ -234,6 +236,7 @@
listing keys for "phases"
checking for updated bookmarks
listing keys for "bookmarks"
+ invalid branch cache (served): tip differs
listing keys for "bookmarks"
3 changesets found
list of changesets:
@@ -241,18 +244,19 @@
f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
911600dab2ae7a9baff75958b84fe606851ce955
bundle2-output-bundle: "HG20", 5 parts total
- bundle2-output-part: "replycaps" 205 bytes payload
+ bundle2-output-part: "replycaps" 224 bytes payload
bundle2-output-part: "check:phases" 24 bytes payload
- bundle2-output-part: "check:heads" streamed payload
+ bundle2-output-part: "check:updated-heads" streamed payload
bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload
bundle2-output-part: "phase-heads" 24 bytes payload
bundle2-input-bundle: with-transaction
bundle2-input-part: "replycaps" supported
- bundle2-input-part: total payload size 205
+ bundle2-input-part: total payload size 224
bundle2-input-part: "check:phases" supported
bundle2-input-part: total payload size 24
- bundle2-input-part: "check:heads" supported
+ bundle2-input-part: "check:updated-heads" supported
bundle2-input-part: total payload size 20
+ invalid branch cache (served): tip differs
bundle2-input-part: "changegroup" (params: 1 mandatory) supported
adding changesets
add changeset ef1ea85a6374
@@ -311,6 +315,7 @@
listing keys for "phases"
checking for updated bookmarks
listing keys for "bookmarks"
+ invalid branch cache (served): tip differs
listing keys for "bookmarks"
3 changesets found
list of changesets:
@@ -318,18 +323,19 @@
f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
911600dab2ae7a9baff75958b84fe606851ce955
bundle2-output-bundle: "HG20", 5 parts total
- bundle2-output-part: "replycaps" 205 bytes payload
+ bundle2-output-part: "replycaps" 224 bytes payload
bundle2-output-part: "check:phases" 24 bytes payload
- bundle2-output-part: "check:heads" streamed payload
+ bundle2-output-part: "check:updated-heads" streamed payload
bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload
bundle2-output-part: "phase-heads" 24 bytes payload
bundle2-input-bundle: with-transaction
bundle2-input-part: "replycaps" supported
- bundle2-input-part: total payload size 205
+ bundle2-input-part: total payload size 224
bundle2-input-part: "check:phases" supported
bundle2-input-part: total payload size 24
- bundle2-input-part: "check:heads" supported
+ bundle2-input-part: "check:updated-heads" supported
bundle2-input-part: total payload size 20
+ invalid branch cache (served): tip differs
bundle2-input-part: "changegroup" (params: 1 mandatory) supported
adding changesets
add changeset ef1ea85a6374
@@ -379,6 +385,7 @@
listing keys for "phases"
checking for updated bookmarks
listing keys for "bookmarks"
+ invalid branch cache (served): tip differs
listing keys for "bookmarks"
3 changesets found
list of changesets:
@@ -386,18 +393,19 @@
f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
911600dab2ae7a9baff75958b84fe606851ce955
bundle2-output-bundle: "HG20", 5 parts total
- bundle2-output-part: "replycaps" 205 bytes payload
+ bundle2-output-part: "replycaps" 224 bytes payload
bundle2-output-part: "check:phases" 24 bytes payload
- bundle2-output-part: "check:heads" streamed payload
+ bundle2-output-part: "check:updated-heads" streamed payload
bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload
bundle2-output-part: "phase-heads" 24 bytes payload
bundle2-input-bundle: with-transaction
bundle2-input-part: "replycaps" supported
- bundle2-input-part: total payload size 205
+ bundle2-input-part: total payload size 224
bundle2-input-part: "check:phases" supported
bundle2-input-part: total payload size 24
- bundle2-input-part: "check:heads" supported
+ bundle2-input-part: "check:updated-heads" supported
bundle2-input-part: total payload size 20
+ invalid branch cache (served): tip differs
bundle2-input-part: "changegroup" (params: 1 mandatory) supported
adding changesets
add changeset ef1ea85a6374
@@ -452,6 +460,7 @@
listing keys for "phases"
checking for updated bookmarks
listing keys for "bookmarks"
+ invalid branch cache (served): tip differs
listing keys for "bookmarks"
3 changesets found
list of changesets:
@@ -459,18 +468,19 @@
f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
911600dab2ae7a9baff75958b84fe606851ce955
bundle2-output-bundle: "HG20", 5 parts total
- bundle2-output-part: "replycaps" 205 bytes payload
+ bundle2-output-part: "replycaps" 224 bytes payload
bundle2-output-part: "check:phases" 24 bytes payload
- bundle2-output-part: "check:heads" streamed payload
+ bundle2-output-part: "check:updated-heads" streamed payload
bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload
bundle2-output-part: "phase-heads" 24 bytes payload
bundle2-input-bundle: with-transaction
bundle2-input-part: "replycaps" supported
- bundle2-input-part: total payload size 205
+ bundle2-input-part: total payload size 224
bundle2-input-part: "check:phases" supported
bundle2-input-part: total payload size 24
- bundle2-input-part: "check:heads" supported
+ bundle2-input-part: "check:updated-heads" supported
bundle2-input-part: total payload size 20
+ invalid branch cache (served): tip differs
bundle2-input-part: "changegroup" (params: 1 mandatory) supported
adding changesets
add changeset ef1ea85a6374
@@ -522,6 +532,7 @@
listing keys for "phases"
checking for updated bookmarks
listing keys for "bookmarks"
+ invalid branch cache (served): tip differs
listing keys for "bookmarks"
3 changesets found
list of changesets:
@@ -529,18 +540,19 @@
f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
911600dab2ae7a9baff75958b84fe606851ce955
bundle2-output-bundle: "HG20", 5 parts total
- bundle2-output-part: "replycaps" 205 bytes payload
+ bundle2-output-part: "replycaps" 224 bytes payload
bundle2-output-part: "check:phases" 24 bytes payload
- bundle2-output-part: "check:heads" streamed payload
+ bundle2-output-part: "check:updated-heads" streamed payload
bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload
bundle2-output-part: "phase-heads" 24 bytes payload
bundle2-input-bundle: with-transaction
bundle2-input-part: "replycaps" supported
- bundle2-input-part: total payload size 205
+ bundle2-input-part: total payload size 224
bundle2-input-part: "check:phases" supported
bundle2-input-part: total payload size 24
- bundle2-input-part: "check:heads" supported
+ bundle2-input-part: "check:updated-heads" supported
bundle2-input-part: total payload size 20
+ invalid branch cache (served): tip differs
bundle2-input-part: "changegroup" (params: 1 mandatory) supported
adding changesets
add changeset ef1ea85a6374
@@ -597,6 +609,7 @@
listing keys for "phases"
checking for updated bookmarks
listing keys for "bookmarks"
+ invalid branch cache (served): tip differs
listing keys for "bookmarks"
3 changesets found
list of changesets:
@@ -604,18 +617,19 @@
f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
911600dab2ae7a9baff75958b84fe606851ce955
bundle2-output-bundle: "HG20", 5 parts total
- bundle2-output-part: "replycaps" 205 bytes payload
+ bundle2-output-part: "replycaps" 224 bytes payload
bundle2-output-part: "check:phases" 24 bytes payload
- bundle2-output-part: "check:heads" streamed payload
+ bundle2-output-part: "check:updated-heads" streamed payload
bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload
bundle2-output-part: "phase-heads" 24 bytes payload
bundle2-input-bundle: with-transaction
bundle2-input-part: "replycaps" supported
- bundle2-input-part: total payload size 205
+ bundle2-input-part: total payload size 224
bundle2-input-part: "check:phases" supported
bundle2-input-part: total payload size 24
- bundle2-input-part: "check:heads" supported
+ bundle2-input-part: "check:updated-heads" supported
bundle2-input-part: total payload size 20
+ invalid branch cache (served): tip differs
bundle2-input-part: "changegroup" (params: 1 mandatory) supported
adding changesets
add changeset ef1ea85a6374
@@ -669,6 +683,7 @@
listing keys for "phases"
checking for updated bookmarks
listing keys for "bookmarks"
+ invalid branch cache (served): tip differs
listing keys for "bookmarks"
3 changesets found
list of changesets:
@@ -676,18 +691,19 @@
f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
911600dab2ae7a9baff75958b84fe606851ce955
bundle2-output-bundle: "HG20", 5 parts total
- bundle2-output-part: "replycaps" 205 bytes payload
+ bundle2-output-part: "replycaps" 224 bytes payload
bundle2-output-part: "check:phases" 24 bytes payload
- bundle2-output-part: "check:heads" streamed payload
+ bundle2-output-part: "check:updated-heads" streamed payload
bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload
bundle2-output-part: "phase-heads" 24 bytes payload
bundle2-input-bundle: with-transaction
bundle2-input-part: "replycaps" supported
- bundle2-input-part: total payload size 205
+ bundle2-input-part: total payload size 224
bundle2-input-part: "check:phases" supported
bundle2-input-part: total payload size 24
- bundle2-input-part: "check:heads" supported
+ bundle2-input-part: "check:updated-heads" supported
bundle2-input-part: total payload size 20
+ invalid branch cache (served): tip differs
bundle2-input-part: "changegroup" (params: 1 mandatory) supported
adding changesets
add changeset ef1ea85a6374
@@ -742,27 +758,29 @@
listing keys for "phases"
checking for updated bookmarks
listing keys for "bookmarks"
+ invalid branch cache (served): tip differs
listing keys for "bookmarks"
1 changesets found
list of changesets:
ef1ea85a6374b77d6da9dcda9541f498f2d17df7
bundle2-output-bundle: "HG20", 7 parts total
- bundle2-output-part: "replycaps" 205 bytes payload
+ bundle2-output-part: "replycaps" 224 bytes payload
bundle2-output-part: "check:bookmarks" 37 bytes payload
bundle2-output-part: "check:phases" 24 bytes payload
- bundle2-output-part: "check:heads" streamed payload
+ bundle2-output-part: "check:updated-heads" streamed payload
bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload
bundle2-output-part: "phase-heads" 24 bytes payload
bundle2-output-part: "bookmarks" 37 bytes payload
bundle2-input-bundle: with-transaction
bundle2-input-part: "replycaps" supported
- bundle2-input-part: total payload size 205
+ bundle2-input-part: total payload size 224
bundle2-input-part: "check:bookmarks" supported
bundle2-input-part: total payload size 37
bundle2-input-part: "check:phases" supported
bundle2-input-part: total payload size 24
- bundle2-input-part: "check:heads" supported
+ bundle2-input-part: "check:updated-heads" supported
bundle2-input-part: total payload size 20
+ invalid branch cache (served): tip differs
bundle2-input-part: "changegroup" (params: 1 mandatory) supported
adding changesets
add changeset ef1ea85a6374
@@ -828,27 +846,29 @@
listing keys for "phases"
checking for updated bookmarks
listing keys for "bookmarks"
+ invalid branch cache (served): tip differs
listing keys for "bookmarks"
1 changesets found
list of changesets:
ef1ea85a6374b77d6da9dcda9541f498f2d17df7
bundle2-output-bundle: "HG20", 7 parts total
- bundle2-output-part: "replycaps" 205 bytes payload
+ bundle2-output-part: "replycaps" 224 bytes payload
bundle2-output-part: "check:bookmarks" 37 bytes payload
bundle2-output-part: "check:phases" 24 bytes payload
- bundle2-output-part: "check:heads" streamed payload
+ bundle2-output-part: "check:updated-heads" streamed payload
bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload
bundle2-output-part: "phase-heads" 24 bytes payload
bundle2-output-part: "bookmarks" 37 bytes payload
bundle2-input-bundle: with-transaction
bundle2-input-part: "replycaps" supported
- bundle2-input-part: total payload size 205
+ bundle2-input-part: total payload size 224
bundle2-input-part: "check:bookmarks" supported
bundle2-input-part: total payload size 37
bundle2-input-part: "check:phases" supported
bundle2-input-part: total payload size 24
- bundle2-input-part: "check:heads" supported
+ bundle2-input-part: "check:updated-heads" supported
bundle2-input-part: total payload size 20
+ invalid branch cache (served): tip differs
bundle2-input-part: "changegroup" (params: 1 mandatory) supported
adding changesets
add changeset ef1ea85a6374
@@ -914,6 +934,7 @@
listing keys for "phases"
checking for updated bookmarks
listing keys for "bookmarks"
+ invalid branch cache (served): tip differs
listing keys for "bookmarks"
3 changesets found
list of changesets:
@@ -921,18 +942,19 @@
f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
911600dab2ae7a9baff75958b84fe606851ce955
bundle2-output-bundle: "HG20", 5 parts total
- bundle2-output-part: "replycaps" 205 bytes payload
+ bundle2-output-part: "replycaps" 224 bytes payload
bundle2-output-part: "check:phases" 24 bytes payload
- bundle2-output-part: "check:heads" streamed payload
+ bundle2-output-part: "check:updated-heads" streamed payload
bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload
bundle2-output-part: "phase-heads" 24 bytes payload
bundle2-input-bundle: with-transaction
bundle2-input-part: "replycaps" supported
- bundle2-input-part: total payload size 205
+ bundle2-input-part: total payload size 224
bundle2-input-part: "check:phases" supported
bundle2-input-part: total payload size 24
- bundle2-input-part: "check:heads" supported
+ bundle2-input-part: "check:updated-heads" supported
bundle2-input-part: total payload size 20
+ invalid branch cache (served): tip differs
bundle2-input-part: "changegroup" (params: 1 mandatory) supported
adding changesets
add changeset ef1ea85a6374
@@ -998,6 +1020,7 @@
listing keys for "phases"
checking for updated bookmarks
listing keys for "bookmarks"
+ invalid branch cache (served): tip differs
listing keys for "bookmarks"
3 changesets found
list of changesets:
@@ -1005,18 +1028,19 @@
f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
911600dab2ae7a9baff75958b84fe606851ce955
bundle2-output-bundle: "HG20", 5 parts total
- bundle2-output-part: "replycaps" 205 bytes payload
+ bundle2-output-part: "replycaps" 224 bytes payload
bundle2-output-part: "check:phases" 24 bytes payload
- bundle2-output-part: "check:heads" streamed payload
+ bundle2-output-part: "check:updated-heads" streamed payload
bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload
bundle2-output-part: "phase-heads" 24 bytes payload
bundle2-input-bundle: with-transaction
bundle2-input-part: "replycaps" supported
- bundle2-input-part: total payload size 205
+ bundle2-input-part: total payload size 224
bundle2-input-part: "check:phases" supported
bundle2-input-part: total payload size 24
- bundle2-input-part: "check:heads" supported
+ bundle2-input-part: "check:updated-heads" supported
bundle2-input-part: total payload size 20
+ invalid branch cache (served): tip differs
bundle2-input-part: "changegroup" (params: 1 mandatory) supported
adding changesets
add changeset ef1ea85a6374
@@ -1079,6 +1103,7 @@
listing keys for "phases"
checking for updated bookmarks
listing keys for "bookmarks"
+ invalid branch cache (served): tip differs
listing keys for "bookmarks"
3 changesets found
list of changesets:
@@ -1086,18 +1111,19 @@
f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
911600dab2ae7a9baff75958b84fe606851ce955
bundle2-output-bundle: "HG20", 5 parts total
- bundle2-output-part: "replycaps" 205 bytes payload
+ bundle2-output-part: "replycaps" 224 bytes payload
bundle2-output-part: "check:phases" 24 bytes payload
- bundle2-output-part: "check:heads" streamed payload
+ bundle2-output-part: "check:updated-heads" streamed payload
bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload
bundle2-output-part: "phase-heads" 24 bytes payload
bundle2-input-bundle: with-transaction
bundle2-input-part: "replycaps" supported
- bundle2-input-part: total payload size 205
+ bundle2-input-part: total payload size 224
bundle2-input-part: "check:phases" supported
bundle2-input-part: total payload size 24
- bundle2-input-part: "check:heads" supported
+ bundle2-input-part: "check:updated-heads" supported
bundle2-input-part: total payload size 20
+ invalid branch cache (served): tip differs
bundle2-input-part: "changegroup" (params: 1 mandatory) supported
adding changesets
add changeset ef1ea85a6374
@@ -1155,6 +1181,7 @@
listing keys for "phases"
checking for updated bookmarks
listing keys for "bookmarks"
+ invalid branch cache (served): tip differs
listing keys for "bookmarks"
3 changesets found
list of changesets:
@@ -1162,18 +1189,19 @@
f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
911600dab2ae7a9baff75958b84fe606851ce955
bundle2-output-bundle: "HG20", 5 parts total
- bundle2-output-part: "replycaps" 205 bytes payload
+ bundle2-output-part: "replycaps" 224 bytes payload
bundle2-output-part: "check:phases" 24 bytes payload
- bundle2-output-part: "check:heads" streamed payload
+ bundle2-output-part: "check:updated-heads" streamed payload
bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload
bundle2-output-part: "phase-heads" 24 bytes payload
bundle2-input-bundle: with-transaction
bundle2-input-part: "replycaps" supported
- bundle2-input-part: total payload size 205
+ bundle2-input-part: total payload size 224
bundle2-input-part: "check:phases" supported
bundle2-input-part: total payload size 24
- bundle2-input-part: "check:heads" supported
+ bundle2-input-part: "check:updated-heads" supported
bundle2-input-part: total payload size 20
+ invalid branch cache (served): tip differs
bundle2-input-part: "changegroup" (params: 1 mandatory) supported
adding changesets
add changeset ef1ea85a6374
@@ -1242,6 +1270,7 @@
listing keys for "phases"
checking for updated bookmarks
listing keys for "bookmarks"
+ invalid branch cache (served): tip differs
listing keys for "bookmarks"
3 changesets found
list of changesets:
@@ -1249,18 +1278,19 @@
f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
911600dab2ae7a9baff75958b84fe606851ce955
bundle2-output-bundle: "HG20", 5 parts total
- bundle2-output-part: "replycaps" 205 bytes payload
+ bundle2-output-part: "replycaps" 224 bytes payload
bundle2-output-part: "check:phases" 24 bytes payload
- bundle2-output-part: "check:heads" streamed payload
+ bundle2-output-part: "check:updated-heads" streamed payload
bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload
bundle2-output-part: "phase-heads" 24 bytes payload
bundle2-input-bundle: with-transaction
bundle2-input-part: "replycaps" supported
- bundle2-input-part: total payload size 205
+ bundle2-input-part: total payload size 224
bundle2-input-part: "check:phases" supported
bundle2-input-part: total payload size 24
- bundle2-input-part: "check:heads" supported
+ bundle2-input-part: "check:updated-heads" supported
bundle2-input-part: total payload size 20
+ invalid branch cache (served): tip differs
bundle2-input-part: "changegroup" (params: 1 mandatory) supported
adding changesets
add changeset ef1ea85a6374
@@ -1330,6 +1360,7 @@
listing keys for "phases"
checking for updated bookmarks
listing keys for "bookmarks"
+ invalid branch cache (served): tip differs
listing keys for "bookmarks"
3 changesets found
list of changesets:
@@ -1337,18 +1368,19 @@
f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
911600dab2ae7a9baff75958b84fe606851ce955
bundle2-output-bundle: "HG20", 5 parts total
- bundle2-output-part: "replycaps" 205 bytes payload
+ bundle2-output-part: "replycaps" 224 bytes payload
bundle2-output-part: "check:phases" 24 bytes payload
- bundle2-output-part: "check:heads" streamed payload
+ bundle2-output-part: "check:updated-heads" streamed payload
bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload
bundle2-output-part: "phase-heads" 24 bytes payload
bundle2-input-bundle: with-transaction
bundle2-input-part: "replycaps" supported
- bundle2-input-part: total payload size 205
+ bundle2-input-part: total payload size 224
bundle2-input-part: "check:phases" supported
bundle2-input-part: total payload size 24
- bundle2-input-part: "check:heads" supported
+ bundle2-input-part: "check:updated-heads" supported
bundle2-input-part: total payload size 20
+ invalid branch cache (served): tip differs
bundle2-input-part: "changegroup" (params: 1 mandatory) supported
adding changesets
add changeset ef1ea85a6374
@@ -1414,6 +1446,7 @@
listing keys for "phases"
checking for updated bookmarks
listing keys for "bookmarks"
+ invalid branch cache (served): tip differs
listing keys for "bookmarks"
3 changesets found
list of changesets:
@@ -1421,18 +1454,19 @@
f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
911600dab2ae7a9baff75958b84fe606851ce955
bundle2-output-bundle: "HG20", 5 parts total
- bundle2-output-part: "replycaps" 205 bytes payload
+ bundle2-output-part: "replycaps" 224 bytes payload
bundle2-output-part: "check:phases" 24 bytes payload
- bundle2-output-part: "check:heads" streamed payload
+ bundle2-output-part: "check:updated-heads" streamed payload
bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload
bundle2-output-part: "phase-heads" 24 bytes payload
bundle2-input-bundle: with-transaction
bundle2-input-part: "replycaps" supported
- bundle2-input-part: total payload size 205
+ bundle2-input-part: total payload size 224
bundle2-input-part: "check:phases" supported
bundle2-input-part: total payload size 24
- bundle2-input-part: "check:heads" supported
+ bundle2-input-part: "check:updated-heads" supported
bundle2-input-part: total payload size 20
+ invalid branch cache (served): tip differs
bundle2-input-part: "changegroup" (params: 1 mandatory) supported
adding changesets
add changeset ef1ea85a6374
@@ -1494,6 +1528,7 @@
listing keys for "phases"
checking for updated bookmarks
listing keys for "bookmarks"
+ invalid branch cache (served): tip differs
listing keys for "bookmarks"
3 changesets found
list of changesets:
@@ -1501,18 +1536,19 @@
f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
911600dab2ae7a9baff75958b84fe606851ce955
bundle2-output-bundle: "HG20", 5 parts total
- bundle2-output-part: "replycaps" 205 bytes payload
+ bundle2-output-part: "replycaps" 224 bytes payload
bundle2-output-part: "check:phases" 24 bytes payload
- bundle2-output-part: "check:heads" streamed payload
+ bundle2-output-part: "check:updated-heads" streamed payload
bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload
bundle2-output-part: "phase-heads" 24 bytes payload
bundle2-input-bundle: with-transaction
bundle2-input-part: "replycaps" supported
- bundle2-input-part: total payload size 205
+ bundle2-input-part: total payload size 224
bundle2-input-part: "check:phases" supported
bundle2-input-part: total payload size 24
- bundle2-input-part: "check:heads" supported
+ bundle2-input-part: "check:updated-heads" supported
bundle2-input-part: total payload size 20
+ invalid branch cache (served): tip differs
bundle2-input-part: "changegroup" (params: 1 mandatory) supported
adding changesets
add changeset ef1ea85a6374
@@ -1579,6 +1615,7 @@
listing keys for "phases"
checking for updated bookmarks
listing keys for "bookmarks"
+ invalid branch cache (served): tip differs
listing keys for "bookmarks"
3 changesets found
list of changesets:
@@ -1586,18 +1623,19 @@
f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
911600dab2ae7a9baff75958b84fe606851ce955
bundle2-output-bundle: "HG20", 5 parts total
- bundle2-output-part: "replycaps" 205 bytes payload
+ bundle2-output-part: "replycaps" 224 bytes payload
bundle2-output-part: "check:phases" 24 bytes payload
- bundle2-output-part: "check:heads" streamed payload
+ bundle2-output-part: "check:updated-heads" streamed payload
bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload
bundle2-output-part: "phase-heads" 24 bytes payload
bundle2-input-bundle: with-transaction
bundle2-input-part: "replycaps" supported
- bundle2-input-part: total payload size 205
+ bundle2-input-part: total payload size 224
bundle2-input-part: "check:phases" supported
bundle2-input-part: total payload size 24
- bundle2-input-part: "check:heads" supported
+ bundle2-input-part: "check:updated-heads" supported
bundle2-input-part: total payload size 20
+ invalid branch cache (served): tip differs
bundle2-input-part: "changegroup" (params: 1 mandatory) supported
adding changesets
add changeset ef1ea85a6374
@@ -1707,18 +1745,18 @@
911600dab2ae7a9baff75958b84fe606851ce955
e8fc755d4d8217ee5b0c2bb41558c40d43b92c01
bundle2-output-bundle: "HG20", 5 parts total
- bundle2-output-part: "replycaps" 205 bytes payload
+ bundle2-output-part: "replycaps" 224 bytes payload
bundle2-output-part: "check:phases" 48 bytes payload
- bundle2-output-part: "check:heads" streamed payload
+ bundle2-output-part: "check:updated-heads" streamed payload
bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload
bundle2-output-part: "phase-heads" 48 bytes payload
bundle2-input-bundle: with-transaction
bundle2-input-part: "replycaps" supported
- bundle2-input-part: total payload size 205
+ bundle2-input-part: total payload size 224
bundle2-input-part: "check:phases" supported
bundle2-input-part: total payload size 48
- bundle2-input-part: "check:heads" supported
- bundle2-input-part: total payload size 20
+ bundle2-input-part: "check:updated-heads" supported
+ bundle2-input-part: total payload size 40
bundle2-input-part: "changegroup" (params: 1 mandatory) supported
adding changesets
add changeset ef1ea85a6374
@@ -1793,18 +1831,18 @@
911600dab2ae7a9baff75958b84fe606851ce955
e8fc755d4d8217ee5b0c2bb41558c40d43b92c01
bundle2-output-bundle: "HG20", 5 parts total
- bundle2-output-part: "replycaps" 205 bytes payload
+ bundle2-output-part: "replycaps" 224 bytes payload
bundle2-output-part: "check:phases" 48 bytes payload
- bundle2-output-part: "check:heads" streamed payload
+ bundle2-output-part: "check:updated-heads" streamed payload
bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload
bundle2-output-part: "phase-heads" 48 bytes payload
bundle2-input-bundle: with-transaction
bundle2-input-part: "replycaps" supported
- bundle2-input-part: total payload size 205
+ bundle2-input-part: total payload size 224
bundle2-input-part: "check:phases" supported
bundle2-input-part: total payload size 48
- bundle2-input-part: "check:heads" supported
- bundle2-input-part: total payload size 20
+ bundle2-input-part: "check:updated-heads" supported
+ bundle2-input-part: total payload size 40
bundle2-input-part: "changegroup" (params: 1 mandatory) supported
adding changesets
add changeset ef1ea85a6374
@@ -1871,18 +1909,18 @@
911600dab2ae7a9baff75958b84fe606851ce955
e8fc755d4d8217ee5b0c2bb41558c40d43b92c01
bundle2-output-bundle: "HG20", 5 parts total
- bundle2-output-part: "replycaps" 205 bytes payload
+ bundle2-output-part: "replycaps" 224 bytes payload
bundle2-output-part: "check:phases" 48 bytes payload
- bundle2-output-part: "check:heads" streamed payload
+ bundle2-output-part: "check:updated-heads" streamed payload
bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload
bundle2-output-part: "phase-heads" 48 bytes payload
bundle2-input-bundle: with-transaction
bundle2-input-part: "replycaps" supported
- bundle2-input-part: total payload size 205
+ bundle2-input-part: total payload size 224
bundle2-input-part: "check:phases" supported
bundle2-input-part: total payload size 48
- bundle2-input-part: "check:heads" supported
- bundle2-input-part: total payload size 20
+ bundle2-input-part: "check:updated-heads" supported
+ bundle2-input-part: total payload size 40
bundle2-input-part: "changegroup" (params: 1 mandatory) supported
adding changesets
add changeset ef1ea85a6374
@@ -1945,18 +1983,18 @@
911600dab2ae7a9baff75958b84fe606851ce955
e8fc755d4d8217ee5b0c2bb41558c40d43b92c01
bundle2-output-bundle: "HG20", 5 parts total
- bundle2-output-part: "replycaps" 205 bytes payload
+ bundle2-output-part: "replycaps" 224 bytes payload
bundle2-output-part: "check:phases" 48 bytes payload
- bundle2-output-part: "check:heads" streamed payload
+ bundle2-output-part: "check:updated-heads" streamed payload
bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload
bundle2-output-part: "phase-heads" 48 bytes payload
bundle2-input-bundle: with-transaction
bundle2-input-part: "replycaps" supported
- bundle2-input-part: total payload size 205
+ bundle2-input-part: total payload size 224
bundle2-input-part: "check:phases" supported
bundle2-input-part: total payload size 48
- bundle2-input-part: "check:heads" supported
- bundle2-input-part: total payload size 20
+ bundle2-input-part: "check:updated-heads" supported
+ bundle2-input-part: total payload size 40
bundle2-input-part: "changegroup" (params: 1 mandatory) supported
adding changesets
add changeset ef1ea85a6374
@@ -2013,18 +2051,18 @@
911600dab2ae7a9baff75958b84fe606851ce955
e8fc755d4d8217ee5b0c2bb41558c40d43b92c01
bundle2-output-bundle: "HG20", 5 parts total
- bundle2-output-part: "replycaps" 205 bytes payload
+ bundle2-output-part: "replycaps" 224 bytes payload
bundle2-output-part: "check:phases" 48 bytes payload
- bundle2-output-part: "check:heads" streamed payload
+ bundle2-output-part: "check:updated-heads" streamed payload
bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload
bundle2-output-part: "phase-heads" 48 bytes payload
bundle2-input-bundle: with-transaction
bundle2-input-part: "replycaps" supported
- bundle2-input-part: total payload size 205
+ bundle2-input-part: total payload size 224
bundle2-input-part: "check:phases" supported
bundle2-input-part: total payload size 48
- bundle2-input-part: "check:heads" supported
- bundle2-input-part: total payload size 20
+ bundle2-input-part: "check:updated-heads" supported
+ bundle2-input-part: total payload size 40
bundle2-input-part: "changegroup" (params: 1 mandatory) supported
adding changesets
add changeset ef1ea85a6374
@@ -2104,18 +2142,18 @@
911600dab2ae7a9baff75958b84fe606851ce955
e8fc755d4d8217ee5b0c2bb41558c40d43b92c01
bundle2-output-bundle: "HG20", 5 parts total
- bundle2-output-part: "replycaps" 205 bytes payload
+ bundle2-output-part: "replycaps" 224 bytes payload
bundle2-output-part: "check:phases" 48 bytes payload
- bundle2-output-part: "check:heads" streamed payload
+ bundle2-output-part: "check:updated-heads" streamed payload
bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload
bundle2-output-part: "phase-heads" 48 bytes payload
bundle2-input-bundle: with-transaction
bundle2-input-part: "replycaps" supported
- bundle2-input-part: total payload size 205
+ bundle2-input-part: total payload size 224
bundle2-input-part: "check:phases" supported
bundle2-input-part: total payload size 48
- bundle2-input-part: "check:heads" supported
- bundle2-input-part: total payload size 20
+ bundle2-input-part: "check:updated-heads" supported
+ bundle2-input-part: total payload size 40
bundle2-input-part: "changegroup" (params: 1 mandatory) supported
adding changesets
add changeset ef1ea85a6374
@@ -2194,18 +2232,18 @@
911600dab2ae7a9baff75958b84fe606851ce955
e8fc755d4d8217ee5b0c2bb41558c40d43b92c01
bundle2-output-bundle: "HG20", 5 parts total
- bundle2-output-part: "replycaps" 205 bytes payload
+ bundle2-output-part: "replycaps" 224 bytes payload
bundle2-output-part: "check:phases" 48 bytes payload
- bundle2-output-part: "check:heads" streamed payload
+ bundle2-output-part: "check:updated-heads" streamed payload
bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload
bundle2-output-part: "phase-heads" 48 bytes payload
bundle2-input-bundle: with-transaction
bundle2-input-part: "replycaps" supported
- bundle2-input-part: total payload size 205
+ bundle2-input-part: total payload size 224
bundle2-input-part: "check:phases" supported
bundle2-input-part: total payload size 48
- bundle2-input-part: "check:heads" supported
- bundle2-input-part: total payload size 20
+ bundle2-input-part: "check:updated-heads" supported
+ bundle2-input-part: total payload size 40
bundle2-input-part: "changegroup" (params: 1 mandatory) supported
adding changesets
add changeset ef1ea85a6374
@@ -2267,18 +2305,18 @@
911600dab2ae7a9baff75958b84fe606851ce955
e8fc755d4d8217ee5b0c2bb41558c40d43b92c01
bundle2-output-bundle: "HG20", 5 parts total
- bundle2-output-part: "replycaps" 205 bytes payload
+ bundle2-output-part: "replycaps" 224 bytes payload
bundle2-output-part: "check:phases" 48 bytes payload
- bundle2-output-part: "check:heads" streamed payload
+ bundle2-output-part: "check:updated-heads" streamed payload
bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload
bundle2-output-part: "phase-heads" 48 bytes payload
bundle2-input-bundle: with-transaction
bundle2-input-part: "replycaps" supported
- bundle2-input-part: total payload size 205
+ bundle2-input-part: total payload size 224
bundle2-input-part: "check:phases" supported
bundle2-input-part: total payload size 48
- bundle2-input-part: "check:heads" supported
- bundle2-input-part: total payload size 20
+ bundle2-input-part: "check:updated-heads" supported
+ bundle2-input-part: total payload size 40
bundle2-input-part: "changegroup" (params: 1 mandatory) supported
adding changesets
add changeset ef1ea85a6374
@@ -2351,18 +2389,18 @@
911600dab2ae7a9baff75958b84fe606851ce955
e8fc755d4d8217ee5b0c2bb41558c40d43b92c01
bundle2-output-bundle: "HG20", 5 parts total
- bundle2-output-part: "replycaps" 205 bytes payload
+ bundle2-output-part: "replycaps" 224 bytes payload
bundle2-output-part: "check:phases" 48 bytes payload
- bundle2-output-part: "check:heads" streamed payload
+ bundle2-output-part: "check:updated-heads" streamed payload
bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload
bundle2-output-part: "phase-heads" 48 bytes payload
bundle2-input-bundle: with-transaction
bundle2-input-part: "replycaps" supported
- bundle2-input-part: total payload size 205
+ bundle2-input-part: total payload size 224
bundle2-input-part: "check:phases" supported
bundle2-input-part: total payload size 48
- bundle2-input-part: "check:heads" supported
- bundle2-input-part: total payload size 20
+ bundle2-input-part: "check:updated-heads" supported
+ bundle2-input-part: total payload size 40
bundle2-input-part: "changegroup" (params: 1 mandatory) supported
adding changesets
add changeset ef1ea85a6374
--- a/tests/test-archive.t Mon Apr 13 16:30:13 2020 +0300
+++ b/tests/test-archive.t Thu Apr 16 22:51:09 2020 +0530
@@ -132,7 +132,8 @@
transfer-encoding: chunked
body: size=408, sha1=8fa06531bddecc365a9f5edb0f88b65974bfe505 (no-py38 !)
- body: size=506, sha1=70926a04cb8887d0bcccf5380488100a10222def (py38 !)
+ body: size=506, sha1=70926a04cb8887d0bcccf5380488100a10222def (py38 no-py39 !)
+ body: size=505, sha1=eb823c293bedff0df4070b854e2c5cbb06d6ec62 (py39 !)
% tar.bz2 and zip disallowed should both give 403
403 Archive type not allowed: bz2
content-type: text/html; charset=ascii
@@ -161,7 +162,8 @@
transfer-encoding: chunked
body: size=426, sha1=8d87f5aba6e14f1bfea6c232985982c278b2fb0b (no-py38 !)
- body: size=506, sha1=1bd1f8e8d3701704bd4385038bd9c09b81c77f4e (py38 !)
+ body: size=506, sha1=1bd1f8e8d3701704bd4385038bd9c09b81c77f4e (py38 no-py39 !)
+ body: size=503, sha1=2d8ce8bb3816603b9683a1804a5a02c11224cb01 (py39 !)
% zip and tar.gz disallowed should both give 403
403 Archive type not allowed: zip
content-type: text/html; charset=ascii
@@ -221,7 +223,8 @@
transfer-encoding: chunked
body: size=408, sha1=8fa06531bddecc365a9f5edb0f88b65974bfe505 (no-py38 !)
- body: size=506, sha1=70926a04cb8887d0bcccf5380488100a10222def (py38 !)
+ body: size=506, sha1=70926a04cb8887d0bcccf5380488100a10222def (py38 no-py39 !)
+ body: size=505, sha1=eb823c293bedff0df4070b854e2c5cbb06d6ec62 (py39 !)
% tar.bz2 and zip disallowed should both give 403
403 Archive type not allowed: bz2
content-type: text/html; charset=ascii
@@ -250,7 +253,8 @@
transfer-encoding: chunked
body: size=426, sha1=8d87f5aba6e14f1bfea6c232985982c278b2fb0b (no-py38 !)
- body: size=506, sha1=1bd1f8e8d3701704bd4385038bd9c09b81c77f4e (py38 !)
+ body: size=506, sha1=1bd1f8e8d3701704bd4385038bd9c09b81c77f4e (py38 no-py39 !)
+ body: size=503, sha1=2d8ce8bb3816603b9683a1804a5a02c11224cb01 (py39 !)
% zip and tar.gz disallowed should both give 403
403 Archive type not allowed: zip
content-type: text/html; charset=ascii
--- a/tests/test-audit-subrepo.t Mon Apr 13 16:30:13 2020 +0300
+++ b/tests/test-audit-subrepo.t Thu Apr 16 22:51:09 2020 +0530
@@ -281,24 +281,36 @@
$ hg clone -q main main2
abort: subrepo path contains illegal component: $SUB
[255]
- $ ls main2
+ $ ls -A main2
+ .hg
+ .hgsub
+ .hgsubstate
$ SUB=sub1 hg clone -q main main3
abort: subrepo path contains illegal component: $SUB
[255]
- $ ls main3
+ $ ls -A main3
+ .hg
+ .hgsub
+ .hgsubstate
$ SUB=sub2 hg clone -q main main4
abort: subrepo path contains illegal component: $SUB
[255]
- $ ls main4
+ $ ls -A main4
+ .hg
+ .hgsub
+ .hgsubstate
on clone empty subrepo into .hg, then pull (and update), which at least fails:
$ SUB=.hg hg clone -qr0 main main5
abort: subrepo path contains illegal component: $SUB
[255]
- $ ls main5
+ $ ls -A main5
+ .hg
+ .hgsub
+ .hgsubstate
$ test -d main5/.hg/.hg
[1]
$ SUB=.hg hg -R main5 pull -u
@@ -320,7 +332,10 @@
$ SUB=.hg hg clone -q main main6
abort: subrepo path contains illegal component: $SUB
[255]
- $ ls main6
+ $ ls -A main6
+ .hg
+ .hgsub
+ .hgsubstate
$ cat main6/.hg/hgrc | grep pwned
[1]
@@ -329,7 +344,10 @@
$ SUB=.hg/foo hg clone -q main main7
abort: subrepo path contains illegal component: $SUB
[255]
- $ ls main7
+ $ ls -A main7
+ .hg
+ .hgsub
+ .hgsubstate
$ test -d main7/.hg/.hg
[1]
@@ -338,7 +356,10 @@
$ SUB=../out-of-tree-write hg clone -q main main8
abort: subrepo path contains illegal component: $SUB
[255]
- $ ls main8
+ $ ls -A main8
+ .hg
+ .hgsub
+ .hgsubstate
on clone (and update) into e.g. $HOME, which doesn't work since subrepo paths
are concatenated prior to variable expansion:
@@ -346,8 +367,8 @@
$ SUB="$TESTTMP/envvar/fakehome" hg clone -q main main9
abort: subrepo path contains illegal component: $SUB
[255]
- $ ls main9 | wc -l
- \s*0 (re)
+ $ ls -A main9 | wc -l
+ \s*3 (re)
$ ls
main
@@ -618,7 +639,8 @@
$ SUB="$FAKEHOME" hg clone -qr1 main main6
abort: subrepo path contains illegal component: $SUB
[255]
- $ ls "$FAKEHOME"
+ $ ls -A "$FAKEHOME"
+ .hg
a
$ SUB="$FAKEHOME" hg -R main6 pull -u
pulling from $TESTTMP/envvarsym/main
@@ -631,7 +653,8 @@
.hgsubstate: untracked file differs
abort: untracked files in working directory differ from files in requested revision
[255]
- $ ls "$FAKEHOME"
+ $ ls -A "$FAKEHOME"
+ .hg
a
on clone only symlink with hg-managed fakehome directory,
@@ -642,7 +665,8 @@
$ touch "$FAKEHOME/a"
$ hg -R "$FAKEHOME" ci -qAm 'add fakehome file'
$ SUB="$FAKEHOME" hg clone -qr0 main main7
- $ ls "$FAKEHOME"
+ $ ls -A "$FAKEHOME"
+ .hg
a
$ SUB="$FAKEHOME" hg -R main7 pull -uf
pulling from $TESTTMP/envvarsym/main
@@ -654,7 +678,8 @@
new changesets * (glob)
abort: subrepo path contains illegal component: $SUB
[255]
- $ ls "$FAKEHOME"
+ $ ls -A "$FAKEHOME"
+ .hg
a
$ cd ..
@@ -698,7 +723,10 @@
#else
$ hg clone -q driveletter driveletter2
- $ ls driveletter2
+ $ ls -A driveletter2
+ .hg
+ .hgsub
+ .hgsubstate
X:
#endif
--- a/tests/test-backout.t Mon Apr 13 16:30:13 2020 +0300
+++ b/tests/test-backout.t Thu Apr 16 22:51:09 2020 +0530
@@ -86,6 +86,33 @@
commit: 1 unresolved (clean)
update: (current)
phases: 5 draft
+ $ hg log -G
+ @ changeset: 4:ed99997b793d
+ | tag: tip
+ | user: test
+ | date: Thu Jan 01 00:00:05 1970 +0000
+ | summary: ypples
+ |
+ o changeset: 3:1c2161e97c0a
+ | user: test
+ | date: Thu Jan 01 00:00:04 1970 +0000
+ | summary: Backed out changeset 22cb4f70d813
+ |
+ o changeset: 2:a8c6e511cfee
+ | user: test
+ | date: Thu Jan 01 00:00:02 1970 +0000
+ | summary: grapes
+ |
+ % changeset: 1:22cb4f70d813
+ | user: test
+ | date: Thu Jan 01 00:00:01 1970 +0000
+ | summary: chair
+ |
+ o changeset: 0:a5cb2dde5805
+ user: test
+ date: Thu Jan 01 00:00:00 1970 +0000
+ summary: tomatoes
+
file that was removed is recreated
(this also tests that editor is not invoked if the commit message is
@@ -682,23 +709,24 @@
use 'hg resolve' to retry unresolved file merges
[1]
$ hg status
- $ hg debugmergestate
- * version 2 records
+ $ hg debugmergestate -v
+ v1 and v2 states match: using v2
local: b71750c4b0fdf719734971e3ef90dbeab5919a2d
other: a30dd8addae3ce71b8667868478542bc417439e6
- file extras: foo (ancestorlinknode = 91360952243723bd5b1138d5f26bd8c8564cb553)
- file: foo (record type "F", state "u", hash 0beec7b5ea3f0fdbc95d0dd47f3c5bc275da8a33)
- local path: foo (flags "")
+ file: foo (state "u")
+ local path: foo (hash 0beec7b5ea3f0fdbc95d0dd47f3c5bc275da8a33, flags "")
ancestor path: foo (node f89532f44c247a0e993d63e3a734dd781ab04708)
other path: foo (node f50039b486d6fa1a90ae51778388cad161f425ee)
+ extra: ancestorlinknode = 91360952243723bd5b1138d5f26bd8c8564cb553
$ mv .hg/merge/state2 .hg/merge/state2-moved
- $ hg debugmergestate
- * version 1 records
+ $ hg debugmergestate -v
+ no version 2 merge state
local: b71750c4b0fdf719734971e3ef90dbeab5919a2d
- file: foo (record type "F", state "u", hash 0beec7b5ea3f0fdbc95d0dd47f3c5bc275da8a33)
- local path: foo (flags "")
+ other: b71750c4b0fdf719734971e3ef90dbeab5919a2d
+ file: foo (state "u")
+ local path: foo (hash 0beec7b5ea3f0fdbc95d0dd47f3c5bc275da8a33, flags "")
ancestor path: foo (node f89532f44c247a0e993d63e3a734dd781ab04708)
- other path: foo (node not stored in v1 format)
+ other path: (node foo)
$ mv .hg/merge/state2-moved .hg/merge/state2
$ hg resolve -l # still unresolved
U foo
@@ -709,6 +737,23 @@
commit: 1 unresolved (clean)
update: (current)
phases: 3 draft
+ $ hg log -G
+ @ changeset: 2:b71750c4b0fd
+ | tag: tip
+ | user: test
+ | date: Thu Jan 01 00:00:00 1970 +0000
+ | summary: capital ten
+ |
+ o changeset: 1:913609522437
+ | user: test
+ | date: Thu Jan 01 00:00:00 1970 +0000
+ | summary: capital three
+ |
+ % changeset: 0:a30dd8addae3
+ user: test
+ date: Thu Jan 01 00:00:00 1970 +0000
+ summary: initial
+
$ hg resolve --all --debug
picked tool ':merge' for foo (binary False symlink False changedelete False)
merging foo
--- a/tests/test-backwards-remove.t Mon Apr 13 16:30:13 2020 +0300
+++ b/tests/test-backwards-remove.t Thu Apr 16 22:51:09 2020 +0530
@@ -2,7 +2,8 @@
$ echo This is file a1 > a
$ hg add a
$ hg commit -m "commit #0"
- $ ls
+ $ ls -A
+ .hg
a
$ echo This is file b1 > b
$ hg add b
@@ -12,5 +13,6 @@
B should disappear
- $ ls
+ $ ls -A
+ .hg
a
--- a/tests/test-bad-extension.t Mon Apr 13 16:30:13 2020 +0300
+++ b/tests/test-bad-extension.t Thu Apr 16 22:51:09 2020 +0530
@@ -1,3 +1,4 @@
+#require no-chg
$ filterlog () {
> sed -e 's!^[0-9/]* [0-9:]* ([0-9]*)>!YYYY/MM/DD HH:MM:SS (PID)>!'
> }
--- a/tests/test-bisect.t Mon Apr 13 16:30:13 2020 +0300
+++ b/tests/test-bisect.t Thu Apr 16 22:51:09 2020 +0530
@@ -564,7 +564,7 @@
warning: conflicts while merging a! (edit, then use 'hg resolve --mark')
abort: unresolved conflicts, can't continue
(use 'hg resolve' and 'hg graft --continue')
- [255]
+ [1]
$ hg bisect --reset
$ hg up -C .
1 files updated, 0 files merged, 0 files removed, 0 files unresolved
--- a/tests/test-bookmarks-pushpull.t Mon Apr 13 16:30:13 2020 +0300
+++ b/tests/test-bookmarks-pushpull.t Thu Apr 16 22:51:09 2020 +0530
@@ -129,10 +129,10 @@
bundle2-output: bundle parameter:
bundle2-output: start of parts
bundle2-output: bundle part: "replycaps"
- bundle2-output-part: "replycaps" 222 bytes payload
+ bundle2-output-part: "replycaps" 241 bytes payload
bundle2-output: part 0: "REPLYCAPS"
bundle2-output: header chunk size: 16
- bundle2-output: payload chunk size: 222
+ bundle2-output: payload chunk size: 241
bundle2-output: closing payload chunk
bundle2-output: bundle part: "check:bookmarks"
bundle2-output-part: "check:bookmarks" 23 bytes payload
@@ -162,9 +162,9 @@
bundle2-input: part parameters: 0
bundle2-input: found a handler for part replycaps
bundle2-input-part: "replycaps" supported
- bundle2-input: payload chunk size: 222
+ bundle2-input: payload chunk size: 241
bundle2-input: payload chunk size: 0
- bundle2-input-part: total payload size 222
+ bundle2-input-part: total payload size 241
bundle2-input: part header size: 22
bundle2-input: part type: "CHECK:BOOKMARKS"
bundle2-input: part id: "1"
@@ -241,10 +241,10 @@
bundle2-output: bundle parameter:
bundle2-output: start of parts
bundle2-output: bundle part: "replycaps"
- bundle2-output-part: "replycaps" 222 bytes payload
+ bundle2-output-part: "replycaps" 241 bytes payload
bundle2-output: part 0: "REPLYCAPS"
bundle2-output: header chunk size: 16
- bundle2-output: payload chunk size: 222
+ bundle2-output: payload chunk size: 241
bundle2-output: closing payload chunk
bundle2-output: bundle part: "check:bookmarks"
bundle2-output-part: "check:bookmarks" 23 bytes payload
@@ -275,9 +275,9 @@
bundle2-input: part parameters: 0
bundle2-input: found a handler for part replycaps
bundle2-input-part: "replycaps" supported
- bundle2-input: payload chunk size: 222
+ bundle2-input: payload chunk size: 241
bundle2-input: payload chunk size: 0
- bundle2-input-part: total payload size 222
+ bundle2-input-part: total payload size 241
bundle2-input: part header size: 22
bundle2-input: part type: "CHECK:BOOKMARKS"
bundle2-input: part id: "1"
@@ -328,6 +328,17 @@
#endif
+Divergent bookmark cannot be exported
+
+ $ hg book W@default
+ $ hg push -B W@default ../a
+ pushing to ../a
+ searching for changes
+ cannot push divergent bookmark W@default!
+ no changes found
+ [2]
+ $ hg book -d W@default
+
export the active bookmark
$ hg bookmark V
--- a/tests/test-check-format.t Mon Apr 13 16:30:13 2020 +0300
+++ b/tests/test-check-format.t Thu Apr 16 22:51:09 2020 +0530
@@ -1,5 +1,5 @@
#require black
$ cd $RUNTESTDIR/..
- $ black --config=black.toml --check --diff `hg files 'set:(**.py + grep("^#!.*python")) - mercurial/thirdparty/** - "contrib/python-zstandard/**"'`
+ $ black --config=black.toml --check --diff `hg files 'set:(**.py + grep("^#!.*python")) - mercurial/thirdparty/**'`
--- a/tests/test-check-interfaces.py Mon Apr 13 16:30:13 2020 +0300
+++ b/tests/test-check-interfaces.py Thu Apr 16 22:51:09 2020 +0530
@@ -211,6 +211,7 @@
ziverify.verifyClass(repository.ifilestorage, filelog.filelog)
ziverify.verifyClass(repository.imanifestdict, manifest.manifestdict)
+ ziverify.verifyClass(repository.imanifestdict, manifest.treemanifest)
ziverify.verifyClass(
repository.imanifestrevisionstored, manifest.manifestctx
)
@@ -252,7 +253,6 @@
checkzobject(mctx)
# Conforms to imanifestrevisionwritable.
- checkzobject(mctx.new())
checkzobject(mctx.copy())
# Conforms to imanifestdict.
--- a/tests/test-check-module-imports.t Mon Apr 13 16:30:13 2020 +0300
+++ b/tests/test-check-module-imports.t Thu Apr 16 22:51:09 2020 +0530
@@ -24,6 +24,7 @@
> -X contrib/packaging/hg-docker \
> -X contrib/packaging/hgpackaging/ \
> -X contrib/packaging/inno/ \
+ > -X contrib/phab-clean.py \
> -X contrib/python-zstandard/ \
> -X contrib/win32/hgwebdir_wsgi.py \
> -X contrib/perf-utils/perf-revlog-write-plot.py \
--- a/tests/test-check-pyflakes.t Mon Apr 13 16:30:13 2020 +0300
+++ b/tests/test-check-pyflakes.t Thu Apr 16 22:51:09 2020 +0530
@@ -20,7 +20,7 @@
> -X mercurial/thirdparty/concurrent \
> -X mercurial/thirdparty/zope \
> 2>/dev/null \
- > | xargs pyflakes 2>/dev/null | "$TESTDIR/filterpyflakes.py"
+ > | xargs $PYTHON -m pyflakes 2>/dev/null | "$TESTDIR/filterpyflakes.py"
contrib/perf.py:*: undefined name 'xrange' (glob) (?)
mercurial/hgweb/server.py:*: undefined name 'reload' (glob) (?)
mercurial/util.py:*: undefined name 'file' (glob) (?)
--- a/tests/test-chg.t Mon Apr 13 16:30:13 2020 +0300
+++ b/tests/test-chg.t Thu Apr 16 22:51:09 2020 +0530
@@ -382,8 +382,8 @@
YYYY/MM/DD HH:MM:SS (PID)> log -R cached
YYYY/MM/DD HH:MM:SS (PID)> loaded repo into cache: $TESTTMP/cached (in ...s)
-Test that chg works even when python "coerces" the locale (py3.7+, which is done
-by default if none of LC_ALL, LC_CTYPE, or LANG are set in the environment)
+Test that chg works (sets to the user's actual LC_CTYPE) even when python
+"coerces" the locale (py3.7+)
$ cat > $TESTTMP/debugenv.py <<EOF
> from mercurial import encoding
@@ -397,9 +397,22 @@
> if v is not None:
> ui.write(b'%s=%s\n' % (k, encoding.environ[k]))
> EOF
+(hg keeps python's modified LC_CTYPE, chg doesn't)
+ $ (unset LC_ALL; unset LANG; LC_CTYPE= "$CHGHG" \
+ > --config extensions.debugenv=$TESTTMP/debugenv.py debugenv)
+ LC_CTYPE=C.UTF-8 (py37 !)
+ LC_CTYPE= (no-py37 !)
+ $ (unset LC_ALL; unset LANG; LC_CTYPE= chg \
+ > --config extensions.debugenv=$TESTTMP/debugenv.py debugenv)
+ LC_CTYPE=
+ $ (unset LC_ALL; unset LANG; LC_CTYPE=unsupported_value chg \
+ > --config extensions.debugenv=$TESTTMP/debugenv.py debugenv)
+ LC_CTYPE=unsupported_value
+ $ (unset LC_ALL; unset LANG; LC_CTYPE= chg \
+ > --config extensions.debugenv=$TESTTMP/debugenv.py debugenv)
+ LC_CTYPE=
$ LANG= LC_ALL= LC_CTYPE= chg \
> --config extensions.debugenv=$TESTTMP/debugenv.py debugenv
LC_ALL=
- LC_CTYPE=C.UTF-8 (py37 !)
- LC_CTYPE= (no-py37 !)
+ LC_CTYPE=
LANG=
--- a/tests/test-clone-uncompressed.t Mon Apr 13 16:30:13 2020 +0300
+++ b/tests/test-clone-uncompressed.t Thu Apr 16 22:51:09 2020 +0530
@@ -54,6 +54,8 @@
changegroup
01
02
+ checkheads
+ related
digests
md5
sha1
@@ -120,6 +122,8 @@
changegroup
01
02
+ checkheads
+ related
digests
md5
sha1
--- a/tests/test-clone.t Mon Apr 13 16:30:13 2020 +0300
+++ b/tests/test-clone.t Thu Apr 16 22:51:09 2020 +0530
@@ -1097,7 +1097,8 @@
no changes found
adding remote bookmark bookA
- $ ls share-1anowc
+ $ ls -A share-1anowc
+ .hg
Test that auto sharing doesn't cause failure of "hg clone local remote"
--- a/tests/test-clonebundles.t Mon Apr 13 16:30:13 2020 +0300
+++ b/tests/test-clonebundles.t Thu Apr 16 22:51:09 2020 +0530
@@ -53,7 +53,7 @@
$ echo 'http://does.not.exist/bundle.hg' > server/.hg/clonebundles.manifest
$ hg clone http://localhost:$HGPORT 404-url
applying clone bundle from http://does.not.exist/bundle.hg
- error fetching bundle: (.* not known|(\[Errno -?\d+] )?([Nn]o address associated with (host)?name|Temporary failure in name resolution)) (re) (no-windows !)
+ error fetching bundle: (.* not known|(\[Errno -?\d+] )?([Nn]o address associated with (host)?name|Temporary failure in name resolution|Name does not resolve)) (re) (no-windows !)
error fetching bundle: [Errno 1100*] getaddrinfo failed (glob) (windows !)
abort: error applying bundle
(if this error persists, consider contacting the server operator or disable clone bundles via "--config ui.clonebundles=false")
@@ -455,6 +455,19 @@
no changes found
2 local changesets published
+Test a bad attribute list
+
+ $ hg --config ui.clonebundleprefers=bad clone -U http://localhost:$HGPORT bad-input
+ abort: invalid ui.clonebundleprefers item: bad
+ (each comma separated item should be key=value pairs)
+ [255]
+ $ hg --config ui.clonebundleprefers=key=val,bad,key2=val2 clone \
+ > -U http://localhost:$HGPORT bad-input
+ abort: invalid ui.clonebundleprefers item: bad
+ (each comma separated item should be key=value pairs)
+ [255]
+
+
Test interaction between clone bundles and --stream
A manifest with just a gzip bundle
--- a/tests/test-commit-unresolved.t Mon Apr 13 16:30:13 2020 +0300
+++ b/tests/test-commit-unresolved.t Thu Apr 16 22:51:09 2020 +0530
@@ -60,7 +60,7 @@
abort: cannot specify a node with --abort
[255]
$ hg merge --abort --rev e4501
- abort: cannot specify both --rev and --abort
+ abort: cannot specify both --abort and --rev
[255]
#if abortcommand
@@ -144,7 +144,7 @@
(branch merge, don't forget to commit)
$ hg merge --preview --abort
- abort: cannot specify --preview with --abort
+ abort: cannot specify both --abort and --preview
[255]
$ hg abort
--- a/tests/test-completion.t Mon Apr 13 16:30:13 2020 +0300
+++ b/tests/test-completion.t Thu Apr 16 22:51:09 2020 +0530
@@ -75,6 +75,7 @@
$ hg debugcomplete debug
debugancestor
debugapplystreamclonebundle
+ debugbackupbundle
debugbuilddag
debugbundle
debugcapabilities
@@ -107,6 +108,7 @@
debugmanifestfulltextcache
debugmergestate
debugnamecomplete
+ debugnodemap
debugobsolete
debugp1copies
debugp2copies
@@ -128,6 +130,7 @@
debugssl
debugsub
debugsuccessorssets
+ debugtagscache
debugtemplate
debuguigetpass
debuguiprompt
@@ -255,9 +258,10 @@
commit: addremove, close-branch, amend, secret, edit, force-close-branch, interactive, include, exclude, message, logfile, date, user, subrepos
config: untrusted, edit, local, global, template
continue: dry-run
- copy: after, force, include, exclude, dry-run
+ copy: forget, after, at-rev, force, include, exclude, dry-run
debugancestor:
debugapplystreamclonebundle:
+ debugbackupbundle: recover, patch, git, limit, no-merges, stat, graph, style, template
debugbuilddag: mergeable-file, overwritten-file, new-file
debugbundle: all, part-type, spec
debugcapabilities:
@@ -287,8 +291,9 @@
debuglabelcomplete:
debuglocks: force-lock, force-wlock, set-lock, set-wlock
debugmanifestfulltextcache: clear, add
- debugmergestate:
+ debugmergestate: style, template
debugnamecomplete:
+ debugnodemap: dump-new, dump-disk, check, metadata
debugobsolete: flags, record-parents, rev, exclusive, index, delete, date, user, template
debugp1copies: rev
debugp2copies: rev
@@ -310,6 +315,7 @@
debugssl:
debugsub: rev
debugsuccessorssets: closest
+ debugtagscache:
debugtemplate: rev, define
debuguigetpass: prompt
debuguiprompt: prompt
@@ -339,7 +345,7 @@
parents: rev, style, template
paths: template
phase: public, draft, secret, force, rev
- pull: update, force, rev, bookmark, branch, ssh, remotecmd, insecure
+ pull: update, force, confirm, rev, bookmark, branch, ssh, remotecmd, insecure
push: force, rev, bookmark, branch, new-branch, pushvars, publish, ssh, remotecmd, insecure
recover: verify
remove: after, force, subrepos, include, exclude, dry-run
--- a/tests/test-convert-cvs-branch.t Mon Apr 13 16:30:13 2020 +0300
+++ b/tests/test-convert-cvs-branch.t Thu Apr 16 22:51:09 2020 +0530
@@ -1,4 +1,4 @@
-#require cvs
+#require cvs no-root
This is https://bz.mercurial-scm.org/1148
and https://bz.mercurial-scm.org/1447
--- a/tests/test-convert-cvs-detectmerge.t Mon Apr 13 16:30:13 2020 +0300
+++ b/tests/test-convert-cvs-detectmerge.t Thu Apr 16 22:51:09 2020 +0530
@@ -1,4 +1,4 @@
-#require cvs
+#require cvs no-root
Test config convert.cvsps.mergefrom config setting.
(Should test similar mergeto feature, but I don't understand it yet.)
--- a/tests/test-convert-cvs-synthetic.t Mon Apr 13 16:30:13 2020 +0300
+++ b/tests/test-convert-cvs-synthetic.t Thu Apr 16 22:51:09 2020 +0530
@@ -1,4 +1,4 @@
-#require cvs112
+#require cvs112 no-root
This feature requires use of builtin cvsps!
--- a/tests/test-convert-cvs.t Mon Apr 13 16:30:13 2020 +0300
+++ b/tests/test-convert-cvs.t Thu Apr 16 22:51:09 2020 +0530
@@ -1,4 +1,4 @@
-#require cvs
+#require cvs no-root
$ cvscall()
> {
--- a/tests/test-convert-cvsnt-mergepoints.t Mon Apr 13 16:30:13 2020 +0300
+++ b/tests/test-convert-cvsnt-mergepoints.t Thu Apr 16 22:51:09 2020 +0530
@@ -1,4 +1,4 @@
-#require cvs
+#require cvs no-root
$ filterpath()
> {
--- a/tests/test-convert-hg-source.t Mon Apr 13 16:30:13 2020 +0300
+++ b/tests/test-convert-hg-source.t Thu Apr 16 22:51:09 2020 +0530
@@ -62,9 +62,9 @@
6 make bar and baz copies of foo
5 merge local copy
4 merge remote copy
- 3 Added tag that for changeset 88586c4e9f02
+ 3 Added tag that for changeset 8601262d7472
2 Removed tag that
- 1 Added tag this for changeset c56a7f387039
+ 1 Added tag this for changeset 706614b458c1
0 mark baz executable
updating bookmarks
$ cd new
@@ -76,7 +76,7 @@
#if execbit
$ hg bookmarks
premerge1 3:973ef48a98a4
- premerge2 8:91d107c423ba
+ premerge2 8:c4968fdf2e5d
#else
Different hash because no x bit
$ hg bookmarks
@@ -96,19 +96,19 @@
6 make bar and baz copies of foo
5 merge local copy
4 merge remote copy
- 3 Added tag that for changeset 88586c4e9f02
+ 3 Added tag that for changeset 8601262d7472
2 Removed tag that
- 1 Added tag this for changeset c56a7f387039
+ 1 Added tag this for changeset 706614b458c1
0 mark baz executable
updating bookmarks
$ hg -R new log -G -T '{rev} {desc}'
o 8 mark baz executable
|
- o 7 Added tag this for changeset c56a7f387039
+ o 7 Added tag this for changeset 706614b458c1
|
o 6 Removed tag that
|
- o 5 Added tag that for changeset 88586c4e9f02
+ o 5 Added tag that for changeset 8601262d7472
|
o 4 merge remote copy
|\
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-copies-chain-merge.t Thu Apr 16 22:51:09 2020 +0530
@@ -0,0 +1,798 @@
+=====================================================
+Test Copy tracing for chain of copies involving merge
+=====================================================
+
+This test file covers copies/rename cases for chains of commits where merges
+are involved. It checks that we do not have unwanted changes of behavior and
+that the different options to retrieve copies behave correctly.
+
+Setup
+=====
+
+use git diff to see rename
+
+ $ cat << EOF >> $HGRCPATH
+ > [diff]
+ > git=yes
+ > [ui]
+ > logtemplate={rev} {desc}\n
+ > EOF
+
+ $ hg init repo-chain
+ $ cd repo-chain
+
+Add some linear renames initially
+
+ $ touch a b h
+ $ hg ci -Am 'i-0 initial commit: a b h'
+ adding a
+ adding b
+ adding h
+ $ hg mv a c
+ $ hg ci -Am 'i-1: a -move-> c'
+ $ hg mv c d
+ $ hg ci -Am 'i-2: c -move-> d'
+ $ hg log -G
+ @ 2 i-2: c -move-> d
+ |
+ o 1 i-1: a -move-> c
+ |
+ o 0 i-0 initial commit: a b h
+
+
+And having another branch with renames on the other side
+
+ $ hg mv d e
+ $ hg ci -Am 'a-1: d -move-> e'
+ $ hg mv e f
+ $ hg ci -Am 'a-2: e -move-> f'
+ $ hg log -G --rev '::.'
+ @ 4 a-2: e -move-> f
+ |
+ o 3 a-1: d -move-> e
+ |
+ o 2 i-2: c -move-> d
+ |
+ o 1 i-1: a -move-> c
+ |
+ o 0 i-0 initial commit: a b h
+
+
+Have a branching with nothing on one side
+
+ $ hg up 'desc("i-2")'
+ 1 files updated, 0 files merged, 1 files removed, 0 files unresolved
+ $ echo foo > b
+ $ hg ci -m 'b-1: b update'
+ created new head
+ $ hg log -G --rev '::.'
+ @ 5 b-1: b update
+ |
+ o 2 i-2: c -move-> d
+ |
+ o 1 i-1: a -move-> c
+ |
+ o 0 i-0 initial commit: a b h
+
+
+Create a branch that deletes a previously renamed file
+
+ $ hg up 'desc("i-2")'
+ 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ $ hg rm d
+ $ hg ci -m 'c-1 delete d'
+ created new head
+ $ hg log -G --rev '::.'
+ @ 6 c-1 delete d
+ |
+ o 2 i-2: c -move-> d
+ |
+ o 1 i-1: a -move-> c
+ |
+ o 0 i-0 initial commit: a b h
+
+
+Create a branch that deletes a previously renamed file and recreates it
+
+ $ hg up 'desc("i-2")'
+ 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ $ hg rm d
+ $ hg ci -m 'd-1 delete d'
+ created new head
+ $ echo bar > d
+ $ hg add d
+ $ hg ci -m 'd-2 re-add d'
+ $ hg log -G --rev '::.'
+ @ 8 d-2 re-add d
+ |
+ o 7 d-1 delete d
+ |
+ o 2 i-2: c -move-> d
+ |
+ o 1 i-1: a -move-> c
+ |
+ o 0 i-0 initial commit: a b h
+
+
+Having another branch renaming a different file to the same filename as another
+
+ $ hg up 'desc("i-2")'
+ 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ $ hg mv b g
+ $ hg ci -m 'e-1 b -move-> g'
+ created new head
+ $ hg mv g f
+ $ hg ci -m 'e-2 g -move-> f'
+ $ hg log -G --rev '::.'
+ @ 10 e-2 g -move-> f
+ |
+ o 9 e-1 b -move-> g
+ |
+ o 2 i-2: c -move-> d
+ |
+ o 1 i-1: a -move-> c
+ |
+ o 0 i-0 initial commit: a b h
+
+
+merging with unrelated change does not interfere with the renames
+---------------------------------------------------------------
+
+- rename on one side
+- unrelated change on the other side
+
+ $ hg up 'desc("b-1")'
+ 1 files updated, 0 files merged, 1 files removed, 0 files unresolved
+ $ hg merge 'desc("a-2")'
+ 1 files updated, 0 files merged, 1 files removed, 0 files unresolved
+ (branch merge, don't forget to commit)
+ $ hg ci -m 'mBAm-0 simple merge - one way'
+ $ hg up 'desc("a-2")'
+ 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ $ hg merge 'desc("b-1")'
+ 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ (branch merge, don't forget to commit)
+ $ hg ci -m 'mABm-0 simple merge - the other way'
+ created new head
+ $ hg log -G --rev '::(desc("mABm")+desc("mBAm"))'
+ @ 12 mABm-0 simple merge - the other way
+ |\
+ +---o 11 mBAm-0 simple merge - one way
+ | |/
+ | o 5 b-1: b update
+ | |
+ o | 4 a-2: e -move-> f
+ | |
+ o | 3 a-1: d -move-> e
+ |/
+ o 2 i-2: c -move-> d
+ |
+ o 1 i-1: a -move-> c
+ |
+ o 0 i-0 initial commit: a b h
+
+
+ $ hg status --copies --rev 'desc("b-1")' --rev 'desc("mABm")'
+ A f
+ d
+ R d
+ $ hg status --copies --rev 'desc("b-1")' --rev 'desc("mBAm")'
+ A f
+ d
+ R d
+ $ hg status --copies --rev 'desc("a-2")' --rev 'desc("mABm")'
+ M b
+ $ hg status --copies --rev 'desc("a-2")' --rev 'desc("mBAm")'
+ M b
+ $ hg status --copies --rev 'desc("i-2")' --rev 'desc("mABm")'
+ M b
+ A f
+ d
+ R d
+ $ hg status --copies --rev 'desc("i-2")' --rev 'desc("mBAm")'
+ M b
+ A f
+ d
+ R d
+ $ hg status --copies --rev 'desc("i-0")' --rev 'desc("mABm")'
+ M b
+ A f
+ a
+ R a
+ $ hg status --copies --rev 'desc("i-0")' --rev 'desc("mBAm")'
+ M b
+ A f
+ a
+ R a
+
+merging with the side having a delete
+-------------------------------------
+
+case summary:
+- one with change to an unrelated file
+- one deleting the change
+and recreate an unrelated file after the merge
+
+ $ hg up 'desc("b-1")'
+ 1 files updated, 0 files merged, 1 files removed, 0 files unresolved
+ $ hg merge 'desc("c-1")'
+ 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
+ (branch merge, don't forget to commit)
+ $ hg ci -m 'mBCm-0 simple merge - one way'
+ $ echo bar > d
+ $ hg add d
+ $ hg ci -m 'mBCm-1 re-add d'
+ $ hg up 'desc("c-1")'
+ 1 files updated, 0 files merged, 1 files removed, 0 files unresolved
+ $ hg merge 'desc("b-1")'
+ 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ (branch merge, don't forget to commit)
+ $ hg ci -m 'mCBm-0 simple merge - the other way'
+ created new head
+ $ echo bar > d
+ $ hg add d
+ $ hg ci -m 'mCBm-1 re-add d'
+ $ hg log -G --rev '::(desc("mCBm")+desc("mBCm"))'
+ @ 16 mCBm-1 re-add d
+ |
+ o 15 mCBm-0 simple merge - the other way
+ |\
+ | | o 14 mBCm-1 re-add d
+ | | |
+ +---o 13 mBCm-0 simple merge - one way
+ | |/
+ | o 6 c-1 delete d
+ | |
+ o | 5 b-1: b update
+ |/
+ o 2 i-2: c -move-> d
+ |
+ o 1 i-1: a -move-> c
+ |
+ o 0 i-0 initial commit: a b h
+
+- comparing from the merge
+
+ $ hg status --copies --rev 'desc("b-1")' --rev 'desc("mBCm-0")'
+ R d
+ $ hg status --copies --rev 'desc("b-1")' --rev 'desc("mCBm-0")'
+ R d
+ $ hg status --copies --rev 'desc("c-1")' --rev 'desc("mBCm-0")'
+ M b
+ $ hg status --copies --rev 'desc("c-1")' --rev 'desc("mCBm-0")'
+ M b
+ $ hg status --copies --rev 'desc("i-2")' --rev 'desc("mBCm-0")'
+ M b
+ R d
+ $ hg status --copies --rev 'desc("i-2")' --rev 'desc("mCBm-0")'
+ M b
+ R d
+ $ hg status --copies --rev 'desc("i-0")' --rev 'desc("mBCm-0")'
+ M b
+ R a
+ $ hg status --copies --rev 'desc("i-0")' --rev 'desc("mCBm-0")'
+ M b
+ R a
+
+- comparing with the merge children re-adding the file
+
+ $ hg status --copies --rev 'desc("b-1")' --rev 'desc("mBCm-1")'
+ M d
+ $ hg status --copies --rev 'desc("b-1")' --rev 'desc("mCBm-1")'
+ M d
+ $ hg status --copies --rev 'desc("c-1")' --rev 'desc("mBCm-1")'
+ M b
+ A d
+ $ hg status --copies --rev 'desc("c-1")' --rev 'desc("mCBm-1")'
+ M b
+ A d
+ $ hg status --copies --rev 'desc("i-2")' --rev 'desc("mBCm-1")'
+ M b
+ M d
+ $ hg status --copies --rev 'desc("i-2")' --rev 'desc("mCBm-1")'
+ M b
+ M d
+ $ hg status --copies --rev 'desc("i-0")' --rev 'desc("mBCm-1")'
+ M b
+ A d
+ R a
+ $ hg status --copies --rev 'desc("i-0")' --rev 'desc("mCBm-1")'
+ M b
+ A d
+ R a
+
+Comparing with a merge re-adding the file afterward
+---------------------------------------------------
+
+Merge:
+- one with change to an unrelated file
+- one deleting and recreating the change
+
+Note:
+| In this case, one of the merges wrongly records a merge while there is none.
+| This leads to bad copy tracing information being dug up.
+
+ $ hg up 'desc("b-1")'
+ 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ $ hg merge 'desc("d-2")'
+ 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ (branch merge, don't forget to commit)
+ $ hg ci -m 'mBDm-0 simple merge - one way'
+ $ hg up 'desc("d-2")'
+ 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ $ hg merge 'desc("b-1")'
+ 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ (branch merge, don't forget to commit)
+ $ hg ci -m 'mDBm-0 simple merge - the other way'
+ created new head
+ $ hg log -G --rev '::(desc("mDBm")+desc("mBDm"))'
+ @ 18 mDBm-0 simple merge - the other way
+ |\
+ +---o 17 mBDm-0 simple merge - one way
+ | |/
+ | o 8 d-2 re-add d
+ | |
+ | o 7 d-1 delete d
+ | |
+ o | 5 b-1: b update
+ |/
+ o 2 i-2: c -move-> d
+ |
+ o 1 i-1: a -move-> c
+ |
+ o 0 i-0 initial commit: a b h
+
+ $ hg status --copies --rev 'desc("b-1")' --rev 'desc("mBDm-0")'
+ M d
+ $ hg status --copies --rev 'desc("b-1")' --rev 'desc("mDBm-0")'
+ M d
+ $ hg status --copies --rev 'desc("d-2")' --rev 'desc("mBDm-0")'
+ M b
+ $ hg status --copies --rev 'desc("d-2")' --rev 'desc("mDBm-0")'
+ M b
+ $ hg status --copies --rev 'desc("i-2")' --rev 'desc("mBDm-0")'
+ M b
+ M d
+ $ hg status --copies --rev 'desc("i-2")' --rev 'desc("mDBm-0")'
+ M b
+ M d
+
+The bug makes the recorded copy different depending on where we started the merge from
+
+ $ hg manifest --debug --rev 'desc("mBDm-0")' | grep '644 d'
+ b004912a8510032a0350a74daa2803dadfb00e12 644 d
+ $ hg manifest --debug --rev 'desc("mDBm-0")' | grep '644 d'
+ b004912a8510032a0350a74daa2803dadfb00e12 644 d
+
+The 0bb5445dc4d02f4e0d86cf16f9f3a411d0f17744 entry is wrong, since the file was
+deleted on one side (then recreated) and untouched on the other side, no "merge"
+has happened. The resulting `d` file is the untouched version from branch `D`,
+not a merge.
+
+ $ hg manifest --debug --rev 'desc("d-2")' | grep '644 d'
+ b004912a8510032a0350a74daa2803dadfb00e12 644 d
+ $ hg manifest --debug --rev 'desc("b-1")' | grep '644 d'
+ 01c2f5eabdc4ce2bdee42b5f86311955e6c8f573 644 d
+ $ hg debugindex d
+ rev linkrev nodeid p1 p2
+ 0 2 01c2f5eabdc4 000000000000 000000000000
+ 1 8 b004912a8510 000000000000 000000000000
+
+(This `hg log` output is wrong, since no merge actually happened).
+
+ $ hg log -Gfr 'desc("mBDm-0")' d
+ o 8 d-2 re-add d
+ |
+ ~
+
+This `hg log` output is correct
+
+ $ hg log -Gfr 'desc("mDBm-0")' d
+ o 8 d-2 re-add d
+ |
+ ~
+
+ $ hg status --copies --rev 'desc("i-0")' --rev 'desc("mBDm-0")'
+ M b
+ A d
+ R a
+ $ hg status --copies --rev 'desc("i-0")' --rev 'desc("mDBm-0")'
+ M b
+ A d
+ R a
+
+
+Comparing with a merge with colliding rename
+--------------------------------------------
+
+- the "e-" branch renaming b to f (through 'g')
+- the "a-" branch renaming d to f (through e)
+
+ $ hg up 'desc("a-2")'
+ 2 files updated, 0 files merged, 1 files removed, 0 files unresolved
+ $ hg merge 'desc("e-2")'
+ 1 files updated, 0 files merged, 1 files removed, 0 files unresolved
+ (branch merge, don't forget to commit)
+ $ hg ci -m 'mAEm-0 simple merge - one way'
+ $ hg up 'desc("e-2")'
+ 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ $ hg merge 'desc("a-2")'
+ 1 files updated, 0 files merged, 1 files removed, 0 files unresolved
+ (branch merge, don't forget to commit)
+ $ hg ci -m 'mEAm-0 simple merge - the other way'
+ created new head
+ $ hg log -G --rev '::(desc("mAEm")+desc("mEAm"))'
+ @ 20 mEAm-0 simple merge - the other way
+ |\
+ +---o 19 mAEm-0 simple merge - one way
+ | |/
+ | o 10 e-2 g -move-> f
+ | |
+ | o 9 e-1 b -move-> g
+ | |
+ o | 4 a-2: e -move-> f
+ | |
+ o | 3 a-1: d -move-> e
+ |/
+ o 2 i-2: c -move-> d
+ |
+ o 1 i-1: a -move-> c
+ |
+ o 0 i-0 initial commit: a b h
+
+ $ hg manifest --debug --rev 'desc("mAEm-0")' | grep '644 f'
+ eb806e34ef6be4c264effd5933d31004ad15a793 644 f
+ $ hg manifest --debug --rev 'desc("mEAm-0")' | grep '644 f'
+ eb806e34ef6be4c264effd5933d31004ad15a793 644 f
+ $ hg manifest --debug --rev 'desc("a-2")' | grep '644 f'
+ 0dd616bc7ab1a111921d95d76f69cda5c2ac539c 644 f
+ $ hg manifest --debug --rev 'desc("e-2")' | grep '644 f'
+ 6da5a2eecb9c833f830b67a4972366d49a9a142c 644 f
+ $ hg debugindex f
+ rev linkrev nodeid p1 p2
+ 0 4 0dd616bc7ab1 000000000000 000000000000
+ 1 10 6da5a2eecb9c 000000000000 000000000000
+ 2 19 eb806e34ef6b 0dd616bc7ab1 6da5a2eecb9c
+ $ hg status --copies --rev 'desc("a-2")' --rev 'desc("mAEm-0")'
+ M f
+ R b
+ $ hg status --copies --rev 'desc("a-2")' --rev 'desc("mEAm-0")'
+ M f
+ R b
+ $ hg status --copies --rev 'desc("e-2")' --rev 'desc("mAEm-0")'
+ M f
+ R d
+ $ hg status --copies --rev 'desc("e-2")' --rev 'desc("mEAm-0")'
+ M f
+ R d
+ $ hg status --copies --rev 'desc("i-2")' --rev 'desc("a-2")'
+ A f
+ d
+ R d
+ $ hg status --copies --rev 'desc("i-2")' --rev 'desc("e-2")'
+ A f
+ b
+ R b
+ $ hg status --copies --rev 'desc("i-2")' --rev 'desc("mAEm-0")'
+ A f
+ d
+ R b
+ R d
+ $ hg status --copies --rev 'desc("i-2")' --rev 'desc("mEAm-0")'
+ A f
+ d
+ R b
+ R d
+ $ hg status --copies --rev 'desc("i-0")' --rev 'desc("mAEm-0")'
+ A f
+ a
+ R a
+ R b
+ $ hg status --copies --rev 'desc("i-0")' --rev 'desc("mEAm-0")'
+ A f
+ a
+ R a
+ R b
+
+
+Note:
+| In this case, one of the merges wrongly records a merge while there is none.
+| This leads to bad copy tracing information being dug up.
+
+
+Merge:
+- one with change to an unrelated file (b)
+- one overwriting a file (d) with a rename (from h to i to d)
+
+ $ hg up 'desc("i-2")'
+ 2 files updated, 0 files merged, 1 files removed, 0 files unresolved
+ $ hg mv h i
+ $ hg commit -m "f-1: rename h -> i"
+ created new head
+ $ hg mv --force i d
+ $ hg commit -m "f-2: rename i -> d"
+ $ hg debugindex d
+ rev linkrev nodeid p1 p2
+ 0 2 01c2f5eabdc4 000000000000 000000000000
+ 1 8 b004912a8510 000000000000 000000000000
+ 2 22 c72365ee036f 000000000000 000000000000
+ $ hg up 'desc("b-1")'
+ 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ $ hg merge 'desc("f-2")'
+ 1 files updated, 0 files merged, 1 files removed, 0 files unresolved
+ (branch merge, don't forget to commit)
+ $ hg ci -m 'mBFm-0 simple merge - one way'
+ $ hg up 'desc("f-2")'
+ 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ $ hg merge 'desc("b-1")'
+ 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ (branch merge, don't forget to commit)
+ $ hg ci -m 'mFBm-0 simple merge - the other way'
+ created new head
+ $ hg log -G --rev '::(desc("mBFm")+desc("mFBm"))'
+ @ 24 mFBm-0 simple merge - the other way
+ |\
+ +---o 23 mBFm-0 simple merge - one way
+ | |/
+ | o 22 f-2: rename i -> d
+ | |
+ | o 21 f-1: rename h -> i
+ | |
+ o | 5 b-1: b update
+ |/
+ o 2 i-2: c -move-> d
+ |
+ o 1 i-1: a -move-> c
+ |
+ o 0 i-0 initial commit: a b h
+
+The overwriting should take over. However, the behavior is currently buggy
+
+ $ hg status --copies --rev 'desc("i-0")' --rev 'desc("mBFm-0")'
+ M b
+ A d
+ h
+ h (false !)
+ R a
+ R h
+ $ hg status --copies --rev 'desc("i-0")' --rev 'desc("mFBm-0")'
+ M b
+ A d
+ h
+ R a
+ R h
+ $ hg status --copies --rev 'desc("b-1")' --rev 'desc("mBFm-0")'
+ M d
+ R h
+ $ hg status --copies --rev 'desc("f-2")' --rev 'desc("mBFm-0")'
+ M b
+ $ hg status --copies --rev 'desc("f-1")' --rev 'desc("mBFm-0")'
+ M b
+ M d
+ R i
+ $ hg status --copies --rev 'desc("b-1")' --rev 'desc("mFBm-0")'
+ M d
+ R h
+ $ hg status --copies --rev 'desc("f-2")' --rev 'desc("mFBm-0")'
+ M b
+ $ hg status --copies --rev 'desc("f-1")' --rev 'desc("mFBm-0")'
+ M b
+ M d
+ R i
+
+The following graphlog is wrong, the "a -> c -> d" chain was overwritten and should not appear.
+
+ $ hg log -Gfr 'desc("mBFm-0")' d
+ o 22 f-2: rename i -> d
+ |
+ o 21 f-1: rename h -> i
+ :
+ o 0 i-0 initial commit: a b h
+
+
+The following output is correct.
+
+ $ hg log -Gfr 'desc("mFBm-0")' d
+ o 22 f-2: rename i -> d
+ |
+ o 21 f-1: rename h -> i
+ :
+ o 0 i-0 initial commit: a b h
+
+
+
+Merge:
+- one with change to a file
+- one deleting and recreating the file
+
+Unlike in the 'BD/DB' cases, an actual merge happened here. So we should
+consider history and rename on both branches of the merge.
+
+ $ hg up 'desc("i-2")'
+ 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ $ echo "some update" >> d
+ $ hg commit -m "g-1: update d"
+ created new head
+ $ hg up 'desc("d-2")'
+ 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ $ hg merge 'desc("g-1")' --tool :union
+ merging d
+ 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ (branch merge, don't forget to commit)
+ $ hg ci -m 'mDGm-0 simple merge - one way'
+ $ hg up 'desc("g-1")'
+ 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ $ hg merge 'desc("d-2")' --tool :union
+ merging d
+ 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ (branch merge, don't forget to commit)
+ $ hg ci -m 'mGDm-0 simple merge - the other way'
+ created new head
+ $ hg log -G --rev '::(desc("mDGm")+desc("mGDm"))'
+ @ 27 mGDm-0 simple merge - the other way
+ |\
+ +---o 26 mDGm-0 simple merge - one way
+ | |/
+ | o 25 g-1: update d
+ | |
+ o | 8 d-2 re-add d
+ | |
+ o | 7 d-1 delete d
+ |/
+ o 2 i-2: c -move-> d
+ |
+ o 1 i-1: a -move-> c
+ |
+ o 0 i-0 initial commit: a b h
+
+ $ hg status --copies --rev 'desc("i-0")' --rev 'desc("mDGm-0")'
+ A d
+ a
+ R a
+ $ hg status --copies --rev 'desc("i-0")' --rev 'desc("mGDm-0")'
+ A d
+ a
+ R a
+ $ hg status --copies --rev 'desc("d-2")' --rev 'desc("mDGm-0")'
+ M d
+ $ hg status --copies --rev 'desc("d-2")' --rev 'desc("mGDm-0")'
+ M d
+ $ hg status --copies --rev 'desc("g-1")' --rev 'desc("mDGm-0")'
+ M d
+ $ hg status --copies --rev 'desc("g-1")' --rev 'desc("mGDm-0")'
+ M d
+
+ $ hg log -Gfr 'desc("mDGm-0")' d
+ o 26 mDGm-0 simple merge - one way
+ |\
+ | o 25 g-1: update d
+ | |
+ o | 8 d-2 re-add d
+ |/
+ o 2 i-2: c -move-> d
+ |
+ o 1 i-1: a -move-> c
+ |
+ o 0 i-0 initial commit: a b h
+
+
+
+ $ hg log -Gfr 'desc("mDGm-0")' d
+ o 26 mDGm-0 simple merge - one way
+ |\
+ | o 25 g-1: update d
+ | |
+ o | 8 d-2 re-add d
+ |/
+ o 2 i-2: c -move-> d
+ |
+ o 1 i-1: a -move-> c
+ |
+ o 0 i-0 initial commit: a b h
+
+
+
+Merge:
+- one with change to a file (d)
+- one overwriting that file with a rename (from h to i, to d)
+
+This case is similar to BF/FB, but an actual merge happens, so both sides of the
+history are relevant.
+
+Note:
+| In this case, the merge gets conflicting information since on one side we have
+| "a -> c -> d", and on the other one we have "h -> i -> d".
+|
+| The current code arbitrarily picks one side.
+
+ $ hg up 'desc("f-2")'
+ 1 files updated, 0 files merged, 1 files removed, 0 files unresolved
+ $ hg merge 'desc("g-1")' --tool :union
+ merging d
+ 0 files updated, 1 files merged, 0 files removed, 0 files unresolved
+ (branch merge, don't forget to commit)
+ $ hg ci -m 'mFGm-0 simple merge - one way'
+ created new head
+ $ hg up 'desc("g-1")'
+ 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ $ hg merge 'desc("f-2")' --tool :union
+ merging d
+ 0 files updated, 1 files merged, 1 files removed, 0 files unresolved
+ (branch merge, don't forget to commit)
+ $ hg ci -m 'mGFm-0 simple merge - the other way'
+ created new head
+ $ hg log -G --rev '::(desc("mGFm")+desc("mFGm"))'
+ @ 29 mGFm-0 simple merge - the other way
+ |\
+ +---o 28 mFGm-0 simple merge - one way
+ | |/
+ | o 25 g-1: update d
+ | |
+ o | 22 f-2: rename i -> d
+ | |
+ o | 21 f-1: rename h -> i
+ |/
+ o 2 i-2: c -move-> d
+ |
+ o 1 i-1: a -move-> c
+ |
+ o 0 i-0 initial commit: a b h
+
+ $ hg status --copies --rev 'desc("i-0")' --rev 'desc("mFGm-0")'
+ A d
+ a
+ R a
+ R h
+ $ hg status --copies --rev 'desc("i-0")' --rev 'desc("mGFm-0")'
+ A d
+ a
+ R a
+ R h
+ $ hg status --copies --rev 'desc("f-2")' --rev 'desc("mFGm-0")'
+ M d
+ $ hg status --copies --rev 'desc("f-2")' --rev 'desc("mGFm-0")'
+ M d
+ $ hg status --copies --rev 'desc("f-1")' --rev 'desc("mFGm-0")'
+ M d
+ R i
+ $ hg status --copies --rev 'desc("f-1")' --rev 'desc("mGFm-0")'
+ M d
+ R i
+ $ hg status --copies --rev 'desc("g-1")' --rev 'desc("mFGm-0")'
+ M d
+ R h
+ $ hg status --copies --rev 'desc("g-1")' --rev 'desc("mGFm-0")'
+ M d
+ R h
+
+ $ hg log -Gfr 'desc("mFGm-0")' d
+ o 28 mFGm-0 simple merge - one way
+ |\
+ | o 25 g-1: update d
+ | |
+ o | 22 f-2: rename i -> d
+ | |
+ o | 21 f-1: rename h -> i
+ |/
+ o 2 i-2: c -move-> d
+ |
+ o 1 i-1: a -move-> c
+ |
+ o 0 i-0 initial commit: a b h
+
+
+ $ hg log -Gfr 'desc("mGFm-0")' d
+ @ 29 mGFm-0 simple merge - the other way
+ |\
+ | o 25 g-1: update d
+ | |
+ o | 22 f-2: rename i -> d
+ | |
+ o | 21 f-1: rename h -> i
+ |/
+ o 2 i-2: c -move-> d
+ |
+ o 1 i-1: a -move-> c
+ |
+ o 0 i-0 initial commit: a b h
+
--- a/tests/test-copies-unrelated.t Mon Apr 13 16:30:13 2020 +0300
+++ b/tests/test-copies-unrelated.t Thu Apr 16 22:51:09 2020 +0530
@@ -109,7 +109,7 @@
What do you want to do? u
abort: unresolved conflicts, can't continue
(use 'hg resolve' and 'hg graft --continue')
- [255]
+ [1]
Add x, remove it, then add it back, then rename x to y. Similar to the case
above, but here the break in history is before the rename.
@@ -193,7 +193,7 @@
What do you want to do? u
abort: unresolved conflicts, can't continue
(use 'hg resolve' and 'hg graft --continue')
- [255]
+ [1]
#endif
$ hg co -qC 2
BROKEN: This should succeed and merge the changes from x into y
@@ -204,7 +204,7 @@
What do you want to do? u
abort: unresolved conflicts, can't continue
(use 'hg resolve' and 'hg graft --continue')
- [255]
+ [1]
Add x, remove it, then add it back, rename x to y from the first commit.
Similar to the case above, but here the break in history is parallel to the
@@ -278,7 +278,7 @@
What do you want to do? u
abort: unresolved conflicts, can't continue
(use 'hg resolve' and 'hg graft --continue')
- [255]
+ [1]
Add x on two branches, then rename x to y on one side. Similar to the case
above, but here the break in history is via the base commit.
@@ -361,7 +361,7 @@
What do you want to do? u
abort: unresolved conflicts, can't continue
(use 'hg resolve' and 'hg graft --continue')
- [255]
+ [1]
#endif
$ hg co -qC 2
BROKEN: This should succeed and merge the changes from x into y
@@ -372,7 +372,7 @@
What do you want to do? u
abort: unresolved conflicts, can't continue
(use 'hg resolve' and 'hg graft --continue')
- [255]
+ [1]
Copies via null revision (there shouldn't be any)
$ newrepo
--- a/tests/test-copy-move-merge.t Mon Apr 13 16:30:13 2020 +0300
+++ b/tests/test-copy-move-merge.t Thu Apr 16 22:51:09 2020 +0530
@@ -1,6 +1,19 @@
Test for the full copytracing algorithm
=======================================
+
+Initial Setup
+=============
+
+use git diff to see rename
+
+ $ cat << EOF >> $HGRCPATH
+ > [diff]
+ > git=yes
+ > EOF
+
+Setup a history where one side copies and renames a file (and updates it) while the other side updates it.
+
$ hg init t
$ cd t
@@ -22,13 +35,67 @@
$ hg ci -qAm "other"
+ $ hg log -G --patch
+ @ changeset: 2:add3f11052fa
+ | tag: tip
+ | parent: 0:b8bf91eeebbc
+ | user: test
+ | date: Thu Jan 01 00:00:00 1970 +0000
+ | summary: other
+ |
+ | diff --git a/a b/a
+ | --- a/a
+ | +++ b/a
+ | @@ -1,1 +1,2 @@
+ | +0
+ | 1
+ |
+ | o changeset: 1:17c05bb7fcb6
+ |/ user: test
+ | date: Thu Jan 01 00:00:00 1970 +0000
+ | summary: second
+ |
+ | diff --git a/a b/b
+ | rename from a
+ | rename to b
+ | --- a/a
+ | +++ b/b
+ | @@ -1,1 +1,2 @@
+ | 1
+ | +2
+ | diff --git a/a b/c
+ | copy from a
+ | copy to c
+ | --- a/a
+ | +++ b/c
+ | @@ -1,1 +1,2 @@
+ | 1
+ | +2
+ |
+ o changeset: 0:b8bf91eeebbc
+ user: test
+ date: Thu Jan 01 00:00:00 1970 +0000
+ summary: first
+
+ diff --git a/a b/a
+ new file mode 100644
+ --- /dev/null
+ +++ b/a
+ @@ -0,0 +1,1 @@
+ +1
+
+
+Test Simple Merge
+=================
+
$ hg merge --debug
unmatched files in other:
b
c
all copies found (* = to merge, ! = divergent, % = renamed and deleted):
- src: 'a' -> dst: 'b' *
- src: 'a' -> dst: 'c' *
+ on remote side:
+ src: 'a' -> dst: 'b' *
+ src: 'a' -> dst: 'c' *
checking for directory renames
resolving manifests
branchmerge: True, force: False, partial: False
@@ -63,8 +130,10 @@
2
Test disabling copy tracing
+===========================
-- first verify copy metadata was kept
+first verify copy metadata was kept
+-----------------------------------
$ hg up -qC 2
$ hg rebase --keep -d 1 -b 2 --config extensions.rebase=
@@ -77,7 +146,8 @@
1
2
-- next verify copy metadata is lost when disabled
+ next verify copy metadata is lost when disabled
+------------------------------------------------
$ hg strip -r . --config extensions.strip=
2 files updated, 0 files merged, 0 files removed, 0 files unresolved
@@ -98,6 +168,7 @@
$ cd ..
Verify disabling copy tracing still keeps copies from rebase source
+-------------------------------------------------------------------
$ hg init copydisable
$ cd copydisable
@@ -130,7 +201,14 @@
$ cd ../
-Verify we duplicate existing copies, instead of detecting them
+
+test storage preservation
+-------------------------
+
+Verify rebase does not discard recorded copy data when copy tracing usage is
+disabled.
+
+Setup
$ hg init copydisable3
$ cd copydisable3
@@ -153,6 +231,12 @@
|/
o 0 add a
+
+Actual Test
+
+A file is copied on one side and has been moved twice on the other side. The
+file is copied from `0:a`, so the file history of `3:b` should trace directly to `0:a`.
+
$ hg rebase -d 2 -s 3 --config extensions.rebase= --config experimental.copytrace=off
rebasing 3:47e1a9e6273b "copy a->b (2)" (tip)
saved backup bundle to $TESTTMP/copydisable3/.hg/strip-backup/47e1a9e6273b-2d099c59-rebase.hg
--- a/tests/test-copy.t Mon Apr 13 16:30:13 2020 +0300
+++ b/tests/test-copy.t Thu Apr 16 22:51:09 2020 +0530
@@ -262,5 +262,113 @@
xyzzy: not overwriting - file exists
('hg copy --after' to record the copy)
[1]
+ $ hg co -qC .
+ $ rm baz xyzzy
+
+
+Test unmarking copy of a single file
+
+# Set up by creating a copy
+ $ hg cp bar baz
+# Test uncopying a non-existent file
+ $ hg copy --forget non-existent
+ non-existent: $ENOENT$
+# Test uncopying a tracked but unrelated file
+ $ hg copy --forget foo
+ foo: not unmarking as copy - file is not marked as copied
+# Test uncopying a copy source
+ $ hg copy --forget bar
+ bar: not unmarking as copy - file is not marked as copied
+# baz should still be marked as a copy
+ $ hg st -C
+ A baz
+ bar
+# Test the normal case
+ $ hg copy --forget baz
+ $ hg st -C
+ A baz
+# Test uncopy with matching and non-matching patterns
+ $ hg cp bar baz --after
+ $ hg copy --forget bar baz
+ bar: not unmarking as copy - file is not marked as copied
+ $ hg st -C
+ A baz
+# Test uncopy with no exact matches
+ $ hg cp bar baz --after
+ $ hg copy --forget .
+ $ hg st -C
+ A baz
+ $ hg forget baz
+ $ rm baz
+
+Test unmarking copy of a directory
+
+ $ mkdir dir
+ $ echo foo > dir/foo
+ $ echo bar > dir/bar
+ $ hg add dir
+ adding dir/bar
+ adding dir/foo
+ $ hg ci -m 'add dir/'
+ $ hg cp dir dir2
+ copying dir/bar to dir2/bar
+ copying dir/foo to dir2/foo
+ $ touch dir2/untracked
+ $ hg copy --forget dir2
+ $ hg st -C
+ A dir2/bar
+ A dir2/foo
+ ? dir2/untracked
+# Clean up for next test
+ $ hg forget dir2
+ removing dir2/bar
+ removing dir2/foo
+ $ rm -r dir2
+
+Test uncopy on committed copies
+
+# Commit some copies
+ $ hg cp bar baz
+ $ hg cp bar qux
+ $ hg ci -m copies
+ $ hg st -C --change .
+ A baz
+ bar
+ A qux
+ bar
+ $ base=$(hg log -r '.^' -T '{rev}')
+ $ hg log -G -T '{rev}:{node|short} {desc}\n' -r $base:
+ @ 5:a612dc2edfda copies
+ |
+ o 4:4800b1f1f38e add dir/
+ |
+ ~
+# Add a dirty change on top to show that it's unaffected
+ $ echo dirty >> baz
+ $ hg st
+ M baz
+ $ cat baz
+ bleah
+ dirty
+ $ hg copy --forget --at-rev . baz
+ saved backup bundle to $TESTTMP/part2/.hg/strip-backup/a612dc2edfda-e36b4448-uncopy.hg
+# The unwanted copy is no longer recorded, but the unrelated one is
+ $ hg st -C --change .
+ A baz
+ A qux
+ bar
+# The old commit is gone and we have updated to the new commit
+ $ hg log -G -T '{rev}:{node|short} {desc}\n' -r $base:
+ @ 5:c45090e5effe copies
+ |
+ o 4:4800b1f1f38e add dir/
+ |
+ ~
+# Working copy still has the uncommitted change
+ $ hg st
+ M baz
+ $ cat baz
+ bleah
+ dirty
$ cd ..
--- a/tests/test-copytrace-heuristics.t Mon Apr 13 16:30:13 2020 +0300
+++ b/tests/test-copytrace-heuristics.t Thu Apr 16 22:51:09 2020 +0530
@@ -292,7 +292,8 @@
rebasing 1:472e38d57782 "mv a b"
saved backup bundle to $TESTTMP/repo/.hg/strip-backup/472e38d57782-17d50e29-rebase.hg
$ hg up -q c492ed3c7e35dcd1dc938053b8adf56e2cfbd062
- $ ls
+ $ ls -A
+ .hg
b
$ cd ..
$ rm -rf repo
@@ -433,7 +434,8 @@
rebasing 3:ef716627c70b "mod a" (tip)
merging b and a to b
saved backup bundle to $TESTTMP/repo/.hg/strip-backup/ef716627c70b-24681561-rebase.hg
- $ ls
+ $ ls -A
+ .hg
b
c
$ cat b
@@ -470,7 +472,8 @@
0 files updated, 1 files merged, 0 files removed, 0 files unresolved
(branch merge, don't forget to commit)
$ hg ci -m merge
- $ ls
+ $ ls -A
+ .hg
b
$ cd ..
$ rm -rf repo
@@ -505,7 +508,8 @@
merging b and a to b
merging c and a to c
saved backup bundle to $TESTTMP/repo/repo/.hg/strip-backup/ef716627c70b-24681561-rebase.hg
- $ ls
+ $ ls -A
+ .hg
b
c
$ cat b
@@ -558,7 +562,8 @@
|/ desc: mod a
o rev: 0, phase: draft
desc: initial
- $ ls
+ $ ls -A
+ .hg
c
$ cd ..
$ rm -rf repo
@@ -588,7 +593,8 @@
unshelving change 'default'
rebasing shelved changes
merging b and a to b
- $ ls
+ $ ls -A
+ .hg
b
$ cat b
b
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-debugbackupbundle.t Thu Apr 16 22:51:09 2020 +0530
@@ -0,0 +1,39 @@
+ $ cat >> $HGRCPATH << EOF
+ > [extensions]
+ > strip=
+ > EOF
+
+Setup repo
+
+ $ hg init repo
+ $ cd repo
+
+Test backups list and recover
+
+ $ hg debugbackupbundle
+ no backup changesets found
+
+ $ mkcommit() {
+ > echo "$1" > "$1"
+ > hg add "$1"
+ > hg ci -l $1
+ > }
+ $ mkcommit a
+ $ mkcommit b
+ $ hg strip .
+ 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
+ saved backup bundle to $TESTTMP/repo/.hg/strip-backup/d2ae7f538514-2953539b-backup.hg
+ $ hg debugbackupbundle
+ Recover changesets using: hg debugbackupbundle --recover <changeset hash>
+
+ Available backup changesets:
+ * (glob)
+ d2ae7f538514 b
+
+ $ hg debugbackupbundle --recover d2ae7f538514
+ Unbundling d2ae7f538514
+ adding changesets
+ adding manifests
+ adding file changes
+ added 1 changesets with 1 changes to 1 files
+ new changesets d2ae7f538514 (1 drafts)
--- a/tests/test-debugcommands.t Mon Apr 13 16:30:13 2020 +0300
+++ b/tests/test-debugcommands.t Thu Apr 16 22:51:09 2020 +0530
@@ -588,15 +588,15 @@
> EOF
$ "$PYTHON" debugstacktrace.py
stacktrace at:
- debugstacktrace.py:14 in * (glob)
- debugstacktrace.py:7 in f
+ *debugstacktrace.py:14 in * (glob)
+ *debugstacktrace.py:7 in f (glob)
hello from g at:
- debugstacktrace.py:14 in * (glob)
- debugstacktrace.py:8 in f
+ *debugstacktrace.py:14 in * (glob)
+ *debugstacktrace.py:8 in f (glob)
hi ...
from h hidden in g at:
- debugstacktrace.py:8 in f
- debugstacktrace.py:11 in g
+ *debugstacktrace.py:8 in f (glob)
+ *debugstacktrace.py:11 in g (glob)
Test debugcapabilities command:
@@ -615,6 +615,8 @@
changegroup
01
02
+ checkheads
+ related
digests
md5
sha1
@@ -650,7 +652,7 @@
devel-peer-request: pairs: 81 bytes
sending hello command
sending between command
- remote: 440
+ remote: 463
remote: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash
remote: 1
devel-peer-request: protocaps
--- a/tests/test-devel-warnings.t Mon Apr 13 16:30:13 2020 +0300
+++ b/tests/test-devel-warnings.t Thu Apr 16 22:51:09 2020 +0530
@@ -133,6 +133,7 @@
*/mercurial/server.py:* in runservice (glob)
*/mercurial/commandserver.py:* in run (glob)
*/mercurial/commandserver.py:* in _mainloop (glob)
+ */mercurial/commandserver.py:* in _acceptnewconnection (glob)
*/mercurial/commandserver.py:* in _runworker (glob)
*/mercurial/commandserver.py:* in _serverequest (glob)
*/mercurial/commandserver.py:* in serve (glob)
@@ -212,6 +213,7 @@
*/mercurial/server.py:* in runservice (glob)
*/mercurial/commandserver.py:* in run (glob)
*/mercurial/commandserver.py:* in _mainloop (glob)
+ */mercurial/commandserver.py:* in _acceptnewconnection (glob)
*/mercurial/commandserver.py:* in _runworker (glob)
*/mercurial/commandserver.py:* in _serverequest (glob)
*/mercurial/commandserver.py:* in serve (glob)
@@ -280,6 +282,7 @@
*/mercurial/server.py:* in runservice (glob)
*/mercurial/commandserver.py:* in run (glob)
*/mercurial/commandserver.py:* in _mainloop (glob)
+ */mercurial/commandserver.py:* in _acceptnewconnection (glob)
*/mercurial/commandserver.py:* in _runworker (glob)
*/mercurial/commandserver.py:* in _serverequest (glob)
*/mercurial/commandserver.py:* in serve (glob)
--- a/tests/test-doctest.py Mon Apr 13 16:30:13 2020 +0300
+++ b/tests/test-doctest.py Thu Apr 16 22:51:09 2020 +0530
@@ -1,10 +1,12 @@
# this is hack to make sure no escape characters are inserted into the output
from __future__ import absolute_import
+from __future__ import print_function
import doctest
import os
import re
+import subprocess
import sys
ispy3 = sys.version_info[0] >= 3
@@ -49,47 +51,122 @@
runner.summarize()
-testmod('mercurial.changegroup')
-testmod('mercurial.changelog')
-testmod('mercurial.cmdutil')
-testmod('mercurial.color')
-testmod('mercurial.config')
-testmod('mercurial.context')
-testmod('mercurial.dagparser', optionflags=doctest.NORMALIZE_WHITESPACE)
-testmod('mercurial.dispatch')
-testmod('mercurial.encoding')
-testmod('mercurial.fancyopts')
-testmod('mercurial.formatter')
-testmod('mercurial.hg')
-testmod('mercurial.hgweb.hgwebdir_mod')
-testmod('mercurial.match')
-testmod('mercurial.mdiff')
-testmod('mercurial.minirst')
-testmod('mercurial.patch')
-testmod('mercurial.pathutil')
-testmod('mercurial.parser')
-testmod('mercurial.pycompat')
-testmod('mercurial.revlog')
-testmod('mercurial.revlogutils.deltas')
-testmod('mercurial.revset')
-testmod('mercurial.revsetlang')
-testmod('mercurial.smartset')
-testmod('mercurial.store')
-testmod('mercurial.subrepo')
-testmod('mercurial.templatefilters')
-testmod('mercurial.templater')
-testmod('mercurial.ui')
-testmod('mercurial.url')
-testmod('mercurial.util')
-testmod('mercurial.util', testtarget='platform')
-testmod('mercurial.utils.dateutil')
-testmod('mercurial.utils.stringutil')
-testmod('hgext.convert.convcmd')
-testmod('hgext.convert.cvsps')
-testmod('hgext.convert.filemap')
-testmod('hgext.convert.p4')
-testmod('hgext.convert.subversion')
-testmod('hgext.fix')
-testmod('hgext.mq')
-# Helper scripts in tests/ that have doctests:
-testmod('drawdag')
+DONT_RUN = []
+
+# Exceptions to the defaults for a given detected module. The value for each
+# module name is a list of dicts that specify the kwargs to pass to testmod.
+# testmod is called once per item in the list, so an empty list will cause the
+# module to not be tested.
+testmod_arg_overrides = {
+ 'i18n.check-translation': DONT_RUN, # may require extra installation
+ 'mercurial.dagparser': [{'optionflags': doctest.NORMALIZE_WHITESPACE}],
+ 'mercurial.keepalive': DONT_RUN, # >>> is an example, not a doctest
+ 'mercurial.posix': DONT_RUN, # run by mercurial.platform
+ 'mercurial.statprof': DONT_RUN, # >>> is an example, not a doctest
+ 'mercurial.util': [{}, {'testtarget': 'platform'}], # run twice!
+ 'mercurial.windows': DONT_RUN, # run by mercurial.platform
+ 'tests.test-url': [{'optionflags': doctest.NORMALIZE_WHITESPACE}],
+}
+
+fileset = 'set:(**.py)'
+
+cwd = os.path.dirname(os.environ["TESTDIR"])
+
+files = subprocess.check_output(
+ "hg files --print0 \"%s\"" % fileset, shell=True, cwd=cwd,
+).split(b'\0')
+
+if sys.version_info[0] >= 3:
+ cwd = os.fsencode(cwd)
+
+mods_tested = set()
+for f in files:
+ if not f:
+ continue
+
+ with open(os.path.join(cwd, f), "rb") as fh:
+ if not re.search(br'\n\s*>>>', fh.read()):
+ continue
+
+ if ispy3:
+ f = f.decode()
+
+ modname = f.replace('.py', '').replace('\\', '.').replace('/', '.')
+
+ # Third-party modules aren't our responsibility to test, and the modules in
+ # contrib generally do not have doctests in a good state, plus they're hard
+ # to import if this test is running with py2, so we just skip both for now.
+ if modname.startswith('mercurial.thirdparty.') or modname.startswith(
+ 'contrib.'
+ ):
+ continue
+
+ for kwargs in testmod_arg_overrides.get(modname, [{}]):
+ mods_tested.add((modname, '%r' % (kwargs,)))
+ if modname.startswith('tests.'):
+ # On py2, we can't import from tests.foo, but it works on both py2
+ # and py3 with the way that PYTHONPATH is setup to import without
+ # the 'tests.' prefix, so we do that.
+ modname = modname[len('tests.') :]
+
+ testmod(modname, **kwargs)
+
+# Meta-test: let's make sure that we actually ran what we expected to, above.
+# Each item in the set is a 2-tuple of module name and stringified kwargs passed
+# to testmod.
+expected_mods_tested = set(
+ [
+ ('hgext.convert.convcmd', '{}'),
+ ('hgext.convert.cvsps', '{}'),
+ ('hgext.convert.filemap', '{}'),
+ ('hgext.convert.p4', '{}'),
+ ('hgext.convert.subversion', '{}'),
+ ('hgext.fix', '{}'),
+ ('hgext.mq', '{}'),
+ ('mercurial.changelog', '{}'),
+ ('mercurial.cmdutil', '{}'),
+ ('mercurial.color', '{}'),
+ ('mercurial.config', '{}'),
+ ('mercurial.dagparser', "{'optionflags': 4}"),
+ ('mercurial.encoding', '{}'),
+ ('mercurial.fancyopts', '{}'),
+ ('mercurial.formatter', '{}'),
+ ('mercurial.hg', '{}'),
+ ('mercurial.hgweb.hgwebdir_mod', '{}'),
+ ('mercurial.match', '{}'),
+ ('mercurial.mdiff', '{}'),
+ ('mercurial.minirst', '{}'),
+ ('mercurial.parser', '{}'),
+ ('mercurial.patch', '{}'),
+ ('mercurial.pathutil', '{}'),
+ ('mercurial.pycompat', '{}'),
+ ('mercurial.revlogutils.deltas', '{}'),
+ ('mercurial.revset', '{}'),
+ ('mercurial.revsetlang', '{}'),
+ ('mercurial.simplemerge', '{}'),
+ ('mercurial.smartset', '{}'),
+ ('mercurial.store', '{}'),
+ ('mercurial.subrepo', '{}'),
+ ('mercurial.templater', '{}'),
+ ('mercurial.ui', '{}'),
+ ('mercurial.util', "{'testtarget': 'platform'}"),
+ ('mercurial.util', '{}'),
+ ('mercurial.utils.dateutil', '{}'),
+ ('mercurial.utils.stringutil', '{}'),
+ ('tests.drawdag', '{}'),
+ ('tests.test-run-tests', '{}'),
+ ('tests.test-url', "{'optionflags': 4}"),
+ ]
+)
+
+unexpectedly_run = mods_tested.difference(expected_mods_tested)
+not_run = expected_mods_tested.difference(mods_tested)
+
+if unexpectedly_run:
+ print('Unexpectedly ran (probably need to add to list):')
+ for r in sorted(unexpectedly_run):
+ print(' %r' % (r,))
+if not_run:
+ print('Expected to run, but was not run (doctest removed?):')
+ for r in sorted(not_run):
+ print(' %r' % (r,))
--- a/tests/test-double-merge.t Mon Apr 13 16:30:13 2020 +0300
+++ b/tests/test-double-merge.t Thu Apr 16 22:51:09 2020 +0530
@@ -29,7 +29,8 @@
unmatched files in other:
bar
all copies found (* = to merge, ! = divergent, % = renamed and deleted):
- src: 'foo' -> dst: 'bar' *
+ on remote side:
+ src: 'foo' -> dst: 'bar' *
checking for directory renames
resolving manifests
branchmerge: True, force: False, partial: False
--- a/tests/test-extension-timing.t Mon Apr 13 16:30:13 2020 +0300
+++ b/tests/test-extension-timing.t Thu Apr 16 22:51:09 2020 +0530
@@ -1,3 +1,5 @@
+#require no-chg
+
Test basic extension support
$ cat > foobar.py <<EOF
--- a/tests/test-fastannotate-revmap.py Mon Apr 13 16:30:13 2020 +0300
+++ b/tests/test-fastannotate-revmap.py Thu Apr 16 22:51:09 2020 +0530
@@ -165,7 +165,7 @@
rm2.flush()
# two files should be the same
- ensure(len(set(util.readfile(p) for p in [path, path2])) == 1)
+ ensure(len({util.readfile(p) for p in [path, path2]}) == 1)
os.unlink(path)
os.unlink(path2)
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-fastexport.t Thu Apr 16 22:51:09 2020 +0530
@@ -0,0 +1,865 @@
+ $ cat >> $HGRCPATH << EOF
+ > [extensions]
+ > fastexport=
+ > EOF
+
+ $ hg init
+
+ $ hg debugbuilddag -mon '+2:tbase @name1 +3:thead1 <tbase @name2 +4:thead2 @both /thead1 +2:tmaintip'
+
+ $ hg up -r 10
+ 13 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ $ hg rm nf10
+ $ hg commit -u debugbuilddag --date 'Thu Jan 01 00:00:12 1970 +0000' -m r12
+ created new head
+ $ hg up -r 11
+ 4 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ $ hg merge -r 12
+ 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
+ (branch merge, don't forget to commit)
+ $ hg commit -m debugbuilddag --date 'Thu Jan 01 00:00:13 1970 +0000'
+
+ $ hg log -G
+ @ changeset: 13:e5c379648af4
+ |\ branch: both
+ | | tag: tip
+ | | parent: 11:2cbd52c10e88
+ | | parent: 12:4f31c9604af6
+ | | user: test
+ | | date: Thu Jan 01 00:00:13 1970 +0000
+ | | summary: debugbuilddag
+ | |
+ | o changeset: 12:4f31c9604af6
+ | | branch: both
+ | | parent: 10:9220596cb068
+ | | user: debugbuilddag
+ | | date: Thu Jan 01 00:00:12 1970 +0000
+ | | summary: r12
+ | |
+ o | changeset: 11:2cbd52c10e88
+ |/ branch: both
+ | tag: tmaintip
+ | user: debugbuilddag
+ | date: Thu Jan 01 00:00:11 1970 +0000
+ | summary: r11
+ |
+ o changeset: 10:9220596cb068
+ | branch: both
+ | user: debugbuilddag
+ | date: Thu Jan 01 00:00:10 1970 +0000
+ | summary: r10
+ |
+ o changeset: 9:0767d147d86e
+ |\ branch: both
+ | | parent: 8:0d0219415f18
+ | | parent: 4:e8bc3a6ab9ae
+ | | user: debugbuilddag
+ | | date: Thu Jan 01 00:00:09 1970 +0000
+ | | summary: r9
+ | |
+ | o changeset: 8:0d0219415f18
+ | | branch: name2
+ | | tag: thead2
+ | | user: debugbuilddag
+ | | date: Thu Jan 01 00:00:08 1970 +0000
+ | | summary: r8
+ | |
+ | o changeset: 7:82c6c8b3ac68
+ | | branch: name2
+ | | user: debugbuilddag
+ | | date: Thu Jan 01 00:00:07 1970 +0000
+ | | summary: r7
+ | |
+ | o changeset: 6:94093a13175f
+ | | branch: name2
+ | | user: debugbuilddag
+ | | date: Thu Jan 01 00:00:06 1970 +0000
+ | | summary: r6
+ | |
+ | o changeset: 5:4baee2f72e9e
+ | | branch: name2
+ | | parent: 1:bf4022f1addd
+ | | user: debugbuilddag
+ | | date: Thu Jan 01 00:00:05 1970 +0000
+ | | summary: r5
+ | |
+ o | changeset: 4:e8bc3a6ab9ae
+ | | branch: name1
+ | | tag: thead1
+ | | user: debugbuilddag
+ | | date: Thu Jan 01 00:00:04 1970 +0000
+ | | summary: r4
+ | |
+ o | changeset: 3:46148e496a8a
+ | | branch: name1
+ | | user: debugbuilddag
+ | | date: Thu Jan 01 00:00:03 1970 +0000
+ | | summary: r3
+ | |
+ o | changeset: 2:29863c4219cd
+ |/ branch: name1
+ | user: debugbuilddag
+ | date: Thu Jan 01 00:00:02 1970 +0000
+ | summary: r2
+ |
+ o changeset: 1:bf4022f1addd
+ | tag: tbase
+ | user: debugbuilddag
+ | date: Thu Jan 01 00:00:01 1970 +0000
+ | summary: r1
+ |
+ o changeset: 0:ae6ae30a671b
+ user: debugbuilddag
+ date: Thu Jan 01 00:00:00 1970 +0000
+ summary: r0
+
+
+ $ hg fastexport --export-marks fastexport.marks
+ blob
+ mark :1
+ data 65
+ 0 r0
+ 1
+ 2
+ 3
+ 4
+ 5
+ 6
+ 7
+ 8
+ 9
+ 10
+ 11
+ 12
+ 13
+ 14
+ 15
+ 16
+ 17
+ 18
+ 19
+ 20
+ 21
+ 22
+ 23
+
+ blob
+ mark :2
+ data 3
+ r0
+
+ commit refs/heads/default
+ mark :3
+ committer "debugbuilddag" <debugbuilddag> 0 -0000
+ data 2
+ r0
+ M 644 :1 mf
+ M 644 :2 nf0
+ M 644 :2 of
+
+ blob
+ mark :4
+ data 68
+ 0 r0
+ 1
+ 2 r1
+ 3
+ 4
+ 5
+ 6
+ 7
+ 8
+ 9
+ 10
+ 11
+ 12
+ 13
+ 14
+ 15
+ 16
+ 17
+ 18
+ 19
+ 20
+ 21
+ 22
+ 23
+
+ blob
+ mark :5
+ data 3
+ r1
+
+ blob
+ mark :6
+ data 3
+ r1
+
+ commit refs/heads/default
+ mark :7
+ committer "debugbuilddag" <debugbuilddag> 1 -0000
+ data 2
+ r1
+ from :3
+ M 644 :4 mf
+ M 644 :5 nf1
+ M 644 :6 of
+
+ blob
+ mark :8
+ data 71
+ 0 r0
+ 1
+ 2 r1
+ 3
+ 4 r2
+ 5
+ 6
+ 7
+ 8
+ 9
+ 10
+ 11
+ 12
+ 13
+ 14
+ 15
+ 16
+ 17
+ 18
+ 19
+ 20
+ 21
+ 22
+ 23
+
+ blob
+ mark :9
+ data 3
+ r2
+
+ blob
+ mark :10
+ data 3
+ r2
+
+ commit refs/heads/name1
+ mark :11
+ committer "debugbuilddag" <debugbuilddag> 2 -0000
+ data 2
+ r2
+ from :7
+ M 644 :8 mf
+ M 644 :9 nf2
+ M 644 :10 of
+
+ blob
+ mark :12
+ data 74
+ 0 r0
+ 1
+ 2 r1
+ 3
+ 4 r2
+ 5
+ 6 r3
+ 7
+ 8
+ 9
+ 10
+ 11
+ 12
+ 13
+ 14
+ 15
+ 16
+ 17
+ 18
+ 19
+ 20
+ 21
+ 22
+ 23
+
+ blob
+ mark :13
+ data 3
+ r3
+
+ blob
+ mark :14
+ data 3
+ r3
+
+ commit refs/heads/name1
+ mark :15
+ committer "debugbuilddag" <debugbuilddag> 3 -0000
+ data 2
+ r3
+ from :11
+ M 644 :12 mf
+ M 644 :13 nf3
+ M 644 :14 of
+
+ blob
+ mark :16
+ data 77
+ 0 r0
+ 1
+ 2 r1
+ 3
+ 4 r2
+ 5
+ 6 r3
+ 7
+ 8 r4
+ 9
+ 10
+ 11
+ 12
+ 13
+ 14
+ 15
+ 16
+ 17
+ 18
+ 19
+ 20
+ 21
+ 22
+ 23
+
+ blob
+ mark :17
+ data 3
+ r4
+
+ blob
+ mark :18
+ data 3
+ r4
+
+ commit refs/heads/name1
+ mark :19
+ committer "debugbuilddag" <debugbuilddag> 4 -0000
+ data 2
+ r4
+ from :15
+ M 644 :16 mf
+ M 644 :17 nf4
+ M 644 :18 of
+
+ blob
+ mark :20
+ data 71
+ 0 r0
+ 1
+ 2 r1
+ 3
+ 4
+ 5
+ 6
+ 7
+ 8
+ 9
+ 10 r5
+ 11
+ 12
+ 13
+ 14
+ 15
+ 16
+ 17
+ 18
+ 19
+ 20
+ 21
+ 22
+ 23
+
+ blob
+ mark :21
+ data 3
+ r5
+
+ blob
+ mark :22
+ data 3
+ r5
+
+ commit refs/heads/name2
+ mark :23
+ committer "debugbuilddag" <debugbuilddag> 5 -0000
+ data 2
+ r5
+ from :7
+ M 644 :20 mf
+ M 644 :21 nf5
+ M 644 :22 of
+
+ blob
+ mark :24
+ data 74
+ 0 r0
+ 1
+ 2 r1
+ 3
+ 4
+ 5
+ 6
+ 7
+ 8
+ 9
+ 10 r5
+ 11
+ 12 r6
+ 13
+ 14
+ 15
+ 16
+ 17
+ 18
+ 19
+ 20
+ 21
+ 22
+ 23
+
+ blob
+ mark :25
+ data 3
+ r6
+
+ blob
+ mark :26
+ data 3
+ r6
+
+ commit refs/heads/name2
+ mark :27
+ committer "debugbuilddag" <debugbuilddag> 6 -0000
+ data 2
+ r6
+ from :23
+ M 644 :24 mf
+ M 644 :25 nf6
+ M 644 :26 of
+
+ blob
+ mark :28
+ data 77
+ 0 r0
+ 1
+ 2 r1
+ 3
+ 4
+ 5
+ 6
+ 7
+ 8
+ 9
+ 10 r5
+ 11
+ 12 r6
+ 13
+ 14 r7
+ 15
+ 16
+ 17
+ 18
+ 19
+ 20
+ 21
+ 22
+ 23
+
+ blob
+ mark :29
+ data 3
+ r7
+
+ blob
+ mark :30
+ data 3
+ r7
+
+ commit refs/heads/name2
+ mark :31
+ committer "debugbuilddag" <debugbuilddag> 7 -0000
+ data 2
+ r7
+ from :27
+ M 644 :28 mf
+ M 644 :29 nf7
+ M 644 :30 of
+
+ blob
+ mark :32
+ data 80
+ 0 r0
+ 1
+ 2 r1
+ 3
+ 4
+ 5
+ 6
+ 7
+ 8
+ 9
+ 10 r5
+ 11
+ 12 r6
+ 13
+ 14 r7
+ 15
+ 16 r8
+ 17
+ 18
+ 19
+ 20
+ 21
+ 22
+ 23
+
+ blob
+ mark :33
+ data 3
+ r8
+
+ blob
+ mark :34
+ data 3
+ r8
+
+ commit refs/heads/name2
+ mark :35
+ committer "debugbuilddag" <debugbuilddag> 8 -0000
+ data 2
+ r8
+ from :31
+ M 644 :32 mf
+ M 644 :33 nf8
+ M 644 :34 of
+
+ blob
+ mark :36
+ data 92
+ 0 r0
+ 1
+ 2 r1
+ 3
+ 4 r2
+ 5
+ 6 r3
+ 7
+ 8 r4
+ 9
+ 10 r5
+ 11
+ 12 r6
+ 13
+ 14 r7
+ 15
+ 16 r8
+ 17
+ 18 r9
+ 19
+ 20
+ 21
+ 22
+ 23
+
+ blob
+ mark :37
+ data 3
+ r9
+
+ blob
+ mark :38
+ data 3
+ r9
+
+ commit refs/heads/both
+ mark :39
+ committer "debugbuilddag" <debugbuilddag> 9 -0000
+ data 2
+ r9
+ from :35
+ merge :19
+ M 644 :36 mf
+ M 644 :9 nf2
+ M 644 :13 nf3
+ M 644 :17 nf4
+ M 644 :37 nf9
+ M 644 :38 of
+
+ blob
+ mark :40
+ data 96
+ 0 r0
+ 1
+ 2 r1
+ 3
+ 4 r2
+ 5
+ 6 r3
+ 7
+ 8 r4
+ 9
+ 10 r5
+ 11
+ 12 r6
+ 13
+ 14 r7
+ 15
+ 16 r8
+ 17
+ 18 r9
+ 19
+ 20 r10
+ 21
+ 22
+ 23
+
+ blob
+ mark :41
+ data 4
+ r10
+
+ blob
+ mark :42
+ data 4
+ r10
+
+ commit refs/heads/both
+ mark :43
+ committer "debugbuilddag" <debugbuilddag> 10 -0000
+ data 3
+ r10
+ from :39
+ M 644 :40 mf
+ M 644 :41 nf10
+ M 644 :42 of
+
+ blob
+ mark :44
+ data 100
+ 0 r0
+ 1
+ 2 r1
+ 3
+ 4 r2
+ 5
+ 6 r3
+ 7
+ 8 r4
+ 9
+ 10 r5
+ 11
+ 12 r6
+ 13
+ 14 r7
+ 15
+ 16 r8
+ 17
+ 18 r9
+ 19
+ 20 r10
+ 21
+ 22 r11
+ 23
+
+ blob
+ mark :45
+ data 4
+ r11
+
+ blob
+ mark :46
+ data 4
+ r11
+
+ commit refs/heads/both
+ mark :47
+ committer "debugbuilddag" <debugbuilddag> 11 -0000
+ data 3
+ r11
+ from :43
+ M 644 :44 mf
+ M 644 :45 nf11
+ M 644 :46 of
+
+ commit refs/heads/both
+ mark :48
+ committer "debugbuilddag" <debugbuilddag> 12 -0000
+ data 3
+ r12
+ from :43
+ D nf10
+
+ commit refs/heads/both
+ mark :49
+ committer "test" <test> 13 -0000
+ data 13
+ debugbuilddag
+ from :47
+ merge :48
+ D nf10
+
+ $ cat fastexport.marks
+ e1767c7564f83127d75331428473dd0512b36cc6
+ 2c436e3f677d989438ddd9a7e5e4d56e016dfd35
+ ae6ae30a671be09096aaaf51217b3691eec0eee0
+ 016f8fd6128ac4bd19ec5a6ae128dadc3873b13f
+ a0e6fc91007068df3bc60f46ce0a893a73189b54
+ 1a085e1daf625e186ee0064c64ff41731a901f24
+ bf4022f1addd28523fb1122ac6166a29da58d34c
+ 2c45ad1c720111830380baa89a6a16cae1bef688
+ 180506669a19f4b8317009fc6fa0043966d1ffb4
+ 1ebc486e6a5c2c8ca7e531cf0b63dfcc071ec324
+ 29863c4219cd68e0f57aecd5ffc12ba83313f26b
+ d20e5eeac6991189eefad45cd8ea0f6a32ce8122
+ 710c4580a600b8aadc63fa3d7bb0fab71b127c04
+ fa27314b56d7b6f90c1caeebb2a74730b3747574
+ 46148e496a8a75fde9e203b1ded69ec99289af27
+ e5548c667d7eeb6c326e723c579888341329c9fe
+ 3c1407305701051cbed9f9cb9a68bdfb5997c235
+ e2ed51893b0a54bd7fef5a406a0c489d668f19c3
+ e8bc3a6ab9aef589f5db504f401953449a3c3a10
+ 558f3a23efc0a1a972e14d5314a65918791b77be
+ 0dbd89c185f53a1727c54cd1ce256482fa23968e
+ f84faeb138605b36d74324c6d0ea76a9099c3567
+ 4baee2f72e9eeae2aef5b9e1ec416020090672ef
+ 412c5793886eaaabb31debd36695f6215a719865
+ a0eafc60760d32b690564b8588ba042cc63e0c74
+ a53842517de32d2f926c38a170c29dc90ae3348a
+ 94093a13175f1cfcbbfddaa0ceafbd3a89784b91
+ d2f0d76af0be0da17ec88190215eadb8706689ab
+ 639939af794373d6c2ab12c2ef637cd220174389
+ cc8921e2b19a88147643ea825459ffa140e3d704
+ 82c6c8b3ac6873fadd9083323b02cc6a53659130
+ c6cc0b14a3e6e61906242d6fce28b9510c9f9208
+ 093593169cb4716f94e52ed7561bb84b36b7eb9d
+ 034df75dc138e7507e061d26170b4c44321a5d92
+ 0d0219415f18c43636163fff4160f41600951a25
+ f13693f6e6052eeb189521945fef56892e812fdb
+ 1239c633b8a7a7283825dba9171bf285e5790852
+ 34b655bd51e8573b8e85c1c1476a94d8573babef
+ 0767d147d86e1546593bda50f1e11276c0ac8f1a
+ 284ca43bbbe82e89c0f1d977e8ac6cfb969c05ec
+ 15315ab9e272ec81ae8d847996e5bdecd5635b0b
+ 78c10aaf21f49d518c7ccb8318c29abb5d4e5db7
+ 9220596cb068dfc73e2f7e695dc8ad0858a936db
+ 32abd0da49b7c7ee756298fc46a15584d6aedc99
+ 33fbc651630ffa7ccbebfe4eb91320a873e7291c
+ 868d828870663d075cdcff502d26cf8445ce068e
+ 2cbd52c10e88ce604402dc83a869ec4f07765b3d
+ 4f31c9604af676986343d775b05695f535e8db5e
+ e5c379648af4c9fa3b5546ab7ee6e61a36082830
+
+ $ hg fastexport --export-marks fastexport.marks2 -r 0
+ blob
+ mark :1
+ data 65
+ 0 r0
+ 1
+ 2
+ 3
+ 4
+ 5
+ 6
+ 7
+ 8
+ 9
+ 10
+ 11
+ 12
+ 13
+ 14
+ 15
+ 16
+ 17
+ 18
+ 19
+ 20
+ 21
+ 22
+ 23
+
+ blob
+ mark :2
+ data 3
+ r0
+
+ commit refs/heads/default
+ mark :3
+ committer "debugbuilddag" <debugbuilddag> 0 -0000
+ data 2
+ r0
+ M 644 :1 mf
+ M 644 :2 nf0
+ M 644 :2 of
+
+ $ cat fastexport.marks2
+ e1767c7564f83127d75331428473dd0512b36cc6
+ 2c436e3f677d989438ddd9a7e5e4d56e016dfd35
+ ae6ae30a671be09096aaaf51217b3691eec0eee0
+ $ hg fastexport --import-marks fastexport.marks2 -r 1
+ blob
+ mark :4
+ data 68
+ 0 r0
+ 1
+ 2 r1
+ 3
+ 4
+ 5
+ 6
+ 7
+ 8
+ 9
+ 10
+ 11
+ 12
+ 13
+ 14
+ 15
+ 16
+ 17
+ 18
+ 19
+ 20
+ 21
+ 22
+ 23
+
+ blob
+ mark :5
+ data 3
+ r1
+
+ blob
+ mark :6
+ data 3
+ r1
+
+ commit refs/heads/default
+ mark :7
+ committer "debugbuilddag" <debugbuilddag> 1 -0000
+ data 2
+ r1
+ from :3
+ M 644 :4 mf
+ M 644 :5 nf1
+ M 644 :6 of
+
+ $ echo foo > of
+ $ hg commit --user '<badname> <bad email>' --date 'Fri Jan 02 00:00:00 1970 +0000' -m 'Testcommit'
+ $ hg fastexport --import-marks fastexport.marks -r tip
+ blob
+ mark :50
+ data 4
+ foo
+
+ abort: Unable to parse user into person and email for revision 4f71ca786403919cd16669d94ff7cd1c09437a44
+ [255]
--- a/tests/test-fetch.t Mon Apr 13 16:30:13 2020 +0300
+++ b/tests/test-fetch.t Thu Apr 16 22:51:09 2020 +0530
@@ -62,7 +62,8 @@
merging with 1:d36c0562f908
1 files updated, 0 files merged, 0 files removed, 0 files unresolved
new changeset 3:a323a0c43ec4 merges remote changes with local
- $ ls c
+ $ ls -A c
+ .hg
a
b
c
--- a/tests/test-fix-topology.t Mon Apr 13 16:30:13 2020 +0300
+++ b/tests/test-fix-topology.t Thu Apr 16 22:51:09 2020 +0530
@@ -21,6 +21,7 @@
$ cat >> $HGRCPATH <<EOF
> [extensions]
> fix =
+ > strip =
> [fix]
> uppercase-whole-file:command="$PYTHON" $UPPERCASEPY
> uppercase-whole-file:pattern=set:**
@@ -128,12 +129,8 @@
Fix all but the root revision and its four children.
-#if obsstore-on
$ hg fix -r '2|4|7|8|9' --working-dir
-#else
- $ hg fix -r '2|4|7|8|9' --working-dir
- saved backup bundle to * (glob)
-#endif
+ saved backup bundle to * (glob) (obsstore-off !)
The five revisions remain, but the other revisions were fixed and replaced. All
parent pointers have been accurately set to reproduce the previous topology
@@ -266,6 +263,111 @@
$ cd ..
+
+Test the --source option. We only do this with obsstore on to avoid duplicating
+test code. We rely on the other tests to prove that obsolescence is not an
+important factor here.
+
+#if obsstore-on
+ $ hg init source-arg
+ $ cd source-arg
+ $ printf "aaaa\n" > a
+ $ hg commit -Am "change A"
+ adding a
+ $ printf "bbbb\n" > b
+ $ hg commit -Am "change B"
+ adding b
+ $ printf "cccc\n" > c
+ $ hg commit -Am "change C"
+ adding c
+ $ hg checkout 0
+ 0 files updated, 0 files merged, 2 files removed, 0 files unresolved
+ $ printf "dddd\n" > d
+ $ hg commit -Am "change D"
+ adding d
+ created new head
+ $ hg log --graph --template '{rev} {desc}\n'
+ @ 3 change D
+ |
+ | o 2 change C
+ | |
+ | o 1 change B
+ |/
+ o 0 change A
+
+
+Test passing 'wdir()' to --source
+ $ printf "xxxx\n" > x
+ $ hg add x
+ $ hg fix -s 'wdir()'
+ $ cat *
+ aaaa
+ dddd
+ XXXX
+
+Test passing '.' to --source
+ $ printf "xxxx\n" > x
+ $ hg fix -s .
+ $ hg log --graph --template '{rev} {desc}\n'
+ @ 4 change D
+ |
+ | o 2 change C
+ | |
+ | o 1 change B
+ |/
+ o 0 change A
+
+ $ cat *
+ aaaa
+ DDDD
+ XXXX
+ $ hg strip -qf 4
+ $ hg co -q 3
+
+Test passing other branch to --source
+ $ printf "xxxx\n" > x
+ $ hg add x
+ $ hg fix -s 2
+ $ hg log --graph --template '{rev} {desc}\n'
+ o 4 change C
+ |
+ | @ 3 change D
+ | |
+ o | 1 change B
+ |/
+ o 0 change A
+
+ $ hg cat -r 4 b c
+ bbbb
+ CCCC
+ $ cat *
+ aaaa
+ dddd
+ xxxx
+ $ hg strip -qf 4
+
+Test passing multiple revisions to --source
+ $ hg fix -s '2 + .'
+ $ hg log --graph --template '{rev} {desc}\n'
+ @ 5 change D
+ |
+ | o 4 change C
+ | |
+ | o 1 change B
+ |/
+ o 0 change A
+
+ $ hg cat -r 4 b c
+ bbbb
+ CCCC
+ $ cat *
+ aaaa
+ DDDD
+ XXXX
+
+ $ cd ..
+#endif
+
The --all flag should fix anything that wouldn't cause a problem if you fixed
it, including the working copy. Obsolete revisions are not fixed because that
could cause divergence. Public revisions would cause an abort because they are
@@ -275,6 +377,9 @@
$ hg init fixall
$ cd fixall
+ $ hg fix --all --working-dir
+ abort: cannot specify both --working-dir and --all
+ [255]
#if obsstore-on
$ printf "one\n" > foo.whole
--- a/tests/test-fix.t Mon Apr 13 16:30:13 2020 +0300
+++ b/tests/test-fix.t Thu Apr 16 22:51:09 2020 +0530
@@ -104,12 +104,12 @@
options ([+] can be repeated):
- --all fix all non-public non-obsolete revisions
- --base REV [+] revisions to diff against (overrides automatic selection,
- and applies to every revision being fixed)
- -r --rev REV [+] revisions to fix
- -w --working-dir fix the working directory
- --whole always fix every line of a file
+ --all fix all non-public non-obsolete revisions
+ --base REV [+] revisions to diff against (overrides automatic selection,
+ and applies to every revision being fixed)
+ -s --source REV [+] fix the specified revisions and their descendants
+ -w --working-dir fix the working directory
+ --whole always fix every line of a file
(some details hidden, use --verbose to show complete help)
--- a/tests/test-fncache.t Mon Apr 13 16:30:13 2020 +0300
+++ b/tests/test-fncache.t Thu Apr 16 22:51:09 2020 +0530
@@ -356,7 +356,7 @@
$ cat .hg/store/fncache | sort
data/y.i
data/z.i
- $ hg recover
+ $ hg recover --verify
rolling back interrupted transaction
checking changesets
checking manifests
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-git-interop.t Thu Apr 16 22:51:09 2020 +0530
@@ -0,0 +1,250 @@
+#require pygit2
+
+Setup:
+ $ GIT_AUTHOR_NAME='test'; export GIT_AUTHOR_NAME
+ > GIT_AUTHOR_EMAIL='test@example.org'; export GIT_AUTHOR_EMAIL
+ > GIT_AUTHOR_DATE="2007-01-01 00:00:00 +0000"; export GIT_AUTHOR_DATE
+ > GIT_COMMITTER_NAME="$GIT_AUTHOR_NAME"; export GIT_COMMITTER_NAME
+ > GIT_COMMITTER_EMAIL="$GIT_AUTHOR_EMAIL"; export GIT_COMMITTER_EMAIL
+ > GIT_COMMITTER_DATE="$GIT_AUTHOR_DATE"; export GIT_COMMITTER_DATE
+ > count=10
+ > gitcommit() {
+ > GIT_AUTHOR_DATE="2007-01-01 00:00:$count +0000";
+ > GIT_COMMITTER_DATE="$GIT_AUTHOR_DATE"
+ > git commit "$@" >/dev/null 2>/dev/null || echo "git commit error"
+ > count=`expr $count + 1`
+ > }
+
+
+Test auto-loading extension works:
+ $ mkdir nogit
+ $ cd nogit
+ $ mkdir .hg
+ $ echo git >> .hg/requires
+ $ hg status
+ abort: repository specified git format in .hg/requires but has no .git directory
+ [255]
+ $ git init
+ Initialized empty Git repository in $TESTTMP/nogit/.git/
+This status invocation shows some hg gunk because we didn't use
+`hg init --git`, which fixes up .git/info/exclude for us.
+ $ hg status
+ ? .hg/cache/git-commits.sqlite
+ ? .hg/cache/git-commits.sqlite-shm
+ ? .hg/cache/git-commits.sqlite-wal
+ ? .hg/requires
+ $ cd ..
+
+Now globally enable extension for the rest of the test:
+ $ echo "[extensions]" >> $HGRCPATH
+ > echo "git=" >> $HGRCPATH
+
+Make a new repo with git:
+ $ mkdir foo
+ $ cd foo
+ $ git init
+ Initialized empty Git repository in $TESTTMP/foo/.git/
+Ignore the .hg directory within git:
+ $ echo .hg >> .git/info/exclude
+ $ echo alpha > alpha
+ $ git add alpha
+ $ gitcommit -am 'Add alpha'
+ $ echo beta > beta
+ $ git add beta
+ $ gitcommit -am 'Add beta'
+ $ echo gamma > gamma
+ $ git status
+ On branch master
+ Untracked files:
+ (use "git add <file>..." to include in what will be committed)
+ gamma
+
+ nothing added to commit but untracked files present (use "git add" to track)
+
+Without creating the .hg, hg status fails:
+ $ hg status
+ abort: no repository found in '$TESTTMP/foo' (.hg not found)!
+ [255]
+But if you run hg init --git, it works:
+ $ hg init --git
+ $ hg id --traceback
+ 3d9be8deba43 tip master
+ $ hg status
+ ? gamma
+Log works too:
+ $ hg log
+ changeset: 1:3d9be8deba43
+ bookmark: master
+ tag: tip
+ user: test <test@example.org>
+ date: Mon Jan 01 00:00:11 2007 +0000
+ summary: Add beta
+
+ changeset: 0:c5864c9d16fb
+ user: test <test@example.org>
+ date: Mon Jan 01 00:00:10 2007 +0000
+ summary: Add alpha
+
+
+
+and bookmarks:
+ $ hg bookmarks
+ * master 1:3d9be8deba43
+
+diff even works transparently in both systems:
+ $ echo blah >> alpha
+ $ git diff
+ diff --git a/alpha b/alpha
+ index 4a58007..faed1b7 100644
+ --- a/alpha
+ +++ b/alpha
+ @@ -1* +1,2 @@ (glob)
+ alpha
+ +blah
+ $ hg diff --git
+ diff --git a/alpha b/alpha
+ --- a/alpha
+ +++ b/alpha
+ @@ -1,1 +1,2 @@
+ alpha
+ +blah
+
+Remove a file, it shows as such:
+ $ rm alpha
+ $ hg status
+ ! alpha
+ ? gamma
+
+Revert works:
+ $ hg revert alpha --traceback
+ $ hg status
+ ? gamma
+ $ git status
+ On branch master
+ Untracked files:
+ (use "git add <file>..." to include in what will be committed)
+ gamma
+
+ nothing added to commit but untracked files present (use "git add" to track)
+
+Add shows sanely in both:
+ $ hg add gamma
+ $ hg status
+ A gamma
+ $ hg files
+ alpha
+ beta
+ gamma
+ $ git ls-files
+ alpha
+ beta
+ gamma
+ $ git status
+ On branch master
+ Changes to be committed:
+ (use "git restore --staged <file>..." to unstage)
+ new file: gamma
+
+
+forget does what it should as well:
+ $ hg forget gamma
+ $ hg status
+ ? gamma
+ $ git status
+ On branch master
+ Untracked files:
+ (use "git add <file>..." to include in what will be committed)
+ gamma
+
+ nothing added to commit but untracked files present (use "git add" to track)
+
+clean up untracked file
+ $ rm gamma
+
+hg log FILE
+
+ $ echo a >> alpha
+ $ hg ci -m 'more alpha' --traceback --date '1583522787 18000'
+ $ echo b >> beta
+ $ hg ci -m 'more beta'
+ $ echo a >> alpha
+ $ hg ci -m 'even more alpha'
+ $ hg log -G alpha
+ @ changeset: 4:6626247b7dc8
+ : bookmark: master
+ : tag: tip
+ : user: test <test>
+ : date: Thu Jan 01 00:00:00 1970 +0000
+ : summary: even more alpha
+ :
+ o changeset: 2:a1983dd7fb19
+ : user: test <test>
+ : date: Fri Mar 06 14:26:27 2020 -0500
+ : summary: more alpha
+ :
+ o changeset: 0:c5864c9d16fb
+ user: test <test@example.org>
+ date: Mon Jan 01 00:00:10 2007 +0000
+ summary: Add alpha
+
+ $ hg log -G beta
+ o changeset: 3:d8ee22687733
+ : user: test <test>
+ : date: Thu Jan 01 00:00:00 1970 +0000
+ : summary: more beta
+ :
+ o changeset: 1:3d9be8deba43
+ | user: test <test@example.org>
+ ~ date: Mon Jan 01 00:00:11 2007 +0000
+ summary: Add beta
+
+
+hg annotate
+
+ $ hg annotate alpha
+ 0: alpha
+ 2: a
+ 4: a
+ $ hg annotate beta
+ 1: beta
+ 3: b
+
+
+Files in subdirectories. TODO: case-folding support, make this `A`
+instead of `a`.
+
+ $ mkdir a
+ $ echo "This is file mu." > a/mu
+ $ hg ci -A -m 'Introduce file a/mu'
+ adding a/mu
+
+Both hg and git agree a/mu is part of the repo
+
+ $ git ls-files
+ a/mu
+ alpha
+ beta
+ $ hg files
+ a/mu
+ alpha
+ beta
+
+hg and git status both clean
+
+ $ git status
+ On branch master
+ nothing to commit, working tree clean
+ $ hg status
+
+
+node|shortest works correctly
+ $ hg log -T '{node}\n' | sort
+ 3d9be8deba43482be2c81a4cb4be1f10d85fa8bc
+ 6626247b7dc8f231b183b8a4761c89139baca2ad
+ a1983dd7fb19cbd83ad5a1c2fc8bf3d775dea12f
+ ae1ab744f95bfd5b07cf573baef98a778058537b
+ c5864c9d16fb3431fe2c175ff84dc6accdbb2c18
+ d8ee22687733a1991813560b15128cd9734f4b48
+ $ hg log -r ae1ab744f95bfd5b07cf573baef98a778058537b --template "{shortest(node,1)}\n"
+ ae
+
--- a/tests/test-graft-interrupted.t Mon Apr 13 16:30:13 2020 +0300
+++ b/tests/test-graft-interrupted.t Thu Apr 16 22:51:09 2020 +0530
@@ -32,7 +32,7 @@
warning: conflicts while merging b! (edit, then use 'hg resolve --mark')
abort: unresolved conflicts, can't continue
(use 'hg resolve' and 'hg graft --continue')
- [255]
+ [1]
Writing the nodes in old format to graftstate
@@ -91,7 +91,7 @@
warning: conflicts while merging b! (edit, then use 'hg resolve --mark')
abort: unresolved conflicts, can't continue
(use 'hg resolve' and 'hg graft --continue')
- [255]
+ [1]
$ echo wat > b
$ hg resolve -m
@@ -141,7 +141,7 @@
warning: conflicts while merging b! (edit, then use 'hg resolve --mark')
abort: unresolved conflicts, can't continue
(use 'hg resolve' and 'hg graft --continue')
- [255]
+ [1]
$ echo foobar > b
$ hg resolve -m
@@ -180,7 +180,7 @@
warning: conflicts while merging b! (edit, then use 'hg resolve --mark')
abort: unresolved conflicts, can't continue
(use 'hg resolve' and 'hg graft --continue')
- [255]
+ [1]
$ echo foobar > b
$ hg resolve -m
@@ -246,7 +246,7 @@
warning: conflicts while merging d! (edit, then use 'hg resolve --mark')
abort: unresolved conflicts, can't continue
(use 'hg resolve' and 'hg graft --continue')
- [255]
+ [1]
$ hg graft --stop --continue
abort: cannot use '--continue' and '--stop' together
@@ -283,7 +283,7 @@
warning: conflicts while merging d! (edit, then use 'hg resolve --mark')
abort: unresolved conflicts, can't continue
(use 'hg resolve' and 'hg graft --continue')
- [255]
+ [1]
$ hg graft --stop
stopped the interrupted graft
@@ -352,7 +352,7 @@
warning: conflicts while merging c! (edit, then use 'hg resolve --mark')
abort: unresolved conflicts, can't continue
(use 'hg resolve' and 'hg graft --continue')
- [255]
+ [1]
$ hg graft --continue --abort
abort: cannot use '--continue' and '--abort' together
@@ -399,7 +399,7 @@
warning: conflicts while merging c! (edit, then use 'hg resolve --mark')
abort: unresolved conflicts, can't continue
(use 'hg resolve' and 'hg graft --continue')
- [255]
+ [1]
$ hg abort
graft aborted
@@ -426,12 +426,12 @@
warning: conflicts while merging c! (edit, then use 'hg resolve --mark')
abort: unresolved conflicts, can't continue
(use 'hg resolve' and 'hg graft --continue')
- [255]
+ [1]
$ hg log -GT "{rev}:{node|short} {desc}"
@ 6:6ec71c037d94 added x
|
- | o 5:36b793615f78 added foo to c
+ | % 5:36b793615f78 added foo to c
| |
| | o 4:863a25e1a9ea added x
| |/
@@ -488,7 +488,7 @@
warning: conflicts while merging c! (edit, then use 'hg resolve --mark')
abort: unresolved conflicts, can't continue
(use 'hg resolve' and 'hg graft --continue')
- [255]
+ [1]
$ cd ..
$ hg init pullrepo
@@ -609,7 +609,7 @@
warning: conflicts while merging a! (edit, then use 'hg resolve --mark')
abort: unresolved conflicts, can't continue
(use 'hg resolve' and 'hg graft --continue')
- [255]
+ [1]
Resolve conflict:
$ echo A>a
@@ -622,7 +622,7 @@
$ hg log -GT "{rev}:{node|short} {desc}\n"
@ 4:2aa9ad1006ff B in file a
|
- | o 3:09e253b87e17 A in file a
+ | % 3:09e253b87e17 A in file a
| |
| o 2:d36c0562f908 c
| |
@@ -648,7 +648,7 @@
warning: conflicts while merging a! (edit, then use 'hg resolve --mark')
abort: unresolved conflicts, can't continue
(use 'hg resolve' and 'hg graft --continue')
- [255]
+ [1]
Resolve conflict:
$ echo A>a
@@ -669,7 +669,7 @@
$ hg log -GT "{rev}:{node|short} {desc}\n"
@ 4:2aa9ad1006ff B in file a
|
- | o 3:09e253b87e17 A in file a
+ | % 3:09e253b87e17 A in file a
| |
| o 2:d36c0562f908 c
| |
@@ -688,7 +688,7 @@
warning: conflicts while merging a! (edit, then use 'hg resolve --mark')
abort: unresolved conflicts, can't continue
(use 'hg resolve' and 'hg graft --continue')
- [255]
+ [1]
$ echo A>a
$ hg resolve --mark
@@ -712,7 +712,7 @@
$ hg log -GT "{rev}:{node|short} {desc}\n"
@ 4:2aa9ad1006ff B in file a
|
- | o 3:09e253b87e17 A in file a
+ | % 3:09e253b87e17 A in file a
| |
| o 2:d36c0562f908 c
| |
--- a/tests/test-graft.t Mon Apr 13 16:30:13 2020 +0300
+++ b/tests/test-graft.t Thu Apr 16 22:51:09 2020 +0530
@@ -204,7 +204,8 @@
unmatched files in local:
b
all copies found (* = to merge, ! = divergent, % = renamed and deleted):
- src: 'a' -> dst: 'b' *
+ on local side:
+ src: 'a' -> dst: 'b' *
checking for directory renames
resolving manifests
branchmerge: True, force: True, partial: False
@@ -223,7 +224,8 @@
updating the branch cache
grafting 5:97f8bfe72746 "5"
all copies found (* = to merge, ! = divergent, % = renamed and deleted):
- src: 'c' -> dst: 'b'
+ on local side:
+ src: 'c' -> dst: 'b'
checking for directory renames
resolving manifests
branchmerge: True, force: True, partial: False
@@ -239,7 +241,8 @@
scanning for duplicate grafts
grafting 4:9c233e8e184d "4"
all copies found (* = to merge, ! = divergent, % = renamed and deleted):
- src: 'c' -> dst: 'b'
+ on local side:
+ src: 'c' -> dst: 'b'
checking for directory renames
resolving manifests
branchmerge: True, force: True, partial: False
@@ -257,7 +260,7 @@
warning: conflicts while merging e! (edit, then use 'hg resolve --mark')
abort: unresolved conflicts, can't continue
(use 'hg resolve' and 'hg graft --continue')
- [255]
+ [1]
Summary should mention graft:
@@ -314,7 +317,7 @@
warning: conflicts while merging e! (edit, then use 'hg resolve --mark')
abort: unresolved conflicts, can't continue
(use 'hg resolve' and 'hg graft --continue')
- [255]
+ [1]
Continue without resolve should fail:
@@ -508,7 +511,7 @@
grafting 1:5d205f8b35b6 "1"
abort: unresolved conflicts, can't continue
(use 'hg resolve' and 'hg graft --continue')
- [255]
+ [1]
$ hg resolve --all
merging a
warning: conflicts while merging a! (edit, then use 'hg resolve --mark')
@@ -548,7 +551,7 @@
grafting 2:5c095ad7e90f "2"
abort: unresolved conflicts, can't continue
(use 'hg resolve' and 'hg graft --continue')
- [255]
+ [1]
$ hg resolve --all
merging a and b to b
(no more unresolved files)
@@ -746,12 +749,16 @@
scanning for duplicate grafts
grafting 13:7a4785234d87 "2"
all copies found (* = to merge, ! = divergent, % = renamed and deleted):
- src: 'a' -> dst: 'b' *
+ on local side:
+ src: 'a' -> dst: 'b' *
+ on remote side:
+ src: 'a' -> dst: 'b' *
checking for directory renames
resolving manifests
branchmerge: True, force: True, partial: False
ancestor: b592ea63bb0c, local: 7e61b508e709+, remote: 7a4785234d87
starting 4 threads for background file closing (?)
+ nothing to commit, clearing merge state
note: graft of 13:7a4785234d87 created no changes to commit
$ hg log -r 'destination(13)'
All copies of a cset
@@ -807,13 +814,14 @@
note: graft of 19:9627f653b421 created no changes to commit
grafting 0:68795b066622 "0"
-graft --force after backout
+graft --force after backout. Do the backout with graft too, to make
+sure we support issue6248.
$ echo abc > a
$ hg ci -m 24
- $ hg backout 24
- reverting a
- changeset 25:71c4e63d4f98 backs out changeset 24:2e7ea477be26
+ $ hg graft --base . -r ".^" --no-commit
+ grafting 23:b1cac6de36a9 "0"
+ $ hg commit -m 'Backed out changeset 2e7ea477be26'
$ hg graft 24
skipping ancestor revision 24:2e7ea477be26
[255]
@@ -831,7 +839,7 @@
grafting 24:2e7ea477be26 "24"
abort: unresolved conflicts, can't continue
(use 'hg resolve' and 'hg graft --continue')
- [255]
+ [1]
$ hg resolve --all
merging a
warning: conflicts while merging a! (edit, then use 'hg resolve --mark')
--- a/tests/test-help.t Mon Apr 13 16:30:13 2020 +0300
+++ b/tests/test-help.t Thu Apr 16 22:51:09 2020 +0530
@@ -364,6 +364,7 @@
eol automatically manage newlines in repository files
extdiff command to allow external programs to compare revisions
factotum http authentication with factotum
+ fastexport export repositories as git fast-import stream
githelp try mapping git commands to Mercurial commands
gpg commands to sign and verify changesets
hgk browse the repository in a graphical way
@@ -787,6 +788,12 @@
(use 'hg help extensions' for information on enabling extensions)
[255]
+Checking that help adapts based on the config:
+
+ $ hg help diff --config ui.tweakdefaults=true | egrep -e '^ *(-g|config)'
+ -g --[no-]git use git extended diff format (default: on from
+ config)
+
Make sure that we don't run afoul of the help system thinking that
this is a section and erroring out weirdly.
@@ -966,6 +973,8 @@
find the ancestor revision of two revisions in a given index
debugapplystreamclonebundle
apply a stream clone bundle file
+ debugbackupbundle
+ lists the changesets available in backup bundles
debugbuilddag
builds a repo with a given DAG from scratch in the current
empty repo
@@ -1017,6 +1026,7 @@
print merge state
debugnamecomplete
complete "names" - tags, open branch names, bookmark names
+ debugnodemap write and inspect on disk nodemap
debugobsolete
create arbitrary obsolete marker
debugoptADV (no help text available)
@@ -1054,6 +1064,8 @@
debugsub (no help text available)
debugsuccessorssets
show set of successors for revision
+ debugtagscache
+ display the contents of .hg/cache/hgtagsfnodes1
debugtemplate
parse and apply a template
debuguigetpass
--- a/tests/test-hgrc.t Mon Apr 13 16:30:13 2020 +0300
+++ b/tests/test-hgrc.t Thu Apr 16 22:51:09 2020 +0530
@@ -56,7 +56,7 @@
$ echo '%include $TESTTMP/included' >> $HGRC
$ hg showconfig section
section.option=value
-#if no-windows
+#if unix-permissions no-root
$ chmod u-r $TESTTMP/included
$ hg showconfig section
hg: parse error at $TESTTMP/hgrc:2: cannot include $TESTTMP/included (Permission denied)
--- a/tests/test-hgweb-auth.py Mon Apr 13 16:30:13 2020 +0300
+++ b/tests/test-hgweb-auth.py Thu Apr 16 22:51:09 2020 +0530
@@ -52,7 +52,7 @@
for name in (b'.username', b'.password'):
if (p + name) not in auth:
auth[p + name] = p
- auth = dict((k, v) for k, v in auth.items() if v is not None)
+ auth = {k: v for k, v in auth.items() if v is not None}
ui = writeauth(auth)
--- a/tests/test-histedit-non-commute-abort.t Mon Apr 13 16:30:13 2020 +0300
+++ b/tests/test-histedit-non-commute-abort.t Thu Apr 16 22:51:09 2020 +0530
@@ -77,36 +77,22 @@
insert unsupported advisory merge record
$ hg --config extensions.fakemergerecord=$TESTDIR/fakemergerecord.py fakemergerecord -x
$ hg debugmergestate
- * version 2 records
- local: 8f7551c7e4a2f2efe0bc8c741baf7f227d65d758
- other: e860deea161a2f77de56603b340ebbb4536308ae
- labels:
- local: local
- other: histedit
- unrecognized entry: x advisory record
- file extras: e (ancestorlinknode = 0000000000000000000000000000000000000000)
- file: e (record type "F", state "u", hash 58e6b3a414a1e090dfc6029add0f3555ccba127f)
- local path: e (flags "")
- ancestor path: e (node null)
+ local (local): 8f7551c7e4a2f2efe0bc8c741baf7f227d65d758
+ other (histedit): e860deea161a2f77de56603b340ebbb4536308ae
+ file: e (state "u")
+ local path: e (hash 58e6b3a414a1e090dfc6029add0f3555ccba127f, flags "")
+ ancestor path: e (node 0000000000000000000000000000000000000000)
other path: e (node 6b67ccefd5ce6de77e7ead4f5292843a0255329f)
+ extra: ancestorlinknode = 0000000000000000000000000000000000000000
$ hg resolve -l
U e
insert unsupported mandatory merge record
$ hg --config extensions.fakemergerecord=$TESTDIR/fakemergerecord.py fakemergerecord -X
$ hg debugmergestate
- * version 2 records
- local: 8f7551c7e4a2f2efe0bc8c741baf7f227d65d758
- other: e860deea161a2f77de56603b340ebbb4536308ae
- labels:
- local: local
- other: histedit
- file extras: e (ancestorlinknode = 0000000000000000000000000000000000000000)
- file: e (record type "F", state "u", hash 58e6b3a414a1e090dfc6029add0f3555ccba127f)
- local path: e (flags "")
- ancestor path: e (node null)
- other path: e (node 6b67ccefd5ce6de77e7ead4f5292843a0255329f)
- unrecognized entry: X mandatory record
+ abort: unsupported merge state records: X
+ (see https://mercurial-scm.org/wiki/MergeStateRecords for more information)
+ [255]
$ hg resolve -l
abort: unsupported merge state records: X
(see https://mercurial-scm.org/wiki/MergeStateRecords for more information)
--- a/tests/test-histedit-non-commute.t Mon Apr 13 16:30:13 2020 +0300
+++ b/tests/test-histedit-non-commute.t Thu Apr 16 22:51:09 2020 +0530
@@ -300,4 +300,7 @@
summary: Initial commit
+An invalid editor shouldn't leave the user in a broken state:
+ $ EDITOR=totally-not-a-thing-unsensible-editor-value hg histedit 4
+
$ cd ..
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-hooklib-changeset_obsoleted.t Thu Apr 16 22:51:09 2020 +0530
@@ -0,0 +1,84 @@
+ $ cat <<EOF >> $HGRCPATH
+ > [experimental]
+ > evolution = true
+ >
+ > [extensions]
+ > notify =
+ > hooklib =
+ >
+ > [phases]
+ > publish = False
+ >
+ > [notify]
+ > sources = pull
+ > diffstat = False
+ > messageidseed = example
+ > domain = example.com
+ >
+ > [reposubs]
+ > * = baz
+ > EOF
+ $ hg init a
+ $ hg --cwd a debugbuilddag +2
+ $ hg init b
+ $ cat <<EOF >> b/.hg/hgrc
+ > [hooks]
+ > incoming.notify = python:hgext.notify.hook
+ > pretxnclose.changeset_obsoleted = python:hgext.hooklib.changeset_obsoleted.hook
+ > EOF
+ $ hg --cwd b pull ../a | "$PYTHON" $TESTDIR/unwrap-message-id.py
+ pulling from ../a
+ requesting all changes
+ adding changesets
+ adding manifests
+ adding file changes
+ added 2 changesets with 0 changes to 0 files
+ new changesets 1ea73414a91b:66f7d451a68b (2 drafts)
+ MIME-Version: 1.0
+ Content-Type: text/plain; charset="us-ascii"
+ Content-Transfer-Encoding: 7bit
+ Date: * (glob)
+ Subject: changeset in * (glob)
+ From: debugbuilddag@example.com
+ X-Hg-Notification: changeset 1ea73414a91b
+ Message-Id: <hg.81c297828fd2d5afaadf2775a6a71b74143b6451dfaac09fac939e9107a50d01@example.com>
+ To: baz@example.com
+
+ changeset 1ea73414a91b in $TESTTMP/b
+ details: $TESTTMP/b?cmd=changeset;node=1ea73414a91b
+ description:
+ r0
+ MIME-Version: 1.0
+ Content-Type: text/plain; charset="us-ascii"
+ Content-Transfer-Encoding: 7bit
+ Date: * (glob)
+ Subject: changeset in * (glob)
+ From: debugbuilddag@example.com
+ X-Hg-Notification: changeset 66f7d451a68b
+ Message-Id: <hg.364d03da7dc13829eb779a805be7e37f54f572e9afcea7d2626856a794d3e8f3@example.com>
+ To: baz@example.com
+
+ changeset 66f7d451a68b in $TESTTMP/b
+ details: $TESTTMP/b?cmd=changeset;node=66f7d451a68b
+ description:
+ r1
+ (run 'hg update' to get a working copy)
+ $ hg --cwd a debugobsolete 1ea73414a91b0920940797d8fc6a11e447f8ea1e
+ 1 new obsolescence markers
+ obsoleted 1 changesets
+ 1 new orphan changesets
+ $ hg --cwd a push ../b --hidden | "$PYTHON" $TESTDIR/unwrap-message-id.py
+ 1 new orphan changesets
+ pushing to ../b
+ searching for changes
+ no changes found
+ Subject: changeset abandoned
+ In-reply-to: <hg.81c297828fd2d5afaadf2775a6a71b74143b6451dfaac09fac939e9107a50d01@example.com>
+ Message-Id: <hg.d6329e9481594f0f3c8a84362b3511318bfbce50748ab1123f909eb6fbcab018@example.com>
+ Date: * (glob)
+ From: test@example.com
+ To: baz@example.com
+
+ This changeset has been abandoned.
+ 1 new obsolescence markers
+ obsoleted 1 changesets
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-hooklib-changeset_published.t Thu Apr 16 22:51:09 2020 +0530
@@ -0,0 +1,84 @@
+ $ cat <<EOF >> $HGRCPATH
+ > [extensions]
+ > notify =
+ > hooklib =
+ >
+ > [phases]
+ > publish = False
+ >
+ > [notify]
+ > sources = pull
+ > diffstat = False
+ > messageidseed = example
+ > domain = example.com
+ >
+ > [reposubs]
+ > * = baz
+ > EOF
+ $ hg init a
+ $ hg --cwd a debugbuilddag .
+ $ hg init b
+ $ cat <<EOF >> b/.hg/hgrc
+ > [hooks]
+ > incoming.notify = python:hgext.notify.hook
+ > txnclose-phase.changeset_published = python:hgext.hooklib.changeset_published.hook
+ > EOF
+ $ hg --cwd b pull ../a | "$PYTHON" $TESTDIR/unwrap-message-id.py
+ pulling from ../a
+ requesting all changes
+ adding changesets
+ adding manifests
+ adding file changes
+ added 1 changesets with 0 changes to 0 files
+ new changesets 1ea73414a91b (1 drafts)
+ MIME-Version: 1.0
+ Content-Type: text/plain; charset="us-ascii"
+ Content-Transfer-Encoding: 7bit
+ Date: * (glob)
+ Subject: changeset in * (glob)
+ From: debugbuilddag@example.com
+ X-Hg-Notification: changeset 1ea73414a91b
+ Message-Id: <hg.81c297828fd2d5afaadf2775a6a71b74143b6451dfaac09fac939e9107a50d01@example.com>
+ To: baz@example.com
+
+ changeset 1ea73414a91b in $TESTTMP/b
+ details: $TESTTMP/b?cmd=changeset;node=1ea73414a91b
+ description:
+ r0
+ (run 'hg update' to get a working copy)
+ $ hg --cwd a phase --public 0
+ $ hg --cwd b pull ../a | "$PYTHON" $TESTDIR/unwrap-message-id.py
+ pulling from ../a
+ searching for changes
+ no changes found
+ 1 local changesets published
+ Subject: changeset published
+ In-reply-to: <hg.81c297828fd2d5afaadf2775a6a71b74143b6451dfaac09fac939e9107a50d01@example.com>
+ Message-Id: <hg.2ec19bbddee5b542442bf5e1aed97bf706afff6aa765629883fbd1f4edd6fcb0@example.com>
+ Date: * (glob)
+ From: test@example.com
+ To: baz@example.com
+
+ This changeset has been published.
+ $ hg --cwd b phase --force --draft 0
+ $ cat <<EOF >> b/.hg/hgrc
+ > [notify_published]
+ > messageidseed = example2
+ > domain = alt.example.com
+ > template = Subject: changeset published
+ > From: hg@example.com\n
+ > This draft changeset has been published.\n
+ > EOF
+ $ hg --cwd b pull ../a | "$PYTHON" $TESTDIR/unwrap-message-id.py
+ pulling from ../a
+ searching for changes
+ no changes found
+ 1 local changesets published
+ Subject: changeset published
+ From: hg@example.com
+ In-reply-to: <hg.e3381dc41c051215e50b1c166a72949d0fff99609eb373420bcb763af80ef230@alt.example.com>
+ Message-Id: <hg.c927f3d324e645a4245bfed20b0efb5b9582999d6be9bef45a37e7ec21208b24@alt.example.com>
+ Date: * (glob)
+ To: baz@example.com
+
+ This draft changeset has been published.
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-hooklib-enforce_draft_commits.t Thu Apr 16 22:51:09 2020 +0530
@@ -0,0 +1,45 @@
+ $ cat <<EOF >> $HGRCPATH
+ > [extensions]
+ > hooklib =
+ >
+ > [phases]
+ > publish = False
+ > EOF
+ $ hg init a
+ $ hg --cwd a debugbuilddag .
+ $ hg --cwd a phase --public 0
+ $ hg init b
+ $ cat <<EOF >> b/.hg/hgrc
+ > [hooks]
+ > pretxnclose-phase.enforce_draft_commits = \
+ > python:hgext.hooklib.enforce_draft_commits.hook
+ > EOF
+ $ hg --cwd b pull ../a
+ pulling from ../a
+ requesting all changes
+ adding changesets
+ adding manifests
+ adding file changes
+ error: pretxnclose-phase.enforce_draft_commits hook failed: New changeset 1ea73414a91b in phase 'public' rejected
+ transaction abort!
+ rollback completed
+ abort: New changeset 1ea73414a91b in phase 'public' rejected
+ [255]
+ $ hg --cwd a phase --force --draft 0
+ $ hg --cwd b pull ../a
+ pulling from ../a
+ requesting all changes
+ adding changesets
+ adding manifests
+ adding file changes
+ added 1 changesets with 0 changes to 0 files
+ new changesets 1ea73414a91b (1 drafts)
+ (run 'hg update' to get a working copy)
+ $ hg --cwd a phase --public 0
+ $ hg --cwd b pull ../a
+ pulling from ../a
+ searching for changes
+ no changes found
+ error: pretxnclose-phase.enforce_draft_commits hook failed: Phase change from 'draft' to 'public' for 1ea73414a91b rejected
+ abort: Phase change from 'draft' to 'public' for 1ea73414a91b rejected
+ [255]
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-hooklib-reject_merge_commits.t Thu Apr 16 22:51:09 2020 +0530
@@ -0,0 +1,78 @@
+ $ cat <<EOF >> $HGRCPATH
+ > [extensions]
+ > hooklib =
+ >
+ > [phases]
+ > publish = False
+ > EOF
+ $ hg init a
+ $ hg --cwd a debugbuilddag '.:parent.:childa*parent/childa<parent@otherbranch./childa'
+ $ hg --cwd a log -G
+ o changeset: 4:a9fb040caedd
+ |\ branch: otherbranch
+ | | tag: tip
+ | | parent: 3:af739dfc49b4
+ | | parent: 1:66f7d451a68b
+ | | user: debugbuilddag
+ | | date: Thu Jan 01 00:00:04 1970 +0000
+ | | summary: r4
+ | |
+ | o changeset: 3:af739dfc49b4
+ | | branch: otherbranch
+ | | parent: 0:1ea73414a91b
+ | | user: debugbuilddag
+ | | date: Thu Jan 01 00:00:03 1970 +0000
+ | | summary: r3
+ | |
+ +---o changeset: 2:a6b287721c3b
+ | |/ parent: 0:1ea73414a91b
+ | | parent: 1:66f7d451a68b
+ | | user: debugbuilddag
+ | | date: Thu Jan 01 00:00:02 1970 +0000
+ | | summary: r2
+ | |
+ o | changeset: 1:66f7d451a68b
+ |/ tag: childa
+ | user: debugbuilddag
+ | date: Thu Jan 01 00:00:01 1970 +0000
+ | summary: r1
+ |
+ o changeset: 0:1ea73414a91b
+ tag: parent
+ user: debugbuilddag
+ date: Thu Jan 01 00:00:00 1970 +0000
+ summary: r0
+
+ $ hg init b
+ $ cat <<EOF >> b/.hg/hgrc
+ > [hooks]
+ > pretxnchangegroup.reject_merge_commits = \
+ > python:hgext.hooklib.reject_merge_commits.hook
+ > EOF
+ $ hg --cwd b pull ../a -r a6b287721c3b
+ pulling from ../a
+ adding changesets
+ adding manifests
+ adding file changes
+ error: pretxnchangegroup.reject_merge_commits hook failed: a6b287721c3b rejected as merge on the same branch. Please consider rebase.
+ transaction abort!
+ rollback completed
+ abort: a6b287721c3b rejected as merge on the same branch. Please consider rebase.
+ [255]
+ $ hg --cwd b pull ../a -r 1ea73414a91b
+ pulling from ../a
+ adding changesets
+ adding manifests
+ adding file changes
+ added 1 changesets with 0 changes to 0 files
+ new changesets 1ea73414a91b (1 drafts)
+ (run 'hg update' to get a working copy)
+ $ hg --cwd b pull ../a -r a9fb040caedd
+ pulling from ../a
+ searching for changes
+ adding changesets
+ adding manifests
+ adding file changes
+ added 3 changesets with 0 changes to 0 files
+ new changesets 66f7d451a68b:a9fb040caedd (3 drafts)
+ (run 'hg update' to get a working copy)
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-hooklib-reject_new_heads.t Thu Apr 16 22:51:09 2020 +0530
@@ -0,0 +1,53 @@
+ $ cat <<EOF >> $HGRCPATH
+ > [extensions]
+ > hooklib =
+ >
+ > [phases]
+ > publish = False
+ > EOF
+ $ hg init a
+ $ hg --cwd a debugbuilddag '.:parent.*parent'
+ $ hg --cwd a log -G
+ o changeset: 2:fa942426a6fd
+ | tag: tip
+ | parent: 0:1ea73414a91b
+ | user: debugbuilddag
+ | date: Thu Jan 01 00:00:02 1970 +0000
+ | summary: r2
+ |
+ | o changeset: 1:66f7d451a68b
+ |/ user: debugbuilddag
+ | date: Thu Jan 01 00:00:01 1970 +0000
+ | summary: r1
+ |
+ o changeset: 0:1ea73414a91b
+ tag: parent
+ user: debugbuilddag
+ date: Thu Jan 01 00:00:00 1970 +0000
+ summary: r0
+
+ $ hg init b
+ $ cat <<EOF >> b/.hg/hgrc
+ > [hooks]
+ > pretxnclose.reject_new_heads = \
+ > python:hgext.hooklib.reject_new_heads.hook
+ > EOF
+ $ hg --cwd b pull ../a
+ pulling from ../a
+ requesting all changes
+ adding changesets
+ adding manifests
+ adding file changes
+ error: pretxnclose.reject_new_heads hook failed: Changes on branch 'default' resulted in multiple heads
+ transaction abort!
+ rollback completed
+ abort: Changes on branch 'default' resulted in multiple heads
+ [255]
+ $ hg --cwd b pull ../a -r 1ea73414a91b
+ pulling from ../a
+ adding changesets
+ adding manifests
+ adding file changes
+ added 1 changesets with 0 changes to 0 files
+ new changesets 1ea73414a91b (1 drafts)
+ (run 'hg update' to get a working copy)
--- a/tests/test-http-bad-server.t Mon Apr 13 16:30:13 2020 +0300
+++ b/tests/test-http-bad-server.t Thu Apr 16 22:51:09 2020 +0530
@@ -15,6 +15,8 @@
> sparse-revlog = no
> [devel]
> legacy.exchange = phases
+ > [server]
+ > concurrent-push-mode = strict
> EOF
$ hg init server0
--- a/tests/test-http-protocol.t Mon Apr 13 16:30:13 2020 +0300
+++ b/tests/test-http-protocol.t Thu Apr 16 22:51:09 2020 +0530
@@ -321,7 +321,7 @@
s> Content-Type: application/mercurial-cbor\r\n
s> Content-Length: *\r\n (glob)
s> \r\n
- s> \xa3GapibaseDapi/Dapis\xa1Pexp-http-v2-0003\xa4Hcommands\xacIbranchmap\xa2Dargs\xa0Kpermissions\x81DpullLcapabilities\xa2Dargs\xa0Kpermissions\x81DpullMchangesetdata\xa2Dargs\xa2Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x84IbookmarksGparentsEphaseHrevisionIrevisions\xa2Hrequired\xf5DtypeDlistKpermissions\x81DpullHfiledata\xa2Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x83HlinknodeGparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolEnodes\xa2Hrequired\xf5DtypeDlistDpath\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullIfilesdata\xa3Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x84NfirstchangesetHlinknodeGparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolJpathfilter\xa3Gdefault\xf6Hrequired\xf4DtypeDdictIrevisions\xa2Hrequired\xf5DtypeDlistKpermissions\x81DpullTrecommendedbatchsize\x19\xc3PEheads\xa2Dargs\xa1Jpubliconly\xa3Gdefault\xf4Hrequired\xf4DtypeDboolKpermissions\x81DpullEknown\xa2Dargs\xa1Enodes\xa3Gdefault\x80Hrequired\xf4DtypeDlistKpermissions\x81DpullHlistkeys\xa2Dargs\xa1Inamespace\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullFlookup\xa2Dargs\xa1Ckey\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullLmanifestdata\xa3Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x82GparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolEnodes\xa2Hrequired\xf5DtypeDlistDtree\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullTrecommendedbatchsize\x1a\x00\x01\x86\xa0Gpushkey\xa2Dargs\xa4Ckey\xa2Hrequired\xf5DtypeEbytesInamespace\xa2Hrequired\xf5DtypeEbytesCnew\xa2Hrequired\xf5DtypeEbytesCold\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpushPrawstorefiledata\xa2Dargs\xa2Efiles\xa2Hrequired\xf5DtypeDlistJpathfilter\xa3Gdefault\xf6Hrequired\xf4DtypeDlistKpermissions\x81DpullQframingmediatypes\x81X&application/mercurial-exp-framing-0006Rpathfilterprefixes\xd9\
x01\x02\x82Epath:Lrootfilesin:Nrawrepoformats\x83LgeneraldeltaHrevlogv1LsparserevlogNv1capabilitiesY\x01\xe0batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset compression=$BUNDLE2_COMPRESSIONS$ getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash
+ s> \xa3GapibaseDapi/Dapis\xa1Pexp-http-v2-0003\xa4Hcommands\xacIbranchmap\xa2Dargs\xa0Kpermissions\x81DpullLcapabilities\xa2Dargs\xa0Kpermissions\x81DpullMchangesetdata\xa2Dargs\xa2Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x84IbookmarksGparentsEphaseHrevisionIrevisions\xa2Hrequired\xf5DtypeDlistKpermissions\x81DpullHfiledata\xa2Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x83HlinknodeGparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolEnodes\xa2Hrequired\xf5DtypeDlistDpath\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullIfilesdata\xa3Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x84NfirstchangesetHlinknodeGparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolJpathfilter\xa3Gdefault\xf6Hrequired\xf4DtypeDdictIrevisions\xa2Hrequired\xf5DtypeDlistKpermissions\x81DpullTrecommendedbatchsize\x19\xc3PEheads\xa2Dargs\xa1Jpubliconly\xa3Gdefault\xf4Hrequired\xf4DtypeDboolKpermissions\x81DpullEknown\xa2Dargs\xa1Enodes\xa3Gdefault\x80Hrequired\xf4DtypeDlistKpermissions\x81DpullHlistkeys\xa2Dargs\xa1Inamespace\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullFlookup\xa2Dargs\xa1Ckey\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullLmanifestdata\xa3Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x82GparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolEnodes\xa2Hrequired\xf5DtypeDlistDtree\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullTrecommendedbatchsize\x1a\x00\x01\x86\xa0Gpushkey\xa2Dargs\xa4Ckey\xa2Hrequired\xf5DtypeEbytesInamespace\xa2Hrequired\xf5DtypeEbytesCnew\xa2Hrequired\xf5DtypeEbytesCold\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpushPrawstorefiledata\xa2Dargs\xa2Efiles\xa2Hrequired\xf5DtypeDlistJpathfilter\xa3Gdefault\xf6Hrequired\xf4DtypeDlistKpermissions\x81DpullQframingmediatypes\x81X&application/mercurial-exp-framing-0006Rpathfilterprefixes\xd9\
x01\x02\x82Epath:Lrootfilesin:Nrawrepoformats\x83LgeneraldeltaHrevlogv1LsparserevlogNv1capabilitiesY\x01\xf7batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset compression=$BUNDLE2_COMPRESSIONS$ getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash
sending heads command
s> setsockopt(6, 1, 1) -> None (?)
s> POST /api/exp-http-v2-0003/ro/heads HTTP/1.1\r\n
@@ -437,7 +437,7 @@
s> Server: testing stub value\r\n
s> Date: $HTTP_DATE$\r\n
s> Content-Type: application/mercurial-0.1\r\n
- s> Content-Length: 480\r\n
+ s> Content-Length: 503\r\n
s> \r\n
s> batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset compression=$BUNDLE2_COMPRESSIONS$ getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash
@@ -474,7 +474,7 @@
s> Server: testing stub value\r\n
s> Date: $HTTP_DATE$\r\n
s> Content-Type: application/mercurial-0.1\r\n
- s> Content-Length: 480\r\n
+ s> Content-Length: 503\r\n
s> \r\n
real URL is http://$LOCALIP:$HGPORT/redirected (glob)
s> batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset compression=$BUNDLE2_COMPRESSIONS$ getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash
@@ -745,7 +745,7 @@
s> Server: testing stub value\r\n
s> Date: $HTTP_DATE$\r\n
s> Content-Type: application/mercurial-0.1\r\n
- s> Content-Length: 480\r\n
+ s> Content-Length: 503\r\n
s> \r\n
real URL is http://$LOCALIP:$HGPORT/redirected (glob)
s> batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset compression=$BUNDLE2_COMPRESSIONS$ getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash
--- a/tests/test-http.t Mon Apr 13 16:30:13 2020 +0300
+++ b/tests/test-http.t Thu Apr 16 22:51:09 2020 +0530
@@ -320,20 +320,20 @@
list of changesets:
7f4e523d01f2cc3765ac8934da3d14db775ff872
bundle2-output-bundle: "HG20", 5 parts total
- bundle2-output-part: "replycaps" 205 bytes payload
+ bundle2-output-part: "replycaps" 224 bytes payload
bundle2-output-part: "check:phases" 24 bytes payload
- bundle2-output-part: "check:heads" streamed payload
+ bundle2-output-part: "check:updated-heads" streamed payload
bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload
bundle2-output-part: "phase-heads" 24 bytes payload
sending unbundle command
- sending 1013 bytes
+ sending 1040 bytes
devel-peer-request: POST http://localhost:$HGPORT2/?cmd=unbundle
- devel-peer-request: Content-length 1013
+ devel-peer-request: Content-length 1040
devel-peer-request: Content-type application/mercurial-0.1
devel-peer-request: Vary X-HgArg-1,X-HgProto-1
devel-peer-request: X-hgproto-1 0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull
devel-peer-request: 16 bytes of commands arguments in headers
- devel-peer-request: 1013 bytes of data
+ devel-peer-request: 1040 bytes of data
devel-peer-request: finished in *.???? seconds (200) (glob)
bundle2-input-bundle: no-transaction
bundle2-input-part: "reply:changegroup" (advisory) (params: 0 advisory) supported
--- a/tests/test-import.t Mon Apr 13 16:30:13 2020 +0300
+++ b/tests/test-import.t Thu Apr 16 22:51:09 2020 +0530
@@ -1823,7 +1823,8 @@
$ hg status -c .
C a
C b
- $ ls
+ $ ls -A
+ .hg
a
a.rej
b
@@ -1870,7 +1871,8 @@
$ hg status -c .
C a
C b
- $ ls
+ $ ls -A
+ .hg
a
a.rej
b
@@ -1919,7 +1921,8 @@
$ hg status -c .
C a
C b
- $ ls
+ $ ls -A
+ .hg
a
a.rej
b
--- a/tests/test-infinitepush-ci.t Mon Apr 13 16:30:13 2020 +0300
+++ b/tests/test-infinitepush-ci.t Thu Apr 16 22:51:09 2020 +0530
@@ -50,7 +50,7 @@
6cb0989601f1 added a
$ scratchnodes
- 6cb0989601f1fb5805238edfb16f3606713d9a0b a4c202c147a9c4bb91bbadb56321fc5f3950f7f2
+ 6cb0989601f1fb5805238edfb16f3606713d9a0b 3b414252ff8acab801318445d88ff48faf4a28c3
Understanding how data is stored on the bundlestore in server
-------------------------------------------------------------
@@ -61,8 +61,8 @@
index
filebundlestore stores the bundles
- $ ls ../repo/.hg/scratchbranches/filebundlestore/a4/c2/
- a4c202c147a9c4bb91bbadb56321fc5f3950f7f2
+ $ ls ../repo/.hg/scratchbranches/filebundlestore/3b/41/
+ 3b414252ff8acab801318445d88ff48faf4a28c3
index/nodemap stores a map of node id and file in which bundle is stored in filebundlestore
$ ls ../repo/.hg/scratchbranches/index/
@@ -82,7 +82,7 @@
Applying the changeset from the bundlestore
--------------------------------------------
- $ hg unbundle .hg/scratchbranches/filebundlestore/a4/c2/a4c202c147a9c4bb91bbadb56321fc5f3950f7f2
+ $ hg unbundle .hg/scratchbranches/filebundlestore/3b/41/3b414252ff8acab801318445d88ff48faf4a28c3
adding changesets
adding manifests
adding file changes
@@ -133,9 +133,9 @@
Both of the new changesets are stored in a single bundle-file
$ scratchnodes
- 6cb0989601f1fb5805238edfb16f3606713d9a0b a4c202c147a9c4bb91bbadb56321fc5f3950f7f2
- bf8a6e3011b345146bbbedbcb1ebd4837571492a ee41a41cefb7817cbfb235b4f6e9f27dbad6ca1f
- eaba929e866c59bc9a6aada5a9dd2f6990db83c0 ee41a41cefb7817cbfb235b4f6e9f27dbad6ca1f
+ 6cb0989601f1fb5805238edfb16f3606713d9a0b 3b414252ff8acab801318445d88ff48faf4a28c3
+ bf8a6e3011b345146bbbedbcb1ebd4837571492a 239585f5e61f0c09ce7106bdc1097bff731738f4
+ eaba929e866c59bc9a6aada5a9dd2f6990db83c0 239585f5e61f0c09ce7106bdc1097bff731738f4
Pushing more changesets to the server
-------------------------------------
@@ -158,11 +158,11 @@
Sneak peek into the bundlestore at the server
$ scratchnodes
- 1bb96358eda285b536c6d1c66846a7cdb2336cea 57e00c0d4f26e2a2a72b751b63d9abc4f3eb28e7
- 6cb0989601f1fb5805238edfb16f3606713d9a0b a4c202c147a9c4bb91bbadb56321fc5f3950f7f2
- b4e4bce660512ad3e71189e14588a70ac8e31fef 57e00c0d4f26e2a2a72b751b63d9abc4f3eb28e7
- bf8a6e3011b345146bbbedbcb1ebd4837571492a 57e00c0d4f26e2a2a72b751b63d9abc4f3eb28e7
- eaba929e866c59bc9a6aada5a9dd2f6990db83c0 57e00c0d4f26e2a2a72b751b63d9abc4f3eb28e7
+ 1bb96358eda285b536c6d1c66846a7cdb2336cea 98fbae0016662521b0007da1b7bc349cd3caacd1
+ 6cb0989601f1fb5805238edfb16f3606713d9a0b 3b414252ff8acab801318445d88ff48faf4a28c3
+ b4e4bce660512ad3e71189e14588a70ac8e31fef 98fbae0016662521b0007da1b7bc349cd3caacd1
+ bf8a6e3011b345146bbbedbcb1ebd4837571492a 98fbae0016662521b0007da1b7bc349cd3caacd1
+ eaba929e866c59bc9a6aada5a9dd2f6990db83c0 98fbae0016662521b0007da1b7bc349cd3caacd1
Checking if `hg pull` pulls something or `hg incoming` shows something
-----------------------------------------------------------------------
@@ -309,14 +309,14 @@
$ cd ../repo
$ scratchnodes
- 1bb96358eda285b536c6d1c66846a7cdb2336cea 0a6e70ecd5b98d22382f69b93909f557ac6a9927
- 6cb0989601f1fb5805238edfb16f3606713d9a0b a4c202c147a9c4bb91bbadb56321fc5f3950f7f2
- 9b42578d44473575994109161430d65dd147d16d 0a6e70ecd5b98d22382f69b93909f557ac6a9927
- b4e4bce660512ad3e71189e14588a70ac8e31fef 0a6e70ecd5b98d22382f69b93909f557ac6a9927
- bf8a6e3011b345146bbbedbcb1ebd4837571492a 0a6e70ecd5b98d22382f69b93909f557ac6a9927
- eaba929e866c59bc9a6aada5a9dd2f6990db83c0 0a6e70ecd5b98d22382f69b93909f557ac6a9927
+ 1bb96358eda285b536c6d1c66846a7cdb2336cea 280a46a259a268f0e740c81c5a7751bdbfaec85f
+ 6cb0989601f1fb5805238edfb16f3606713d9a0b 3b414252ff8acab801318445d88ff48faf4a28c3
+ 9b42578d44473575994109161430d65dd147d16d 280a46a259a268f0e740c81c5a7751bdbfaec85f
+ b4e4bce660512ad3e71189e14588a70ac8e31fef 280a46a259a268f0e740c81c5a7751bdbfaec85f
+ bf8a6e3011b345146bbbedbcb1ebd4837571492a 280a46a259a268f0e740c81c5a7751bdbfaec85f
+ eaba929e866c59bc9a6aada5a9dd2f6990db83c0 280a46a259a268f0e740c81c5a7751bdbfaec85f
- $ hg unbundle .hg/scratchbranches/filebundlestore/0a/6e/0a6e70ecd5b98d22382f69b93909f557ac6a9927
+ $ hg unbundle .hg/scratchbranches/filebundlestore/28/0a/280a46a259a268f0e740c81c5a7751bdbfaec85f
adding changesets
adding manifests
adding file changes
@@ -392,13 +392,13 @@
$ cd ../repo
$ scratchnodes
- 1bb96358eda285b536c6d1c66846a7cdb2336cea 0a6e70ecd5b98d22382f69b93909f557ac6a9927
- 6cb0989601f1fb5805238edfb16f3606713d9a0b a4c202c147a9c4bb91bbadb56321fc5f3950f7f2
+ 1bb96358eda285b536c6d1c66846a7cdb2336cea 280a46a259a268f0e740c81c5a7751bdbfaec85f
+ 6cb0989601f1fb5805238edfb16f3606713d9a0b 3b414252ff8acab801318445d88ff48faf4a28c3
99949238d9ac7f2424a33a46dface6f866afd059 090a24fe63f31d3b4bee714447f835c8c362ff57
- 9b42578d44473575994109161430d65dd147d16d 0a6e70ecd5b98d22382f69b93909f557ac6a9927
- b4e4bce660512ad3e71189e14588a70ac8e31fef 0a6e70ecd5b98d22382f69b93909f557ac6a9927
- bf8a6e3011b345146bbbedbcb1ebd4837571492a 0a6e70ecd5b98d22382f69b93909f557ac6a9927
- eaba929e866c59bc9a6aada5a9dd2f6990db83c0 0a6e70ecd5b98d22382f69b93909f557ac6a9927
+ 9b42578d44473575994109161430d65dd147d16d 280a46a259a268f0e740c81c5a7751bdbfaec85f
+ b4e4bce660512ad3e71189e14588a70ac8e31fef 280a46a259a268f0e740c81c5a7751bdbfaec85f
+ bf8a6e3011b345146bbbedbcb1ebd4837571492a 280a46a259a268f0e740c81c5a7751bdbfaec85f
+ eaba929e866c59bc9a6aada5a9dd2f6990db83c0 280a46a259a268f0e740c81c5a7751bdbfaec85f
$ hg glog
o 6:9b42578d4447 added f
--- a/tests/test-install.t Mon Apr 13 16:30:13 2020 +0300
+++ b/tests/test-install.t Thu Apr 16 22:51:09 2020 +0530
@@ -2,12 +2,14 @@
$ hg debuginstall
checking encoding (ascii)...
checking Python executable (*) (glob)
+ checking Python implementation (*) (glob)
checking Python version (2.*) (glob) (no-py3 !)
checking Python version (3.*) (glob) (py3 !)
checking Python lib (.*[Ll]ib.*)... (re)
checking Python security support (*) (glob)
TLS 1.2 not supported by Python install; network connections lack modern security (?)
SNI not supported by Python install; may have connectivity issues with some servers (?)
+ checking Rust extensions \((installed|missing)\) (re)
checking Mercurial version (*) (glob)
checking Mercurial custom build (*) (glob)
checking module policy (*) (glob)
@@ -16,6 +18,7 @@
checking available compression engines (*zlib*) (glob)
checking available compression engines for wire protocol (*zlib*) (glob)
checking "re2" regexp engine \((available|missing)\) (re)
+ checking "re2" regexp engine Rust bindings \((installed|missing)\) (re) (rust !)
checking templates (*mercurial?templates)... (glob)
checking default template (*mercurial?templates?map-cmdline.default) (glob)
checking commit editor... (*) (glob)
@@ -43,6 +46,7 @@
"hgverextra": "*", (glob)
"problems": 0,
"pythonexe": "*", (glob)
+ "pythonimplementation": "*", (glob)
"pythonlib": "*", (glob)
"pythonsecurity": [*], (glob)
"pythonver": "*.*.*", (glob)
@@ -58,12 +62,14 @@
$ HGUSER= hg debuginstall
checking encoding (ascii)...
checking Python executable (*) (glob)
+ checking Python implementation (*) (glob)
checking Python version (2.*) (glob) (no-py3 !)
checking Python version (3.*) (glob) (py3 !)
checking Python lib (.*[Ll]ib.*)... (re)
checking Python security support (*) (glob)
TLS 1.2 not supported by Python install; network connections lack modern security (?)
SNI not supported by Python install; may have connectivity issues with some servers (?)
+ checking Rust extensions \((installed|missing)\) (re)
checking Mercurial version (*) (glob)
checking Mercurial custom build (*) (glob)
checking module policy (*) (glob)
@@ -72,6 +78,7 @@
checking available compression engines (*zlib*) (glob)
checking available compression engines for wire protocol (*zlib*) (glob)
checking "re2" regexp engine \((available|missing)\) (re)
+ checking "re2" regexp engine Rust bindings \((installed|missing)\) (re) (rust !)
checking templates (*mercurial?templates)... (glob)
checking default template (*mercurial?templates?map-cmdline.default) (glob)
checking commit editor... (*) (glob)
@@ -103,12 +110,14 @@
$ HGEDITOR="~/tools/testeditor.exe" hg debuginstall
checking encoding (ascii)...
checking Python executable (*) (glob)
+ checking Python implementation (*) (glob)
checking Python version (2.*) (glob) (no-py3 !)
checking Python version (3.*) (glob) (py3 !)
checking Python lib (.*[Ll]ib.*)... (re)
checking Python security support (*) (glob)
TLS 1.2 not supported by Python install; network connections lack modern security (?)
SNI not supported by Python install; may have connectivity issues with some servers (?)
+ checking Rust extensions \((installed|missing)\) (re)
checking Mercurial version (*) (glob)
checking Mercurial custom build (*) (glob)
checking module policy (*) (glob)
@@ -117,6 +126,7 @@
checking available compression engines (*zlib*) (glob)
checking available compression engines for wire protocol (*zlib*) (glob)
checking "re2" regexp engine \((available|missing)\) (re)
+ checking "re2" regexp engine Rust bindings \((installed|missing)\) (re) (rust !)
checking templates (*mercurial?templates)... (glob)
checking default template (*mercurial?templates?map-cmdline.default) (glob)
checking commit editor... ($TESTTMP/tools/testeditor.exe)
@@ -128,12 +138,14 @@
$ HGEDITOR="c:\foo\bar\baz.exe -y -z" hg debuginstall
checking encoding (ascii)...
checking Python executable (*) (glob)
+ checking Python implementation (*) (glob)
checking Python version (2.*) (glob) (no-py3 !)
checking Python version (3.*) (glob) (py3 !)
checking Python lib (.*[Ll]ib.*)... (re)
checking Python security support (*) (glob)
TLS 1.2 not supported by Python install; network connections lack modern security (?)
SNI not supported by Python install; may have connectivity issues with some servers (?)
+ checking Rust extensions \((installed|missing)\) (re)
checking Mercurial version (*) (glob)
checking Mercurial custom build (*) (glob)
checking module policy (*) (glob)
@@ -142,6 +154,7 @@
checking available compression engines (*zlib*) (glob)
checking available compression engines for wire protocol (*zlib*) (glob)
checking "re2" regexp engine \((available|missing)\) (re)
+ checking "re2" regexp engine Rust bindings \((installed|missing)\) (re) (rust !)
checking templates (*mercurial?templates)... (glob)
checking default template (*mercurial?templates?map-cmdline.default) (glob)
checking commit editor... (c:\foo\bar\baz.exe) (windows !)
@@ -185,9 +198,11 @@
$ ./installenv/*/hg debuginstall || cat pip.log
checking encoding (ascii)...
checking Python executable (*) (glob)
+ checking Python implementation (*) (glob)
checking Python version (3.*) (glob)
checking Python lib (*)... (glob)
checking Python security support (*) (glob)
+ checking Rust extensions \((installed|missing)\) (re)
checking Mercurial version (*) (glob)
checking Mercurial custom build (*) (glob)
checking module policy (*) (glob)
@@ -196,6 +211,7 @@
checking available compression engines (*) (glob)
checking available compression engines for wire protocol (*) (glob)
checking "re2" regexp engine \((available|missing)\) (re)
+ checking "re2" regexp engine Rust bindings \((installed|missing)\) (re) (rust !)
checking templates ($TESTTMP/installenv/*/site-packages/mercurial/templates)... (glob)
checking default template ($TESTTMP/installenv/*/site-packages/mercurial/templates/map-cmdline.default) (glob)
checking commit editor... (*) (glob)
@@ -221,11 +237,13 @@
$ ./installenv/*/hg debuginstall || cat pip.log
checking encoding (ascii)...
checking Python executable (*) (glob)
+ checking Python implementation (*) (glob)
checking Python version (2.*) (glob)
checking Python lib (*)... (glob)
checking Python security support (*) (glob)
TLS 1.2 not supported by Python install; network connections lack modern security (?)
SNI not supported by Python install; may have connectivity issues with some servers (?)
+ checking Rust extensions \((installed|missing)\) (re)
checking Mercurial version (*) (glob)
checking Mercurial custom build (*) (glob)
checking module policy (*) (glob)
@@ -234,6 +252,7 @@
checking available compression engines (*) (glob)
checking available compression engines for wire protocol (*) (glob)
checking "re2" regexp engine \((available|missing)\) (re)
+ checking "re2" regexp engine Rust bindings \((installed|missing)\) (re) (rust !)
checking templates ($TESTTMP/installenv/*/site-packages/mercurial/templates)... (glob)
checking default template ($TESTTMP/installenv/*/site-packages/mercurial/templates/map-cmdline.default) (glob)
checking commit editor... (*) (glob)
--- a/tests/test-issue1175.t Mon Apr 13 16:30:13 2020 +0300
+++ b/tests/test-issue1175.t Thu Apr 16 22:51:09 2020 +0530
@@ -74,7 +74,7 @@
warning: conflicts while merging b! (edit, then use 'hg resolve --mark')
abort: unresolved conflicts, can't continue
(use 'hg resolve' and 'hg graft --continue')
- [255]
+ [1]
$ echo a > b
$ echo b3 >> b
$ hg resolve --mark b
--- a/tests/test-issue1802.t Mon Apr 13 16:30:13 2020 +0300
+++ b/tests/test-issue1802.t Thu Apr 16 22:51:09 2020 +0530
@@ -52,8 +52,6 @@
Simulate a Windows merge:
$ hg --config extensions.n=$TESTTMP/noexec.py merge --debug
- unmatched files in local:
- b
resolving manifests
branchmerge: True, force: False, partial: False
ancestor: a03b0deabf2b, local: d6fa54f68ae1+, remote: 2d8bcf2dda39
--- a/tests/test-issue522.t Mon Apr 13 16:30:13 2020 +0300
+++ b/tests/test-issue522.t Thu Apr 16 22:51:09 2020 +0530
@@ -25,8 +25,6 @@
$ hg ci -qAm 'add bar'
$ hg merge --debug
- unmatched files in local:
- bar
resolving manifests
branchmerge: True, force: False, partial: False
ancestor: bbd179dfa0a7, local: 71766447bdbb+, remote: 4d9e78aaceee
--- a/tests/test-issue672.t Mon Apr 13 16:30:13 2020 +0300
+++ b/tests/test-issue672.t Thu Apr 16 22:51:09 2020 +0530
@@ -28,7 +28,8 @@
unmatched files in other:
1a
all copies found (* = to merge, ! = divergent, % = renamed and deleted):
- src: '1' -> dst: '1a'
+ on remote side:
+ src: '1' -> dst: '1a'
checking for directory renames
resolving manifests
branchmerge: True, force: False, partial: False
@@ -56,7 +57,8 @@
unmatched files in local:
1a
all copies found (* = to merge, ! = divergent, % = renamed and deleted):
- src: '1' -> dst: '1a' *
+ on local side:
+ src: '1' -> dst: '1a' *
checking for directory renames
resolving manifests
branchmerge: True, force: False, partial: False
@@ -78,7 +80,8 @@
unmatched files in other:
1a
all copies found (* = to merge, ! = divergent, % = renamed and deleted):
- src: '1' -> dst: '1a' *
+ on remote side:
+ src: '1' -> dst: '1a' *
checking for directory renames
resolving manifests
branchmerge: True, force: False, partial: False
--- a/tests/test-journal-exists.t Mon Apr 13 16:30:13 2020 +0300
+++ b/tests/test-journal-exists.t Thu Apr 16 22:51:09 2020 +0530
@@ -15,11 +15,7 @@
$ hg recover
rolling back interrupted transaction
- checking changesets
- checking manifests
- crosschecking files in changesets and manifests
- checking files
- checked 1 changesets with 1 changes to 1 files
+ (verify step skipped, run `hg verify` to check your repository content)
recover, explicit verify
--- a/tests/test-largefiles.t Mon Apr 13 16:30:13 2020 +0300
+++ b/tests/test-largefiles.t Thu Apr 16 22:51:09 2020 +0530
@@ -126,7 +126,9 @@
Invoking status precommit hook
R large1
R normal1
- $ ls
+ $ ls -A
+ .hg
+ .hglf
sub
$ echo "testlargefile" > large1-test
$ hg add --large large1-test
@@ -254,7 +256,8 @@
$ cat sub/large2
large22
$ cd ../archive2
- $ ls
+ $ ls -A
+ .hg_archival.txt
sub
$ cat sub/normal2
normal22
--- a/tests/test-lfs-bundle.t Mon Apr 13 16:30:13 2020 +0300
+++ b/tests/test-lfs-bundle.t Thu Apr 16 22:51:09 2020 +0530
@@ -95,3 +95,32 @@
OK
---- Applying src-lfs.bundle to dst-lfs ----
OK
+
+Hint if the cache location cannot be inferred from the environment
+
+#if windows
+ $ unset LOCALAPPDATA
+ $ unset APPDATA
+ $ HGRCPATH= hg config lfs --debug
+ abort: unknown lfs usercache location
+ (define LOCALAPPDATA or APPDATA in the environment, or set lfs.usercache)
+ [255]
+#endif
+
+#if osx
+ $ unset HOME
+ $ HGRCPATH= hg config lfs --debug
+ abort: unknown lfs usercache location
+ (define HOME in the environment, or set lfs.usercache)
+ [255]
+#endif
+
+#if no-windows no-osx
+ $ unset XDG_CACHE_HOME
+ $ unset HOME
+ $ HGRCPATH= hg config lfs --debug
+ abort: unknown lfs usercache location
+ (define XDG_CACHE_HOME or HOME in the environment, or set lfs.usercache)
+ [255]
+#endif
+
--- a/tests/test-lfs-serve-access.t Mon Apr 13 16:30:13 2020 +0300
+++ b/tests/test-lfs-serve-access.t Thu Apr 16 22:51:09 2020 +0530
@@ -17,6 +17,7 @@
$ hg init server
$ hg --config "lfs.usercache=$TESTTMP/servercache" \
> --config experimental.lfs.serve=False -R server serve -d \
+ > --config experimental.lfs.worker-enable=False \
> -p $HGPORT --pid-file=hg.pid -A $TESTTMP/access.log -E $TESTTMP/errors.log
$ cat hg.pid >> $DAEMON_PIDS
@@ -65,7 +66,7 @@
$LOCALIP - - [$LOGDATE$] "POST /.git/info/lfs/objects/batch HTTP/1.1" 400 - (glob)
$LOCALIP - - [$LOGDATE$] "GET /?cmd=capabilities HTTP/1.1" 200 - (glob)
$LOCALIP - - [$LOGDATE$] "GET /?cmd=batch HTTP/1.1" 200 - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull (glob)
- $LOCALIP - - [$LOGDATE$] "GET /?cmd=getbundle HTTP/1.1" 200 - x-hgarg-1:bookmarks=1&bundlecaps=HG20%2Cbundle2%3DHG20%250Abookmarks%250Achangegroup%253D01%252C02%252C03%250Adigests%253Dmd5%252Csha1%252Csha512%250Aerror%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250Ahgtagsfnodes%250Alistkeys%250Aphases%253Dheads%250Apushkey%250Aremote-changegroup%253Dhttp%252Chttps%250Arev-branch-cache%250Astream%253Dv2&cg=1&common=0000000000000000000000000000000000000000&heads=525251863cad618e55d483555f3d00a2ca99597e&listkeys=bookmarks&phases=1 x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull (glob)
+ $LOCALIP - - [$LOGDATE$] "GET /?cmd=getbundle HTTP/1.1" 200 - x-hgarg-1:bookmarks=1&bundlecaps=HG20%2Cbundle2%3DHG20%250Abookmarks%250Achangegroup%253D01%252C02%252C03%250Acheckheads%253Drelated%250Adigests%253Dmd5%252Csha1%252Csha512%250Aerror%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250Ahgtagsfnodes%250Alistkeys%250Aphases%253Dheads%250Apushkey%250Aremote-changegroup%253Dhttp%252Chttps%250Arev-branch-cache%250Astream%253Dv2&cg=1&common=0000000000000000000000000000000000000000&heads=525251863cad618e55d483555f3d00a2ca99597e&listkeys=bookmarks&phases=1 x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull (glob)
$LOCALIP - - [$LOGDATE$] "POST /.git/info/lfs/objects/batch HTTP/1.1" 400 - (glob)
$ rm -f $TESTTMP/access.log $TESTTMP/errors.log
@@ -165,7 +166,7 @@
$LOCALIP - - [$LOGDATE$] "POST /missing/objects/batch HTTP/1.1" 404 - (glob)
$LOCALIP - - [$LOGDATE$] "GET /subdir/mount/point?cmd=capabilities HTTP/1.1" 200 - (glob)
$LOCALIP - - [$LOGDATE$] "GET /subdir/mount/point?cmd=batch HTTP/1.1" 200 - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull (glob)
- $LOCALIP - - [$LOGDATE$] "GET /subdir/mount/point?cmd=getbundle HTTP/1.1" 200 - x-hgarg-1:bookmarks=1&bundlecaps=HG20%2Cbundle2%3DHG20%250Abookmarks%250Achangegroup%253D01%252C02%252C03%250Adigests%253Dmd5%252Csha1%252Csha512%250Aerror%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250Ahgtagsfnodes%250Alistkeys%250Aphases%253Dheads%250Apushkey%250Aremote-changegroup%253Dhttp%252Chttps%250Arev-branch-cache%250Astream%253Dv2&cg=1&common=0000000000000000000000000000000000000000&heads=525251863cad618e55d483555f3d00a2ca99597e&listkeys=bookmarks&phases=1 x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull (glob)
+ $LOCALIP - - [$LOGDATE$] "GET /subdir/mount/point?cmd=getbundle HTTP/1.1" 200 - x-hgarg-1:bookmarks=1&bundlecaps=HG20%2Cbundle2%3DHG20%250Abookmarks%250Achangegroup%253D01%252C02%252C03%250Acheckheads%253Drelated%250Adigests%253Dmd5%252Csha1%252Csha512%250Aerror%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250Ahgtagsfnodes%250Alistkeys%250Aphases%253Dheads%250Apushkey%250Aremote-changegroup%253Dhttp%252Chttps%250Arev-branch-cache%250Astream%253Dv2&cg=1&common=0000000000000000000000000000000000000000&heads=525251863cad618e55d483555f3d00a2ca99597e&listkeys=bookmarks&phases=1 x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull (glob)
$LOCALIP - - [$LOGDATE$] "POST /subdir/mount/point/.git/info/lfs/objects/batch HTTP/1.1" 200 - (glob)
$LOCALIP - - [$LOGDATE$] "GET /subdir/mount/point/.hg/lfs/objects/f03217a32529a28a42d03b1244fe09b6e0f9fd06d7b966d4d50567be2abe6c0e HTTP/1.1" 200 - (glob)
@@ -311,7 +312,7 @@
$ cat $TESTTMP/access.log
$LOCALIP - - [$LOGDATE$] "GET /?cmd=capabilities HTTP/1.1" 200 - (glob)
$LOCALIP - - [$LOGDATE$] "GET /?cmd=batch HTTP/1.1" 200 - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull (glob)
- $LOCALIP - - [$LOGDATE$] "GET /?cmd=getbundle HTTP/1.1" 200 - x-hgarg-1:bookmarks=1&bundlecaps=HG20%2Cbundle2%3DHG20%250Abookmarks%250Achangegroup%253D01%252C02%252C03%250Adigests%253Dmd5%252Csha1%252Csha512%250Aerror%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250Ahgtagsfnodes%250Alistkeys%250Aphases%253Dheads%250Apushkey%250Aremote-changegroup%253Dhttp%252Chttps%250Arev-branch-cache%250Astream%253Dv2&cg=1&common=0000000000000000000000000000000000000000&heads=525251863cad618e55d483555f3d00a2ca99597e&listkeys=bookmarks&phases=1 x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull (glob)
+ $LOCALIP - - [$LOGDATE$] "GET /?cmd=getbundle HTTP/1.1" 200 - x-hgarg-1:bookmarks=1&bundlecaps=HG20%2Cbundle2%3DHG20%250Abookmarks%250Achangegroup%253D01%252C02%252C03%250Acheckheads%253Drelated%250Adigests%253Dmd5%252Csha1%252Csha512%250Aerror%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250Ahgtagsfnodes%250Alistkeys%250Aphases%253Dheads%250Apushkey%250Aremote-changegroup%253Dhttp%252Chttps%250Arev-branch-cache%250Astream%253Dv2&cg=1&common=0000000000000000000000000000000000000000&heads=525251863cad618e55d483555f3d00a2ca99597e&listkeys=bookmarks&phases=1 x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull (glob)
$LOCALIP - - [$LOGDATE$] "POST /.git/info/lfs/objects/batch HTTP/1.1" 200 - (glob)
$LOCALIP - - [$LOGDATE$] "GET /?cmd=capabilities HTTP/1.1" 200 - (glob)
$LOCALIP - - [$LOGDATE$] "GET /?cmd=batch HTTP/1.1" 200 - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D392c05922088bacf8e68a6939b480017afbf245d x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull (glob)
@@ -330,7 +331,7 @@
$LOCALIP - - [$LOGDATE$] "PUT /.hg/lfs/objects/b5bb9d8014a0f9b1d61e21e796d78dccdf1352f23cd32812f4850b878ae4944c HTTP/1.1" 422 - (glob)
$LOCALIP - - [$LOGDATE$] "GET /?cmd=capabilities HTTP/1.1" 200 - (glob)
$LOCALIP - - [$LOGDATE$] "GET /?cmd=batch HTTP/1.1" 200 - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D392c05922088bacf8e68a6939b480017afbf245d x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull (glob)
- $LOCALIP - - [$LOGDATE$] "GET /?cmd=getbundle HTTP/1.1" 200 - x-hgarg-1:bookmarks=1&bundlecaps=HG20%2Cbundle2%3DHG20%250Abookmarks%250Achangegroup%253D01%252C02%252C03%250Adigests%253Dmd5%252Csha1%252Csha512%250Aerror%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250Ahgtagsfnodes%250Alistkeys%250Aphases%253Dheads%250Apushkey%250Aremote-changegroup%253Dhttp%252Chttps%250Arev-branch-cache%250Astream%253Dv2&cg=1&common=525251863cad618e55d483555f3d00a2ca99597e&heads=506bf3d83f78c54b89e81c6411adee19fdf02156+525251863cad618e55d483555f3d00a2ca99597e&listkeys=bookmarks&phases=1 x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull (glob)
+ $LOCALIP - - [$LOGDATE$] "GET /?cmd=getbundle HTTP/1.1" 200 - x-hgarg-1:bookmarks=1&bundlecaps=HG20%2Cbundle2%3DHG20%250Abookmarks%250Achangegroup%253D01%252C02%252C03%250Acheckheads%253Drelated%250Adigests%253Dmd5%252Csha1%252Csha512%250Aerror%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250Ahgtagsfnodes%250Alistkeys%250Aphases%253Dheads%250Apushkey%250Aremote-changegroup%253Dhttp%252Chttps%250Arev-branch-cache%250Astream%253Dv2&cg=1&common=525251863cad618e55d483555f3d00a2ca99597e&heads=506bf3d83f78c54b89e81c6411adee19fdf02156+525251863cad618e55d483555f3d00a2ca99597e&listkeys=bookmarks&phases=1 x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull (glob)
$LOCALIP - - [$LOGDATE$] "POST /.git/info/lfs/objects/batch HTTP/1.1" 200 - (glob)
$LOCALIP - - [$LOGDATE$] "GET /.hg/lfs/objects/276f73cfd75f9fb519810df5f5d96d6594ca2521abd86cbcd92122f7d51a1f3d HTTP/1.1" 500 - (glob)
$LOCALIP - - [$LOGDATE$] "POST /.git/info/lfs/objects/batch HTTP/1.1" 200 - (glob)
@@ -481,7 +482,7 @@
$LOCALIP - - [$LOGDATE$] "GET /?cmd=capabilities HTTP/1.1" 401 - (glob)
$LOCALIP - - [$LOGDATE$] "GET /?cmd=capabilities HTTP/1.1" 200 - (glob)
$LOCALIP - - [$LOGDATE$] "GET /?cmd=batch HTTP/1.1" 200 - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull (glob)
- $LOCALIP - - [$LOGDATE$] "GET /?cmd=getbundle HTTP/1.1" 200 - x-hgarg-1:bookmarks=1&bundlecaps=HG20%2Cbundle2%3DHG20%250Abookmarks%250Achangegroup%253D01%252C02%252C03%250Adigests%253Dmd5%252Csha1%252Csha512%250Aerror%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250Ahgtagsfnodes%250Alistkeys%250Aphases%253Dheads%250Apushkey%250Aremote-changegroup%253Dhttp%252Chttps%250Arev-branch-cache%250Astream%253Dv2&cg=1&common=0000000000000000000000000000000000000000&heads=506bf3d83f78c54b89e81c6411adee19fdf02156+525251863cad618e55d483555f3d00a2ca99597e&listkeys=bookmarks&phases=1 x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull (glob)
+ $LOCALIP - - [$LOGDATE$] "GET /?cmd=getbundle HTTP/1.1" 200 - x-hgarg-1:bookmarks=1&bundlecaps=HG20%2Cbundle2%3DHG20%250Abookmarks%250Achangegroup%253D01%252C02%252C03%250Acheckheads%253Drelated%250Adigests%253Dmd5%252Csha1%252Csha512%250Aerror%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250Ahgtagsfnodes%250Alistkeys%250Aphases%253Dheads%250Apushkey%250Aremote-changegroup%253Dhttp%252Chttps%250Arev-branch-cache%250Astream%253Dv2&cg=1&common=0000000000000000000000000000000000000000&heads=506bf3d83f78c54b89e81c6411adee19fdf02156+525251863cad618e55d483555f3d00a2ca99597e&listkeys=bookmarks&phases=1 x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull (glob)
$LOCALIP - - [$LOGDATE$] "POST /.git/info/lfs/objects/batch HTTP/1.1" 401 - (glob)
$LOCALIP - - [$LOGDATE$] "POST /.git/info/lfs/objects/batch HTTP/1.1" 200 - (glob)
$LOCALIP - - [$LOGDATE$] "GET /.hg/lfs/objects/276f73cfd75f9fb519810df5f5d96d6594ca2521abd86cbcd92122f7d51a1f3d HTTP/1.1" 200 - (glob)
--- a/tests/test-lfs-serve.t Mon Apr 13 16:30:13 2020 +0300
+++ b/tests/test-lfs-serve.t Thu Apr 16 22:51:09 2020 +0530
@@ -65,6 +65,7 @@
> debugprocessors = $TESTTMP/debugprocessors.py
> [experimental]
> lfs.disableusercache = True
+ > lfs.worker-enable = False
> [lfs]
> threshold=10
> [web]
--- a/tests/test-lfs-test-server.t Mon Apr 13 16:30:13 2020 +0300
+++ b/tests/test-lfs-test-server.t Thu Apr 16 22:51:09 2020 +0530
@@ -40,6 +40,8 @@
#endif
$ cat >> $HGRCPATH <<EOF
+ > [experimental]
+ > lfs.worker-enable = False
> [extensions]
> lfs=
> [lfs]
@@ -294,7 +296,7 @@
bundle2-output-bundle: "HG20", 5 parts total
bundle2-output-part: "replycaps" * bytes payload (glob)
bundle2-output-part: "check:phases" 24 bytes payload
- bundle2-output-part: "check:heads" streamed payload
+ bundle2-output-part: "check:updated-heads" streamed payload
bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload
bundle2-output-part: "phase-heads" 24 bytes payload
bundle2-input-bundle: with-transaction
@@ -302,7 +304,7 @@
bundle2-input-part: total payload size * (glob)
bundle2-input-part: "check:phases" supported
bundle2-input-part: total payload size 24
- bundle2-input-part: "check:heads" supported
+ bundle2-input-part: "check:updated-heads" supported
bundle2-input-part: total payload size 20
bundle2-input-part: "changegroup" (params: 1 mandatory) supported
adding changesets
--- a/tests/test-log.t Mon Apr 13 16:30:13 2020 +0300
+++ b/tests/test-log.t Thu Apr 16 22:51:09 2020 +0530
@@ -2273,6 +2273,8 @@
> from mercurial import namespaces
>
> def reposetup(ui, repo):
+ > if not repo.local():
+ > return
> foo = {b'foo': repo[0].node()}
> names = lambda r: foo.keys()
> namemap = lambda r, name: foo.get(name)
@@ -2328,6 +2330,18 @@
$ cd ..
+New namespace is registered per repo instance, but the template keyword
+is global. So we shouldn't expect the namespace always exists. Using
+ssh:// makes sure a bundle repository is created from scratch. (issue6301)
+
+ $ hg clone -e "'$PYTHON' '$TESTDIR/dummyssh'" \
+ > -qr0 "ssh://user@dummy/`pwd`/a" a-clone
+ $ hg incoming --config extensions.names=names.py -R a-clone \
+ > -e "'$PYTHON' '$TESTDIR/dummyssh'" -T '{bars}\n' -l1
+ comparing with ssh://user@dummy/$TESTTMP/a
+ searching for changes
+
+
hg log -f dir across branches
$ hg init acrossbranches
--- a/tests/test-mactext.t Mon Apr 13 16:30:13 2020 +0300
+++ b/tests/test-mactext.t Thu Apr 16 22:51:09 2020 +0530
@@ -7,10 +7,6 @@
> data = data.replace(b'\n', b'\r')
> open(path, 'wb').write(data)
> EOF
- $ cat > print.py <<EOF
- > import sys
- > print(sys.stdin.read().replace('\n', '<LF>').replace('\r', '<CR>').replace('\0', '<NUL>'))
- > EOF
$ hg init
$ echo '[hooks]' >> .hg/hgrc
$ echo 'pretxncommit.cr = python:hgext.win32text.forbidcr' >> .hg/hgrc
@@ -32,7 +28,9 @@
rollback completed
abort: pretxncommit.cr hook failed
[255]
- $ hg cat f | "$PYTHON" print.py
- hello<LF>
- $ cat f | "$PYTHON" print.py
- hello<CR>
+ $ hg cat f | f --hexdump
+
+ 0000: 68 65 6c 6c 6f 0a |hello.|
+ $ f --hexdump f
+ f:
+ 0000: 68 65 6c 6c 6f 0d |hello.|
--- a/tests/test-manifest.py Mon Apr 13 16:30:13 2020 +0300
+++ b/tests/test-manifest.py Thu Apr 16 22:51:09 2020 +0530
@@ -88,6 +88,10 @@
with self.assertRaises(KeyError):
m[b'wat']
+ def testManifestLongHashes(self):
+ m = self.parsemanifest(b'a\0' + b'f' * 64 + b'\n')
+ self.assertEqual(binascii.unhexlify(b'f' * 64), m[b'a'])
+
def testSetItem(self):
want = BIN_HASH_1
@@ -171,7 +175,7 @@
self.assertEqual(want, m[b'foo'])
# make sure the suffix survives a copy
match = matchmod.match(util.localpath(b'/repo'), b'', [b're:foo'])
- m2 = m.matches(match)
+ m2 = m._matches(match)
self.assertEqual(want, m2[b'foo'])
self.assertEqual(1, len(m2))
m2 = m.copy()
@@ -196,7 +200,7 @@
match.matchfn = filt
with self.assertRaises(AssertionError):
- m.matches(match)
+ m._matches(match)
def testRemoveItem(self):
m = self.parsemanifest(A_SHORT_MANIFEST)
@@ -300,7 +304,7 @@
m = self.parsemanifest(A_HUGE_MANIFEST)
match = matchmod.exact([b'file1', b'file200', b'file300'])
- m2 = m.matches(match)
+ m2 = m._matches(match)
w = (b'file1\0%sx\n' b'file200\0%sl\n' b'file300\0%s\n') % (
HASH_2,
@@ -318,7 +322,7 @@
match = matchmod.exact(
[b'a/b/c/bar.txt', b'a/b/d/qux.py', b'readme.txt', b'nonexistent']
)
- m2 = m.matches(match)
+ m2 = m._matches(match)
self.assertEqual(
[b'a/b/c/bar.txt', b'a/b/d/qux.py', b'readme.txt'], m2.keys()
@@ -332,7 +336,7 @@
match = matchmod.match(
util.localpath(b'/repo'), b'', [b'a/f'], default=b'relpath'
)
- m2 = m.matches(match)
+ m2 = m._matches(match)
self.assertEqual([], m2.keys())
@@ -343,7 +347,7 @@
flist = m.keys()[80:300]
match = matchmod.exact(flist)
- m2 = m.matches(match)
+ m2 = m._matches(match)
self.assertEqual(flist, m2.keys())
@@ -352,7 +356,7 @@
m = self.parsemanifest(A_DEEPER_MANIFEST)
match = matchmod.match(util.localpath(b'/repo'), b'', [b''])
- m2 = m.matches(match)
+ m2 = m._matches(match)
self.assertEqual(m.keys(), m2.keys())
@@ -364,7 +368,7 @@
match = matchmod.match(
util.localpath(b'/repo'), b'', [b'a/b'], default=b'relpath'
)
- m2 = m.matches(match)
+ m2 = m._matches(match)
self.assertEqual(
[
@@ -388,7 +392,7 @@
m = self.parsemanifest(A_DEEPER_MANIFEST)
match = matchmod.exact([b'a/b'])
- m2 = m.matches(match)
+ m2 = m._matches(match)
self.assertEqual([], m2.keys())
@@ -400,7 +404,7 @@
match = matchmod.match(
util.localpath(b'/repo'), b'a/b', [b'.'], default=b'relpath'
)
- m2 = m.matches(match)
+ m2 = m._matches(match)
self.assertEqual(
[
@@ -423,7 +427,7 @@
m = self.parsemanifest(A_DEEPER_MANIFEST)
match = matchmod.match(util.localpath(b'/repo'), b'', [b'a/b/*/*.txt'])
- m2 = m.matches(match)
+ m2 = m._matches(match)
self.assertEqual(
[b'a/b/c/bar.txt', b'a/b/c/foo.txt', b'a/b/d/ten.txt'], m2.keys()
--- a/tests/test-merge-changedelete.t Mon Apr 13 16:30:13 2020 +0300
+++ b/tests/test-merge-changedelete.t Thu Apr 16 22:51:09 2020 +0530
@@ -76,27 +76,23 @@
U file2
U file3
--- debugmergestate ---
- * version 2 records
- local: 13910f48cf7bdb2a0ba6e24b4900e4fdd5739dd4
- other: 10f9a0a634e82080907e62f075ab119cbc565ea6
- labels:
- local: working copy
- other: merge rev
- file extras: file1 (ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff)
- file: file1 (record type "C", state "u", hash 60b27f004e454aca81b0480209cce5081ec52390)
- local path: file1 (flags "")
+ local (working copy): 13910f48cf7bdb2a0ba6e24b4900e4fdd5739dd4
+ other (merge rev): 10f9a0a634e82080907e62f075ab119cbc565ea6
+ file: file1 (state "u")
+ local path: file1 (hash 60b27f004e454aca81b0480209cce5081ec52390, flags "")
ancestor path: file1 (node b8e02f6433738021a065f94175c7cd23db5f05be)
- other path: file1 (node null)
- file extras: file2 (ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff)
- file: file2 (record type "C", state "u", hash null)
- local path: file2 (flags "")
+ other path: file1 (node 0000000000000000000000000000000000000000)
+ extra: ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff
+ file: file2 (state "u")
+ local path: file2 (hash 0000000000000000000000000000000000000000, flags "")
ancestor path: file2 (node 5d9299349fc01ddd25d0070d149b124d8f10411e)
other path: file2 (node e7c1328648519852e723de86c0c0525acd779257)
- file extras: file3 (ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff)
- file: file3 (record type "F", state "u", hash d5b0a58bc47161b1b8a831084b366f757c4f0b11)
- local path: file3 (flags "")
+ extra: ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff
+ file: file3 (state "u")
+ local path: file3 (hash d5b0a58bc47161b1b8a831084b366f757c4f0b11, flags "")
ancestor path: file3 (node 2661d26c649684b482d10f91960cc3db683c38b4)
other path: file3 (node a2644c43e210356772c7772a8674544a62e06beb)
+ extra: ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff
--- file1 ---
1
changed
@@ -145,27 +141,23 @@
R file2
U file3
--- debugmergestate ---
- * version 2 records
- local: 13910f48cf7bdb2a0ba6e24b4900e4fdd5739dd4
- other: 10f9a0a634e82080907e62f075ab119cbc565ea6
- labels:
- local: working copy
- other: merge rev
- file extras: file1 (ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff)
- file: file1 (record type "C", state "r", hash 60b27f004e454aca81b0480209cce5081ec52390)
- local path: file1 (flags "")
+ local (working copy): 13910f48cf7bdb2a0ba6e24b4900e4fdd5739dd4
+ other (merge rev): 10f9a0a634e82080907e62f075ab119cbc565ea6
+ file: file1 (state "r")
+ local path: file1 (hash 60b27f004e454aca81b0480209cce5081ec52390, flags "")
ancestor path: file1 (node b8e02f6433738021a065f94175c7cd23db5f05be)
- other path: file1 (node null)
- file extras: file2 (ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff)
- file: file2 (record type "C", state "r", hash null)
- local path: file2 (flags "")
+ other path: file1 (node 0000000000000000000000000000000000000000)
+ extra: ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff
+ file: file2 (state "r")
+ local path: file2 (hash 0000000000000000000000000000000000000000, flags "")
ancestor path: file2 (node 5d9299349fc01ddd25d0070d149b124d8f10411e)
other path: file2 (node e7c1328648519852e723de86c0c0525acd779257)
- file extras: file3 (ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff)
- file: file3 (record type "F", state "u", hash d5b0a58bc47161b1b8a831084b366f757c4f0b11)
- local path: file3 (flags "")
+ extra: ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff
+ file: file3 (state "u")
+ local path: file3 (hash d5b0a58bc47161b1b8a831084b366f757c4f0b11, flags "")
ancestor path: file3 (node 2661d26c649684b482d10f91960cc3db683c38b4)
other path: file3 (node a2644c43e210356772c7772a8674544a62e06beb)
+ extra: ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff
--- file1 ---
1
changed
@@ -227,27 +219,23 @@
R file2
U file3
--- debugmergestate ---
- * version 2 records
- local: 13910f48cf7bdb2a0ba6e24b4900e4fdd5739dd4
- other: 10f9a0a634e82080907e62f075ab119cbc565ea6
- labels:
- local: working copy
- other: merge rev
- file extras: file1 (ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff)
- file: file1 (record type "C", state "r", hash 60b27f004e454aca81b0480209cce5081ec52390)
- local path: file1 (flags "")
+ local (working copy): 13910f48cf7bdb2a0ba6e24b4900e4fdd5739dd4
+ other (merge rev): 10f9a0a634e82080907e62f075ab119cbc565ea6
+ file: file1 (state "r")
+ local path: file1 (hash 60b27f004e454aca81b0480209cce5081ec52390, flags "")
ancestor path: file1 (node b8e02f6433738021a065f94175c7cd23db5f05be)
- other path: file1 (node null)
- file extras: file2 (ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff)
- file: file2 (record type "C", state "r", hash null)
- local path: file2 (flags "")
+ other path: file1 (node 0000000000000000000000000000000000000000)
+ extra: ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff
+ file: file2 (state "r")
+ local path: file2 (hash 0000000000000000000000000000000000000000, flags "")
ancestor path: file2 (node 5d9299349fc01ddd25d0070d149b124d8f10411e)
other path: file2 (node e7c1328648519852e723de86c0c0525acd779257)
- file extras: file3 (ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff)
- file: file3 (record type "F", state "u", hash d5b0a58bc47161b1b8a831084b366f757c4f0b11)
- local path: file3 (flags "")
+ extra: ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff
+ file: file3 (state "u")
+ local path: file3 (hash d5b0a58bc47161b1b8a831084b366f757c4f0b11, flags "")
ancestor path: file3 (node 2661d26c649684b482d10f91960cc3db683c38b4)
other path: file3 (node a2644c43e210356772c7772a8674544a62e06beb)
+ extra: ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff
*** file1 does not exist
--- file2 ---
2
@@ -293,27 +281,23 @@
U file2
U file3
--- debugmergestate ---
- * version 2 records
- local: 13910f48cf7bdb2a0ba6e24b4900e4fdd5739dd4
- other: 10f9a0a634e82080907e62f075ab119cbc565ea6
- labels:
- local: working copy
- other: merge rev
- file extras: file1 (ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff)
- file: file1 (record type "C", state "r", hash 60b27f004e454aca81b0480209cce5081ec52390)
- local path: file1 (flags "")
+ local (working copy): 13910f48cf7bdb2a0ba6e24b4900e4fdd5739dd4
+ other (merge rev): 10f9a0a634e82080907e62f075ab119cbc565ea6
+ file: file1 (state "r")
+ local path: file1 (hash 60b27f004e454aca81b0480209cce5081ec52390, flags "")
ancestor path: file1 (node b8e02f6433738021a065f94175c7cd23db5f05be)
- other path: file1 (node null)
- file extras: file2 (ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff)
- file: file2 (record type "C", state "u", hash null)
- local path: file2 (flags "")
+ other path: file1 (node 0000000000000000000000000000000000000000)
+ extra: ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff
+ file: file2 (state "u")
+ local path: file2 (hash 0000000000000000000000000000000000000000, flags "")
ancestor path: file2 (node 5d9299349fc01ddd25d0070d149b124d8f10411e)
other path: file2 (node e7c1328648519852e723de86c0c0525acd779257)
- file extras: file3 (ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff)
- file: file3 (record type "F", state "u", hash d5b0a58bc47161b1b8a831084b366f757c4f0b11)
- local path: file3 (flags "")
+ extra: ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff
+ file: file3 (state "u")
+ local path: file3 (hash d5b0a58bc47161b1b8a831084b366f757c4f0b11, flags "")
ancestor path: file3 (node 2661d26c649684b482d10f91960cc3db683c38b4)
other path: file3 (node a2644c43e210356772c7772a8674544a62e06beb)
+ extra: ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff
*** file1 does not exist
--- file2 ---
2
@@ -346,27 +330,23 @@
R file2
R file3
--- debugmergestate ---
- * version 2 records
- local: 13910f48cf7bdb2a0ba6e24b4900e4fdd5739dd4
- other: 10f9a0a634e82080907e62f075ab119cbc565ea6
- labels:
- local: working copy
- other: merge rev
- file extras: file1 (ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff)
- file: file1 (record type "C", state "r", hash 60b27f004e454aca81b0480209cce5081ec52390)
- local path: file1 (flags "")
+ local (working copy): 13910f48cf7bdb2a0ba6e24b4900e4fdd5739dd4
+ other (merge rev): 10f9a0a634e82080907e62f075ab119cbc565ea6
+ file: file1 (state "r")
+ local path: file1 (hash 60b27f004e454aca81b0480209cce5081ec52390, flags "")
ancestor path: file1 (node b8e02f6433738021a065f94175c7cd23db5f05be)
- other path: file1 (node null)
- file extras: file2 (ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff)
- file: file2 (record type "C", state "r", hash null)
- local path: file2 (flags "")
+ other path: file1 (node 0000000000000000000000000000000000000000)
+ extra: ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff
+ file: file2 (state "r")
+ local path: file2 (hash 0000000000000000000000000000000000000000, flags "")
ancestor path: file2 (node 5d9299349fc01ddd25d0070d149b124d8f10411e)
other path: file2 (node e7c1328648519852e723de86c0c0525acd779257)
- file extras: file3 (ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff)
- file: file3 (record type "F", state "r", hash d5b0a58bc47161b1b8a831084b366f757c4f0b11)
- local path: file3 (flags "")
+ extra: ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff
+ file: file3 (state "r")
+ local path: file3 (hash d5b0a58bc47161b1b8a831084b366f757c4f0b11, flags "")
ancestor path: file3 (node 2661d26c649684b482d10f91960cc3db683c38b4)
other path: file3 (node a2644c43e210356772c7772a8674544a62e06beb)
+ extra: ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff
--- file1 ---
1
changed
@@ -395,27 +375,23 @@
R file2
R file3
--- debugmergestate ---
- * version 2 records
- local: 13910f48cf7bdb2a0ba6e24b4900e4fdd5739dd4
- other: 10f9a0a634e82080907e62f075ab119cbc565ea6
- labels:
- local: working copy
- other: merge rev
- file extras: file1 (ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff)
- file: file1 (record type "C", state "r", hash 60b27f004e454aca81b0480209cce5081ec52390)
- local path: file1 (flags "")
+ local (working copy): 13910f48cf7bdb2a0ba6e24b4900e4fdd5739dd4
+ other (merge rev): 10f9a0a634e82080907e62f075ab119cbc565ea6
+ file: file1 (state "r")
+ local path: file1 (hash 60b27f004e454aca81b0480209cce5081ec52390, flags "")
ancestor path: file1 (node b8e02f6433738021a065f94175c7cd23db5f05be)
- other path: file1 (node null)
- file extras: file2 (ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff)
- file: file2 (record type "C", state "r", hash null)
- local path: file2 (flags "")
+ other path: file1 (node 0000000000000000000000000000000000000000)
+ extra: ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff
+ file: file2 (state "r")
+ local path: file2 (hash 0000000000000000000000000000000000000000, flags "")
ancestor path: file2 (node 5d9299349fc01ddd25d0070d149b124d8f10411e)
other path: file2 (node e7c1328648519852e723de86c0c0525acd779257)
- file extras: file3 (ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff)
- file: file3 (record type "F", state "r", hash d5b0a58bc47161b1b8a831084b366f757c4f0b11)
- local path: file3 (flags "")
+ extra: ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff
+ file: file3 (state "r")
+ local path: file3 (hash d5b0a58bc47161b1b8a831084b366f757c4f0b11, flags "")
ancestor path: file3 (node 2661d26c649684b482d10f91960cc3db683c38b4)
other path: file3 (node a2644c43e210356772c7772a8674544a62e06beb)
+ extra: ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff
*** file1 does not exist
--- file2 ---
2
@@ -445,27 +421,23 @@
U file2
U file3
--- debugmergestate ---
- * version 2 records
- local: 13910f48cf7bdb2a0ba6e24b4900e4fdd5739dd4
- other: 10f9a0a634e82080907e62f075ab119cbc565ea6
- labels:
- local: working copy
- other: merge rev
- file extras: file1 (ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff)
- file: file1 (record type "C", state "u", hash 60b27f004e454aca81b0480209cce5081ec52390)
- local path: file1 (flags "")
+ local (working copy): 13910f48cf7bdb2a0ba6e24b4900e4fdd5739dd4
+ other (merge rev): 10f9a0a634e82080907e62f075ab119cbc565ea6
+ file: file1 (state "u")
+ local path: file1 (hash 60b27f004e454aca81b0480209cce5081ec52390, flags "")
ancestor path: file1 (node b8e02f6433738021a065f94175c7cd23db5f05be)
- other path: file1 (node null)
- file extras: file2 (ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff)
- file: file2 (record type "C", state "u", hash null)
- local path: file2 (flags "")
+ other path: file1 (node 0000000000000000000000000000000000000000)
+ extra: ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff
+ file: file2 (state "u")
+ local path: file2 (hash 0000000000000000000000000000000000000000, flags "")
ancestor path: file2 (node 5d9299349fc01ddd25d0070d149b124d8f10411e)
other path: file2 (node e7c1328648519852e723de86c0c0525acd779257)
- file extras: file3 (ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff)
- file: file3 (record type "F", state "u", hash d5b0a58bc47161b1b8a831084b366f757c4f0b11)
- local path: file3 (flags "")
+ extra: ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff
+ file: file3 (state "u")
+ local path: file3 (hash d5b0a58bc47161b1b8a831084b366f757c4f0b11, flags "")
ancestor path: file3 (node 2661d26c649684b482d10f91960cc3db683c38b4)
other path: file3 (node a2644c43e210356772c7772a8674544a62e06beb)
+ extra: ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff
--- file1 ---
1
changed
@@ -506,27 +478,23 @@
U file2
U file3
--- debugmergestate ---
- * version 2 records
- local: 13910f48cf7bdb2a0ba6e24b4900e4fdd5739dd4
- other: 10f9a0a634e82080907e62f075ab119cbc565ea6
- labels:
- local: working copy
- other: merge rev
- file extras: file1 (ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff)
- file: file1 (record type "C", state "u", hash 60b27f004e454aca81b0480209cce5081ec52390)
- local path: file1 (flags "")
+ local (working copy): 13910f48cf7bdb2a0ba6e24b4900e4fdd5739dd4
+ other (merge rev): 10f9a0a634e82080907e62f075ab119cbc565ea6
+ file: file1 (state "u")
+ local path: file1 (hash 60b27f004e454aca81b0480209cce5081ec52390, flags "")
ancestor path: file1 (node b8e02f6433738021a065f94175c7cd23db5f05be)
- other path: file1 (node null)
- file extras: file2 (ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff)
- file: file2 (record type "C", state "u", hash null)
- local path: file2 (flags "")
+ other path: file1 (node 0000000000000000000000000000000000000000)
+ extra: ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff
+ file: file2 (state "u")
+ local path: file2 (hash 0000000000000000000000000000000000000000, flags "")
ancestor path: file2 (node 5d9299349fc01ddd25d0070d149b124d8f10411e)
other path: file2 (node e7c1328648519852e723de86c0c0525acd779257)
- file extras: file3 (ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff)
- file: file3 (record type "F", state "u", hash d5b0a58bc47161b1b8a831084b366f757c4f0b11)
- local path: file3 (flags "")
+ extra: ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff
+ file: file3 (state "u")
+ local path: file3 (hash d5b0a58bc47161b1b8a831084b366f757c4f0b11, flags "")
ancestor path: file3 (node 2661d26c649684b482d10f91960cc3db683c38b4)
other path: file3 (node a2644c43e210356772c7772a8674544a62e06beb)
+ extra: ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff
--- file1 ---
1
changed
@@ -569,27 +537,23 @@
U file2
U file3
--- debugmergestate ---
- * version 2 records
- local: 13910f48cf7bdb2a0ba6e24b4900e4fdd5739dd4
- other: 10f9a0a634e82080907e62f075ab119cbc565ea6
- labels:
- local: working copy
- other: merge rev
- file extras: file1 (ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff)
- file: file1 (record type "C", state "u", hash 60b27f004e454aca81b0480209cce5081ec52390)
- local path: file1 (flags "")
+ local (working copy): 13910f48cf7bdb2a0ba6e24b4900e4fdd5739dd4
+ other (merge rev): 10f9a0a634e82080907e62f075ab119cbc565ea6
+ file: file1 (state "u")
+ local path: file1 (hash 60b27f004e454aca81b0480209cce5081ec52390, flags "")
ancestor path: file1 (node b8e02f6433738021a065f94175c7cd23db5f05be)
- other path: file1 (node null)
- file extras: file2 (ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff)
- file: file2 (record type "C", state "u", hash null)
- local path: file2 (flags "")
+ other path: file1 (node 0000000000000000000000000000000000000000)
+ extra: ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff
+ file: file2 (state "u")
+ local path: file2 (hash 0000000000000000000000000000000000000000, flags "")
ancestor path: file2 (node 5d9299349fc01ddd25d0070d149b124d8f10411e)
other path: file2 (node e7c1328648519852e723de86c0c0525acd779257)
- file extras: file3 (ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff)
- file: file3 (record type "F", state "u", hash d5b0a58bc47161b1b8a831084b366f757c4f0b11)
- local path: file3 (flags "")
+ extra: ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff
+ file: file3 (state "u")
+ local path: file3 (hash d5b0a58bc47161b1b8a831084b366f757c4f0b11, flags "")
ancestor path: file3 (node 2661d26c649684b482d10f91960cc3db683c38b4)
other path: file3 (node a2644c43e210356772c7772a8674544a62e06beb)
+ extra: ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff
--- file1 ---
1
changed
@@ -629,27 +593,23 @@
U file2
U file3
--- debugmergestate ---
- * version 2 records
- local: 13910f48cf7bdb2a0ba6e24b4900e4fdd5739dd4
- other: 10f9a0a634e82080907e62f075ab119cbc565ea6
- labels:
- local: working copy
- other: merge rev
- file extras: file1 (ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff)
- file: file1 (record type "C", state "u", hash 60b27f004e454aca81b0480209cce5081ec52390)
- local path: file1 (flags "")
+ local (working copy): 13910f48cf7bdb2a0ba6e24b4900e4fdd5739dd4
+ other (merge rev): 10f9a0a634e82080907e62f075ab119cbc565ea6
+ file: file1 (state "u")
+ local path: file1 (hash 60b27f004e454aca81b0480209cce5081ec52390, flags "")
ancestor path: file1 (node b8e02f6433738021a065f94175c7cd23db5f05be)
- other path: file1 (node null)
- file extras: file2 (ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff)
- file: file2 (record type "C", state "u", hash null)
- local path: file2 (flags "")
+ other path: file1 (node 0000000000000000000000000000000000000000)
+ extra: ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff
+ file: file2 (state "u")
+ local path: file2 (hash 0000000000000000000000000000000000000000, flags "")
ancestor path: file2 (node 5d9299349fc01ddd25d0070d149b124d8f10411e)
other path: file2 (node e7c1328648519852e723de86c0c0525acd779257)
- file extras: file3 (ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff)
- file: file3 (record type "F", state "u", hash d5b0a58bc47161b1b8a831084b366f757c4f0b11)
- local path: file3 (flags "")
+ extra: ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff
+ file: file3 (state "u")
+ local path: file3 (hash d5b0a58bc47161b1b8a831084b366f757c4f0b11, flags "")
ancestor path: file3 (node 2661d26c649684b482d10f91960cc3db683c38b4)
other path: file3 (node a2644c43e210356772c7772a8674544a62e06beb)
+ extra: ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff
--- file1 ---
1
changed
@@ -802,22 +762,18 @@
U file1
U file2
--- debugmergestate ---
- * version 2 records
- local: ab57bf49aa276a22d35a473592d4c34b5abc3eff
- other: 10f9a0a634e82080907e62f075ab119cbc565ea6
- labels:
- local: working copy
- other: destination
- file extras: file1 (ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff)
- file: file1 (record type "C", state "u", hash 60b27f004e454aca81b0480209cce5081ec52390)
- local path: file1 (flags "")
+ local (working copy): ab57bf49aa276a22d35a473592d4c34b5abc3eff
+ other (destination): 10f9a0a634e82080907e62f075ab119cbc565ea6
+ file: file1 (state "u")
+ local path: file1 (hash 60b27f004e454aca81b0480209cce5081ec52390, flags "")
ancestor path: file1 (node b8e02f6433738021a065f94175c7cd23db5f05be)
- other path: file1 (node null)
- file extras: file2 (ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff)
- file: file2 (record type "C", state "u", hash null)
- local path: file2 (flags "")
+ other path: file1 (node 0000000000000000000000000000000000000000)
+ extra: ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff
+ file: file2 (state "u")
+ local path: file2 (hash 0000000000000000000000000000000000000000, flags "")
ancestor path: file2 (node 5d9299349fc01ddd25d0070d149b124d8f10411e)
other path: file2 (node e7c1328648519852e723de86c0c0525acd779257)
+ extra: ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff
--- file1 ---
1
changed
@@ -845,22 +801,18 @@
R file1
R file2
--- debugmergestate ---
- * version 2 records
- local: ab57bf49aa276a22d35a473592d4c34b5abc3eff
- other: 10f9a0a634e82080907e62f075ab119cbc565ea6
- labels:
- local: working copy
- other: destination
- file extras: file1 (ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff)
- file: file1 (record type "C", state "r", hash 60b27f004e454aca81b0480209cce5081ec52390)
- local path: file1 (flags "")
+ local (working copy): ab57bf49aa276a22d35a473592d4c34b5abc3eff
+ other (destination): 10f9a0a634e82080907e62f075ab119cbc565ea6
+ file: file1 (state "r")
+ local path: file1 (hash 60b27f004e454aca81b0480209cce5081ec52390, flags "")
ancestor path: file1 (node b8e02f6433738021a065f94175c7cd23db5f05be)
- other path: file1 (node null)
- file extras: file2 (ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff)
- file: file2 (record type "C", state "r", hash null)
- local path: file2 (flags "")
+ other path: file1 (node 0000000000000000000000000000000000000000)
+ extra: ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff
+ file: file2 (state "r")
+ local path: file2 (hash 0000000000000000000000000000000000000000, flags "")
ancestor path: file2 (node 5d9299349fc01ddd25d0070d149b124d8f10411e)
other path: file2 (node e7c1328648519852e723de86c0c0525acd779257)
+ extra: ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff
--- file1 ---
1
changed
@@ -886,22 +838,18 @@
R file1
R file2
--- debugmergestate ---
- * version 2 records
- local: ab57bf49aa276a22d35a473592d4c34b5abc3eff
- other: 10f9a0a634e82080907e62f075ab119cbc565ea6
- labels:
- local: working copy
- other: destination
- file extras: file1 (ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff)
- file: file1 (record type "C", state "r", hash 60b27f004e454aca81b0480209cce5081ec52390)
- local path: file1 (flags "")
+ local (working copy): ab57bf49aa276a22d35a473592d4c34b5abc3eff
+ other (destination): 10f9a0a634e82080907e62f075ab119cbc565ea6
+ file: file1 (state "r")
+ local path: file1 (hash 60b27f004e454aca81b0480209cce5081ec52390, flags "")
ancestor path: file1 (node b8e02f6433738021a065f94175c7cd23db5f05be)
- other path: file1 (node null)
- file extras: file2 (ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff)
- file: file2 (record type "C", state "r", hash null)
- local path: file2 (flags "")
+ other path: file1 (node 0000000000000000000000000000000000000000)
+ extra: ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff
+ file: file2 (state "r")
+ local path: file2 (hash 0000000000000000000000000000000000000000, flags "")
ancestor path: file2 (node 5d9299349fc01ddd25d0070d149b124d8f10411e)
other path: file2 (node e7c1328648519852e723de86c0c0525acd779257)
+ extra: ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff
*** file1 does not exist
--- file2 ---
2
@@ -929,22 +877,18 @@
U file1
U file2
--- debugmergestate ---
- * version 2 records
- local: ab57bf49aa276a22d35a473592d4c34b5abc3eff
- other: 10f9a0a634e82080907e62f075ab119cbc565ea6
- labels:
- local: working copy
- other: destination
- file extras: file1 (ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff)
- file: file1 (record type "C", state "u", hash 60b27f004e454aca81b0480209cce5081ec52390)
- local path: file1 (flags "")
+ local (working copy): ab57bf49aa276a22d35a473592d4c34b5abc3eff
+ other (destination): 10f9a0a634e82080907e62f075ab119cbc565ea6
+ file: file1 (state "u")
+ local path: file1 (hash 60b27f004e454aca81b0480209cce5081ec52390, flags "")
ancestor path: file1 (node b8e02f6433738021a065f94175c7cd23db5f05be)
- other path: file1 (node null)
- file extras: file2 (ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff)
- file: file2 (record type "C", state "u", hash null)
- local path: file2 (flags "")
+ other path: file1 (node 0000000000000000000000000000000000000000)
+ extra: ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff
+ file: file2 (state "u")
+ local path: file2 (hash 0000000000000000000000000000000000000000, flags "")
ancestor path: file2 (node 5d9299349fc01ddd25d0070d149b124d8f10411e)
other path: file2 (node e7c1328648519852e723de86c0c0525acd779257)
+ extra: ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff
--- file1 ---
1
changed
@@ -980,22 +924,18 @@
U file1
U file2
--- debugmergestate ---
- * version 2 records
- local: ab57bf49aa276a22d35a473592d4c34b5abc3eff
- other: 10f9a0a634e82080907e62f075ab119cbc565ea6
- labels:
- local: working copy
- other: destination
- file extras: file1 (ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff)
- file: file1 (record type "C", state "u", hash 60b27f004e454aca81b0480209cce5081ec52390)
- local path: file1 (flags "")
+ local (working copy): ab57bf49aa276a22d35a473592d4c34b5abc3eff
+ other (destination): 10f9a0a634e82080907e62f075ab119cbc565ea6
+ file: file1 (state "u")
+ local path: file1 (hash 60b27f004e454aca81b0480209cce5081ec52390, flags "")
ancestor path: file1 (node b8e02f6433738021a065f94175c7cd23db5f05be)
- other path: file1 (node null)
- file extras: file2 (ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff)
- file: file2 (record type "C", state "u", hash null)
- local path: file2 (flags "")
+ other path: file1 (node 0000000000000000000000000000000000000000)
+ extra: ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff
+ file: file2 (state "u")
+ local path: file2 (hash 0000000000000000000000000000000000000000, flags "")
ancestor path: file2 (node 5d9299349fc01ddd25d0070d149b124d8f10411e)
other path: file2 (node e7c1328648519852e723de86c0c0525acd779257)
+ extra: ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff
--- file1 ---
1
changed
@@ -1032,22 +972,18 @@
U file1
U file2
--- debugmergestate ---
- * version 2 records
- local: ab57bf49aa276a22d35a473592d4c34b5abc3eff
- other: 10f9a0a634e82080907e62f075ab119cbc565ea6
- labels:
- local: working copy
- other: destination
- file extras: file1 (ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff)
- file: file1 (record type "C", state "u", hash 60b27f004e454aca81b0480209cce5081ec52390)
- local path: file1 (flags "")
+ local (working copy): ab57bf49aa276a22d35a473592d4c34b5abc3eff
+ other (destination): 10f9a0a634e82080907e62f075ab119cbc565ea6
+ file: file1 (state "u")
+ local path: file1 (hash 60b27f004e454aca81b0480209cce5081ec52390, flags "")
ancestor path: file1 (node b8e02f6433738021a065f94175c7cd23db5f05be)
- other path: file1 (node null)
- file extras: file2 (ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff)
- file: file2 (record type "C", state "u", hash null)
- local path: file2 (flags "")
+ other path: file1 (node 0000000000000000000000000000000000000000)
+ extra: ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff
+ file: file2 (state "u")
+ local path: file2 (hash 0000000000000000000000000000000000000000, flags "")
ancestor path: file2 (node 5d9299349fc01ddd25d0070d149b124d8f10411e)
other path: file2 (node e7c1328648519852e723de86c0c0525acd779257)
+ extra: ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff
--- file1 ---
1
changed
--- a/tests/test-merge-criss-cross.t Mon Apr 13 16:30:13 2020 +0300
+++ b/tests/test-merge-criss-cross.t Thu Apr 16 22:51:09 2020 +0530
@@ -410,11 +410,6 @@
note: merging c0ef19750a22+ and 6ca01f7342b9 using bids from ancestors 11b5b303e36c and 154e6000f54e
calculating bids for ancestor 11b5b303e36c
- unmatched files in local:
- d1/a
- d1/b
- unmatched files in other:
- d2/b
resolving manifests
branchmerge: True, force: False, partial: False
ancestor: 11b5b303e36c, local: c0ef19750a22+, remote: 6ca01f7342b9
@@ -424,7 +419,8 @@
unmatched files in other:
d2/b
all copies found (* = to merge, ! = divergent, % = renamed and deleted):
- src: 'd1/b' -> dst: 'd2/b'
+ on remote side:
+ src: 'd1/b' -> dst: 'd2/b'
checking for directory renames
discovered dir src: 'd1/' -> dst: 'd2/'
resolving manifests
--- a/tests/test-merge2.t Mon Apr 13 16:30:13 2020 +0300
+++ b/tests/test-merge2.t Thu Apr 16 22:51:09 2020 +0530
@@ -50,4 +50,8 @@
adding b
created new head
+ $ hg merge 'wdir()'
+ abort: merging with the working copy has no effect
+ [255]
+
$ cd ..
--- a/tests/test-merge4.t Mon Apr 13 16:30:13 2020 +0300
+++ b/tests/test-merge4.t Thu Apr 16 22:51:09 2020 +0530
@@ -23,3 +23,37 @@
abort: cannot commit merge with missing files
[255]
+
+Test conflict*() revsets
+
+# Bad usage
+ $ hg log -r 'conflictlocal(foo)'
+ hg: parse error: conflictlocal takes no arguments
+ [255]
+ $ hg log -r 'conflictother(foo)'
+ hg: parse error: conflictother takes no arguments
+ [255]
+ $ hg co -C .
+ 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+# No merge parents when not merging
+ $ hg log -r 'conflictlocal() + conflictother()'
+# No merge parents when there is no conflict
+ $ hg merge 1
+ 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ (branch merge, don't forget to commit)
+ $ hg log -r 'conflictlocal() + conflictother()'
+ $ hg co -C .
+ 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
+ $ echo conflict > b
+ $ hg ci -Aqm 'conflicting change to b'
+ $ hg merge 1
+ merging b
+ warning: conflicts while merging b! (edit, then use 'hg resolve --mark')
+ 0 files updated, 0 files merged, 0 files removed, 1 files unresolved
+ use 'hg resolve' to retry unresolved file merges or 'hg merge --abort' to abandon
+ [1]
+# Shows merge parents when there is a conflict
+ $ hg log -r 'conflictlocal()' -T '{rev} {desc}\n'
+ 3 conflicting change to b
+ $ hg log -r 'conflictother()' -T '{rev} {desc}\n'
+ 1 commit #1
--- a/tests/test-mq-merge.t Mon Apr 13 16:30:13 2020 +0300
+++ b/tests/test-mq-merge.t Thu Apr 16 22:51:09 2020 +0530
@@ -132,7 +132,6 @@
patch didn't work out, merging patcha
1 files updated, 0 files merged, 1 files removed, 0 files unresolved
0 files updated, 2 files merged, 0 files removed, 0 files unresolved
- (branch merge, don't forget to commit)
applying patcha2
now at: patcha2
--- a/tests/test-narrow-acl.t Mon Apr 13 16:30:13 2020 +0300
+++ b/tests/test-narrow-acl.t Thu Apr 16 22:51:09 2020 +0530
@@ -28,7 +28,8 @@
2 files updated, 0 files merged, 0 files removed, 0 files unresolved
The clone directory should only contain f1 and f2
- $ ls -1 narrowclone1 | sort
+ $ ls -A -1 narrowclone1 | sort
+ .hg
f1
f2
--- a/tests/test-narrow-clone-stream.t Mon Apr 13 16:30:13 2020 +0300
+++ b/tests/test-narrow-clone-stream.t Thu Apr 16 22:51:09 2020 +0530
@@ -54,7 +54,8 @@
transferred * KB in * seconds (* */sec) (glob)
$ cd narrow
- $ ls
+ $ ls -A
+ .hg
$ hg tracked
I path:dir/src/F10
--- a/tests/test-narrow-clone.t Mon Apr 13 16:30:13 2020 +0300
+++ b/tests/test-narrow-clone.t Thu Apr 16 22:51:09 2020 +0530
@@ -180,7 +180,8 @@
$ hg tracked
$ hg update
0 files updated, 0 files merged, 0 files removed, 0 files unresolved
- $ ls
+ $ ls -A
+ .hg
$ cd ..
--- a/tests/test-narrow-pull.t Mon Apr 13 16:30:13 2020 +0300
+++ b/tests/test-narrow-pull.t Thu Apr 16 22:51:09 2020 +0530
@@ -26,7 +26,8 @@
updating to branch default
2 files updated, 0 files merged, 0 files removed, 0 files unresolved
$ cd narrow
- $ ls
+ $ ls -A
+ .hg
f2
f8
$ cat f2 f8
--- a/tests/test-narrow-trackedcmd.t Mon Apr 13 16:30:13 2020 +0300
+++ b/tests/test-narrow-trackedcmd.t Thu Apr 16 22:51:09 2020 +0530
@@ -46,7 +46,8 @@
$ cd narrow
$ hg tracked
I path:inside
- $ ls
+ $ ls -A
+ .hg
inside
$ cat inside/f
inside
--- a/tests/test-narrow-widen-no-ellipsis.t Mon Apr 13 16:30:13 2020 +0300
+++ b/tests/test-narrow-widen-no-ellipsis.t Thu Apr 16 22:51:09 2020 +0530
@@ -55,7 +55,8 @@
added 0 changesets with 1 changes to 1 files
$ hg tracked
I path:inside
- $ ls
+ $ ls -A
+ .hg
inside
$ cat inside/f
inside
--- a/tests/test-narrow-widen.t Mon Apr 13 16:30:13 2020 +0300
+++ b/tests/test-narrow-widen.t Thu Apr 16 22:51:09 2020 +0530
@@ -51,7 +51,8 @@
$ cd narrow
$ hg tracked
I path:inside
- $ ls
+ $ ls -A
+ .hg
inside
$ cat inside/f
inside
--- a/tests/test-notify.t Mon Apr 13 16:30:13 2020 +0300
+++ b/tests/test-notify.t Thu Apr 16 22:51:09 2020 +0530
@@ -1,13 +1,29 @@
$ cat > $TESTTMP/filter.py <<EOF
> from __future__ import absolute_import, print_function
+ > import io
> import re
> import sys
+ > if sys.version_info[0] >= 3:
+ > sys.stdout = io.TextIOWrapper(
+ > sys.stdout.buffer,
+ > sys.stdout.encoding,
+ > sys.stdout.errors,
+ > newline="\n",
+ > line_buffering=sys.stdout.line_buffering,
+ > )
> print(re.sub("\n[ \t]", " ", sys.stdin.read()), end="")
> EOF
$ cat <<EOF >> $HGRCPATH
+ > [experimental]
+ > evolution = true
+ >
> [extensions]
> notify=
+ > strip=
+ >
+ > [phases]
+ > publish=False
>
> [hooks]
> incoming.notify = python:hgext.notify.hook
@@ -15,6 +31,8 @@
> [notify]
> sources = pull
> diffstat = False
+ > reply-to-predecessor = True
+ > messageidseed = notifyseed
>
> [usersubs]
> foo@bar = *
@@ -151,6 +169,15 @@
"From" field of the notification mail. If not set, take the user from the
pushing repo. Default: False.
+ notify.reply-to-predecessor (EXPERIMENTAL)
+ If set and the changeset has a predecessor in the repository, try to thread
+ the notification mail with the predecessor. This adds the "In-Reply-To"
+ header to the notification mail with a reference to the predecessor with the
+ smallest revision number. Mail threads can still be torn, especially when
+ changesets are folded.
+
+ This option must be used in combination with "notify.messageidseed".
+
If set, the following entries will also be used to customize the
notifications:
@@ -205,7 +232,7 @@
adding manifests
adding file changes
added 1 changesets with 2 changes to 2 files
- new changesets 00a13f371396
+ new changesets 00a13f371396 (1 drafts)
MIME-Version: 1.0
Content-Type: text/plain; charset="us-ascii"
Content-Transfer-Encoding: 7bit
@@ -266,7 +293,7 @@
adding manifests
adding file changes
added 1 changesets with 2 changes to 2 files
- new changesets 00a13f371396
+ new changesets 00a13f371396 (1 drafts)
MIME-Version: 1.0
Content-Type: text/plain; charset="us-ascii"
Content-Transfer-Encoding: 7bit
@@ -316,7 +343,7 @@
adding manifests
adding file changes
added 1 changesets with 2 changes to 2 files
- new changesets 00a13f371396
+ new changesets 00a13f371396 (1 drafts)
MIME-Version: 1.0
Content-Type: text/plain; charset="us-ascii"
Content-Transfer-Encoding: 7bit
@@ -369,7 +396,7 @@
adding manifests
adding file changes
added 2 changesets with 0 changes to 0 files
- new changesets 3332653e1f3c:fccf66cd0c35
+ new changesets 3332653e1f3c:fccf66cd0c35 (2 drafts)
MIME-Version: 1.0
Content-Type: text/plain; charset="us-ascii"
Content-Transfer-Encoding: 7bit
@@ -436,7 +463,7 @@
adding manifests
adding file changes
added 1 changesets with 1 changes to 1 files
- new changesets 0f25f9c22b4c
+ new changesets 0f25f9c22b4c (1 drafts)
MIME-Version: 1.0
Content-Type: text/plain; charset="us-ascii"
Content-Transfer-Encoding: 8bit
@@ -480,7 +507,7 @@
adding manifests
adding file changes
added 1 changesets with 1 changes to 1 files
- new changesets a846b5f6ebb7
+ new changesets a846b5f6ebb7 (1 drafts)
notify: sending 2 subscribers 1 changes
(run 'hg update' to get a working copy)
$ cat b/mbox | "$PYTHON" $TESTDIR/unwrap-message-id.py | "$PYTHON" $TESTTMP/filter.py
@@ -493,7 +520,7 @@
Subject: long line
From: test@test.com
X-Hg-Notification: changeset a846b5f6ebb7
- Message-Id: <hg.a846b5f6ebb7.*.*@*> (glob)
+ Message-Id: <hg.e7dc7658565793ff33c797e72b7d1f3799347b042af3c40df6d17c8d5c3e560a@test.com>
To: baz@test.com, foo@bar
changeset a846b5f6ebb7 in b
@@ -543,6 +570,8 @@
(branches are permanent and global, did you want a bookmark?)
$ echo a >> a/a
$ hg --cwd a ci -m test -d '1 0'
+ $ echo a >> a/a
+ $ hg --cwd a ci -m test -d '1 0'
$ hg --traceback --cwd b pull ../a | \
> "$PYTHON" $TESTDIR/unwrap-message-id.py | \
> "$PYTHON" $TESTTMP/filter.py
@@ -551,8 +580,8 @@
adding changesets
adding manifests
adding file changes
- added 1 changesets with 1 changes to 1 files
- new changesets f7e5aaed4080
+ added 2 changesets with 2 changes to 1 files
+ new changesets f7e5aaed4080:485bf79b9464 (2 drafts)
MIME-Version: 1.0
Content-Type: text/plain; charset="us-ascii"
Content-Transfer-Encoding: 7bit
@@ -561,11 +590,24 @@
Subject: test
From: test@test.com
X-Hg-Notification: changeset f7e5aaed4080
- Message-Id: <hg.f7e5aaed4080.*.*@*> (glob)
+ Message-Id: <hg.12e9ae631e2529e9cfbe7a93be0dd8a401280700640f802a60f20d7be659251d@test.com>
To: baz@test.com, foo@bar, notify@example.com
changeset f7e5aaed4080 in b
description: test
+ MIME-Version: 1.0
+ Content-Type: text/plain; charset="us-ascii"
+ Content-Transfer-Encoding: 7bit
+ X-Test: foo
+ Date: * (glob)
+ Subject: test
+ From: test@test.com
+ X-Hg-Notification: changeset 485bf79b9464
+ Message-Id: <hg.15281d60c27d9d5fb70435d33ebc24cb5aa580f2535988dcb9923c26e8bc5c47@test.com>
+ To: baz@test.com, foo@bar, notify@example.com
+
+ changeset 485bf79b9464 in b
+ description: test
(run 'hg update' to get a working copy)
revset selection: don't send to address that waits for mails
@@ -584,7 +626,7 @@
adding manifests
adding file changes
added 1 changesets with 0 changes to 0 files (+1 heads)
- new changesets 645eb6690ecf
+ new changesets 645eb6690ecf (1 drafts)
MIME-Version: 1.0
Content-Type: text/plain; charset="us-ascii"
Content-Transfer-Encoding: 7bit
@@ -593,7 +635,7 @@
Subject: test
From: test@test.com
X-Hg-Notification: changeset 645eb6690ecf
- Message-Id: <hg.645eb6690ecf.*.*@*> (glob)
+ Message-Id: <hg.ba26b2c63e7deb44e86c934aeea147edde12a11b6ac94bda103dcab5028dc928@test.com>
To: baz@test.com, foo@bar
changeset 645eb6690ecf in b
@@ -616,7 +658,7 @@
Subject: changeset in b: default template
From: test@test.com
X-Hg-Notification: changeset 5cd4346eed47
- Message-Id: <hg.5cd4346eed47.*.*@*> (glob)
+ Message-Id: <hg.8caa7941b24fc673d10910cb072e2d167362a3c5111cafefa47190d9b831f0a3@test.com>
To: baz@test.com, foo@bar
changeset 5cd4346eed47 in $TESTTMP/b
@@ -647,7 +689,7 @@
Subject: with style
From: test@test.com
X-Hg-Notification: changeset ec8d9d852f56
- Message-Id: <hg.ec8d9d852f56.*.*@*> (glob)
+ Message-Id: <hg.ccd5049818a6a277251189ce1d6d0cca10723d58214199e7178894adb99ed918@test.com>
To: baz@test.com, foo@bar
changeset ec8d9d852f56
@@ -672,7 +714,7 @@
Subject: 14721b538ae3: with template
From: test@test.com
X-Hg-Notification: changeset 14721b538ae3
- Message-Id: <hg.14721b538ae3.*.*@*> (glob)
+ Message-Id: <hg.7edb9765307a5a24528f3964672e794e2d21f2479e96c099bf52e02abd17b3a2@test.com>
To: baz@test.com, foo@bar
with template
@@ -695,6 +737,8 @@
> EOF
$ hg commit -Am addfunction
adding f1
+ $ hg debugobsolete eeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeee b86bc16ff894f057d023b306936f290954857187
+ 1 new obsolescence markers
$ hg --cwd ../b pull ../a | \
> "$PYTHON" $TESTDIR/unwrap-message-id.py
pulling from ../a
@@ -703,7 +747,8 @@
adding manifests
adding file changes
added 1 changesets with 1 changes to 1 files
- new changesets b86bc16ff894
+ 1 new obsolescence markers
+ new changesets b86bc16ff894 (1 drafts)
MIME-Version: 1.0
Content-Type: text/plain; charset="us-ascii"
Content-Transfer-Encoding: 7bit
@@ -711,7 +756,7 @@
Subject: addfunction
From: test@test.com
X-Hg-Notification: changeset b86bc16ff894
- Message-Id: <hg.b86bc16ff894.*.*@*> (glob)
+ Message-Id: <hg.4c7cacfbbd6ba170656be0c8fc0d7599bd925c0d545b836816be9983e6d08448@test.com>
To: baz@test.com, foo@bar
changeset b86bc16ff894
@@ -739,6 +784,9 @@
> }
> EOF
$ hg commit -m changefunction
+ $ hg debugobsolete 485bf79b9464197b2ed2debd0b16252ad64ed458 e81040e9838c704d8bf17658cb11758f24e40b6b
+ 1 new obsolescence markers
+ obsoleted 1 changesets
$ hg --cwd ../b --config notify.showfunc=True pull ../a | \
> "$PYTHON" $TESTDIR/unwrap-message-id.py
pulling from ../a
@@ -747,7 +795,9 @@
adding manifests
adding file changes
added 1 changesets with 1 changes to 1 files
- new changesets e81040e9838c
+ 1 new obsolescence markers
+ obsoleted 1 changesets
+ new changesets e81040e9838c (1 drafts)
MIME-Version: 1.0
Content-Type: text/plain; charset="us-ascii"
Content-Transfer-Encoding: 7bit
@@ -755,7 +805,8 @@
Subject: changefunction
From: test@test.com
X-Hg-Notification: changeset e81040e9838c
- Message-Id: <hg.e81040e9838c.*.*@*> (glob)
+ Message-Id: <hg.99b80bf1c5d0bf8f8a7e60107c1aa1da367a5943b2a70a8b36517d701557edff@test.com>
+ In-Reply-To: <hg.15281d60c27d9d5fb70435d33ebc24cb5aa580f2535988dcb9923c26e8bc5c47@test.com>
To: baz@test.com, foo@bar
changeset e81040e9838c
@@ -774,3 +825,50 @@
+ return a + b + c + e;
}
(run 'hg update' to get a working copy)
+
+Retry the In-Reply-To, but make sure the oldest known change is older.
+This can happen when folding commits that have been rebased by another user.
+
+ $ hg --cwd ../b strip tip
+ saved backup bundle to $TESTTMP/b/.hg/strip-backup/e81040e9838c-10aad4de-backup.hg
+ $ hg debugobsolete f7e5aaed408029cfe9890318245e87ef44739fdd e81040e9838c704d8bf17658cb11758f24e40b6b
+ 1 new obsolescence markers
+ obsoleted 1 changesets
+ $ hg --cwd ../b --config notify.showfunc=True pull ../a | \
+ > "$PYTHON" $TESTDIR/unwrap-message-id.py
+ pulling from ../a
+ searching for changes
+ adding changesets
+ adding manifests
+ adding file changes
+ added 1 changesets with 1 changes to 1 files
+ 2 new obsolescence markers
+ obsoleted 2 changesets
+ new changesets e81040e9838c (1 drafts)
+ MIME-Version: 1.0
+ Content-Type: text/plain; charset="us-ascii"
+ Content-Transfer-Encoding: 7bit
+ Date: * (glob)
+ Subject: changefunction
+ From: test@test.com
+ X-Hg-Notification: changeset e81040e9838c
+ Message-Id: <hg.99b80bf1c5d0bf8f8a7e60107c1aa1da367a5943b2a70a8b36517d701557edff@test.com>
+ In-Reply-To: <hg.12e9ae631e2529e9cfbe7a93be0dd8a401280700640f802a60f20d7be659251d@test.com>
+ To: baz@test.com, foo@bar
+
+ changeset e81040e9838c
+ diffs (12 lines):
+
+ diff -r b86bc16ff894 -r e81040e9838c f1
+ --- a/f1 Thu Jan 01 00:00:00 1970 +0000
+ +++ b/f1 Thu Jan 01 00:00:00 1970 +0000
+ @@ -2,6 +2,6 @@ int main() {
+ int a = 0;
+ int b = 1;
+ int c = 2;
+ - int d = 3;
+ - return a + b + c + d;
+ + int e = 3;
+ + return a + b + c + e;
+ }
+ (run 'hg update' to get a working copy)
--- a/tests/test-obsolete-distributed.t Mon Apr 13 16:30:13 2020 +0300
+++ b/tests/test-obsolete-distributed.t Thu Apr 16 22:51:09 2020 +0530
@@ -138,12 +138,37 @@
$ hg up 'desc("ROOT")'
0 files updated, 0 files merged, 1 files removed, 0 files unresolved
- $ hg pull
+ $ hg pull --confirm --config ui.interactive=True << EOF
+ > n
+ > EOF
pulling from $TESTTMP/distributed-chain-building/server
searching for changes
adding changesets
adding manifests
adding file changes
+ adding 1 changesets with 1 changes to 1 files (+1 heads)
+ 1 new obsolescence markers
+ obsoleting 1 changesets
+ new changesets 391a2bf12b1b (1 drafts)
+ accept incoming changes (yn)? n
+ transaction abort!
+ rollback completed
+ abort: user aborted
+ [255]
+
+ $ hg pull --confirm --config ui.interactive=True << EOF
+ > y
+ > EOF
+ pulling from $TESTTMP/distributed-chain-building/server
+ searching for changes
+ adding changesets
+ adding manifests
+ adding file changes
+ adding 1 changesets with 1 changes to 1 files (+1 heads)
+ 1 new obsolescence markers
+ obsoleting 1 changesets
+ new changesets 391a2bf12b1b (1 drafts)
+ accept incoming changes (yn)? y
added 1 changesets with 1 changes to 1 files (+1 heads)
1 new obsolescence markers
obsoleted 1 changesets
--- a/tests/test-obsolete-divergent.t Mon Apr 13 16:30:13 2020 +0300
+++ b/tests/test-obsolete-divergent.t Thu Apr 16 22:51:09 2020 +0530
@@ -91,6 +91,9 @@
$ hg log -r 'contentdivergent()'
2:82623d38b9ba A_1
3:392fd25390da A_2
+ $ hg log -r 'unstable()'
+ 2:82623d38b9ba A_1
+ 3:392fd25390da A_2
$ hg debugsuccessorssets 'all()' --closest
d20a80d4def3
d20a80d4def3
--- a/tests/test-obsolete.t Mon Apr 13 16:30:13 2020 +0300
+++ b/tests/test-obsolete.t Thu Apr 16 22:51:09 2020 +0530
@@ -224,6 +224,9 @@
|
o 0:1f0dee641bb7 (public) [ ] add a
+ $ hg log -r 'unstable()'
+ 5:5601fb93a350 (draft phase-divergent) [tip ] add new_3_c
+
And that bumped changeset are detected
--------------------------------------
@@ -377,15 +380,53 @@
2:245bde4270cd (public) [ ] add original_c
6:6f9641995072 (draft) [tip ] add n3w_3_c
-Try to pull markers
+Try to pull markers while testing pull --confirm
(extinct changeset are excluded but marker are pushed)
- $ hg pull ../tmpb
+ $ hg pull ../tmpb --confirm --config ui.interactive=true <<EOF
+ > n
+ > EOF
pulling from ../tmpb
requesting all changes
adding changesets
adding manifests
adding file changes
+ adding 4 changesets with 4 changes to 4 files (+1 heads)
+ 5 new obsolescence markers
+ new changesets 1f0dee641bb7:6f9641995072 (1 drafts)
+ accept incoming changes (yn)? n
+ transaction abort!
+ rollback completed
+ abort: user aborted
+ [255]
+ $ HGPLAIN=1 hg pull ../tmpb --confirm --config ui.interactive=true <<EOF
+ > n
+ > EOF
+ pulling from ../tmpb
+ requesting all changes
+ adding changesets
+ adding manifests
+ adding file changes
+ adding 4 changesets with 4 changes to 4 files (+1 heads)
+ 5 new obsolescence markers
+ new changesets 1f0dee641bb7:6f9641995072 (1 drafts)
+ accept incoming changes (yn)? n
+ transaction abort!
+ rollback completed
+ abort: user aborted
+ [255]
+ $ hg pull ../tmpb --confirm --config ui.interactive=true <<EOF
+ > y
+ > EOF
+ pulling from ../tmpb
+ requesting all changes
+ adding changesets
+ adding manifests
+ adding file changes
+ adding 4 changesets with 4 changes to 4 files (+1 heads)
+ 5 new obsolescence markers
+ new changesets 1f0dee641bb7:6f9641995072 (1 drafts)
+ accept incoming changes (yn)? y
added 4 changesets with 4 changes to 4 files (+1 heads)
5 new obsolescence markers
new changesets 1f0dee641bb7:6f9641995072 (1 drafts)
@@ -544,6 +585,8 @@
1 new obsolescence markers
obsoleted 1 changesets
1 new orphan changesets
+ $ hg log -r 'unstable()'
+ 5:cda648ca50f5 (draft orphan) [tip ] add original_e
$ hg debugobsolete | grep `getid original_d`
94b33453f93bdb8d457ef9b770851a618bf413e1 0 {6f96419950729f3671185b847352890f074f7557} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
$ hg log -r 'obsolete()'
--- a/tests/test-pathconflicts-merge.t Mon Apr 13 16:30:13 2020 +0300
+++ b/tests/test-pathconflicts-merge.t Thu Apr 16 22:51:09 2020 +0530
@@ -75,6 +75,12 @@
M a/b/c/d
A a/b~0ed027b96f31
R a/b
+ $ hg debugmergestate
+ local (working copy): 0ed027b96f31a2560c8abe689ba59876409a2b8e
+ other (merge rev): 9049d9534d5c5d16264aab02b4b9e20d03faabef
+ file: a/b (state "pu")
+ rename side: l
+ renamed path: a/b~0ed027b96f31
$ hg resolve --all
a/b: path conflict must be resolved manually
$ hg forget a/b~0ed027b96f31 && rm a/b~0ed027b96f31
@@ -106,6 +112,12 @@
$ hg mv a/b~2ea68033e3be a/b.old
$ hg resolve --mark a/b
(no more unresolved files)
+ $ hg debugmergestate
+ local (working copy): 2ea68033e3be03a560471c1fc9e5704fbedb9b4b
+ other (merge rev): 9049d9534d5c5d16264aab02b4b9e20d03faabef
+ file: a/b (state "pr")
+ rename side: l
+ renamed path: a/b~2ea68033e3be
$ hg resolve --list
R a/b
$ hg commit -m "merge link and dir (renamed link)"
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-persistent-nodemap.t Thu Apr 16 22:51:09 2020 +0530
@@ -0,0 +1,413 @@
+===================================
+Test the persistent on-disk nodemap
+===================================
+
+ $ hg init test-repo
+ $ cd test-repo
+ $ cat << EOF >> .hg/hgrc
+ > [experimental]
+ > exp-persistent-nodemap=yes
+ > [devel]
+ > persistent-nodemap=yes
+ > EOF
+ $ hg debugbuilddag .+5000
+ $ hg debugnodemap --metadata
+ uid: ???????????????? (glob)
+ tip-rev: 5000
+ tip-node: 06ddac466af534d365326c13c3879f97caca3cb1
+ data-length: 122880
+ data-unused: 0
+ data-unused: 0.000%
+ $ f --size .hg/store/00changelog.n
+ .hg/store/00changelog.n: size=70
+
+Simple lookup works
+
+ $ ANYNODE=`hg log --template '{node|short}\n' --rev tip`
+ $ hg log -r "$ANYNODE" --template '{rev}\n'
+ 5000
+
+
+#if rust
+
+ $ f --sha256 .hg/store/00changelog-*.nd
+ .hg/store/00changelog-????????????????.nd: sha256=1e38e9ffaa45cad13f15c1a9880ad606f4241e8beea2f61b4d5365abadfb55f6 (glob)
+ $ hg debugnodemap --dump-new | f --sha256 --size
+ size=122880, sha256=1e38e9ffaa45cad13f15c1a9880ad606f4241e8beea2f61b4d5365abadfb55f6
+ $ hg debugnodemap --dump-disk | f --sha256 --bytes=256 --hexdump --size
+ size=122880, sha256=1e38e9ffaa45cad13f15c1a9880ad606f4241e8beea2f61b4d5365abadfb55f6
+ 0000: 00 00 00 76 00 00 01 65 00 00 00 95 00 00 01 34 |...v...e.......4|
+ 0010: 00 00 00 19 00 00 01 69 00 00 00 ab 00 00 00 4b |.......i.......K|
+ 0020: 00 00 00 07 00 00 01 4c 00 00 00 f8 00 00 00 8f |.......L........|
+ 0030: 00 00 00 c0 00 00 00 a7 00 00 00 89 00 00 01 46 |...............F|
+ 0040: 00 00 00 92 00 00 01 bc 00 00 00 71 00 00 00 ac |...........q....|
+ 0050: 00 00 00 af 00 00 00 b4 00 00 00 34 00 00 01 ca |...........4....|
+ 0060: 00 00 00 23 00 00 01 45 00 00 00 2d 00 00 00 b2 |...#...E...-....|
+ 0070: 00 00 00 56 00 00 01 0f 00 00 00 4e 00 00 02 4c |...V.......N...L|
+ 0080: 00 00 00 e7 00 00 00 cd 00 00 01 5b 00 00 00 78 |...........[...x|
+ 0090: 00 00 00 e3 00 00 01 8e 00 00 00 4f 00 00 00 b1 |...........O....|
+ 00a0: 00 00 00 30 00 00 00 11 00 00 00 25 00 00 00 d2 |...0.......%....|
+ 00b0: 00 00 00 ec 00 00 00 69 00 00 01 2b 00 00 01 2e |.......i...+....|
+ 00c0: 00 00 00 aa 00 00 00 15 00 00 00 3a 00 00 01 4e |...........:...N|
+ 00d0: 00 00 00 4d 00 00 00 9d 00 00 00 8e 00 00 00 a4 |...M............|
+ 00e0: 00 00 00 c3 00 00 00 eb 00 00 00 29 00 00 00 ad |...........)....|
+ 00f0: 00 00 01 3a 00 00 01 32 00 00 00 04 00 00 00 53 |...:...2.......S|
+
+
+#else
+
+ $ f --sha256 .hg/store/00changelog-*.nd
+ .hg/store/00changelog-????????????????.nd: sha256=b961925120e1c9bc345c199b2cc442abc477029fdece37ef9d99cbe59c0558b7 (glob)
+ $ hg debugnodemap --dump-new | f --sha256 --size
+ size=122880, sha256=b961925120e1c9bc345c199b2cc442abc477029fdece37ef9d99cbe59c0558b7
+ $ hg debugnodemap --dump-disk | f --sha256 --bytes=256 --hexdump --size
+ size=122880, sha256=b961925120e1c9bc345c199b2cc442abc477029fdece37ef9d99cbe59c0558b7
+ 0000: ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff |................|
+ 0010: ff ff ff ff ff ff ff ff ff ff fa c2 ff ff ff ff |................|
+ 0020: ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff |................|
+ 0030: ff ff ff ff ff ff ed b3 ff ff ff ff ff ff ff ff |................|
+ 0040: ff ff ff ff ff ff ee 34 00 00 00 00 ff ff ff ff |.......4........|
+ 0050: ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff |................|
+ 0060: ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff |................|
+ 0070: ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff |................|
+ 0080: ff ff ff ff ff ff f8 50 ff ff ff ff ff ff ff ff |.......P........|
+ 0090: ff ff ff ff ff ff ff ff ff ff ec c7 ff ff ff ff |................|
+ 00a0: ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff |................|
+ 00b0: ff ff ff ff ff ff fa be ff ff f2 fc ff ff ff ff |................|
+ 00c0: ff ff ff ff ff ff ef ea ff ff ff ff ff ff f9 17 |................|
+ 00d0: ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff |................|
+ 00e0: ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff |................|
+ 00f0: ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff |................|
+
+#endif
+
+ $ hg debugnodemap --check
+ revision in index: 5001
+ revision in nodemap: 5001
+
+add a new commit
+
+ $ hg up
+ 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ $ echo foo > foo
+ $ hg add foo
+ $ hg ci -m 'foo'
+
+#if no-pure no-rust
+ $ hg debugnodemap --metadata
+ uid: ???????????????? (glob)
+ tip-rev: 5001
+ tip-node: 2dd9b5258caa46469ff07d4a3da1eb3529a51f49
+ data-length: 122880
+ data-unused: 0
+ data-unused: 0.000%
+#else
+ $ hg debugnodemap --metadata
+ uid: ???????????????? (glob)
+ tip-rev: 5001
+ tip-node: 2dd9b5258caa46469ff07d4a3da1eb3529a51f49
+ data-length: 123072
+ data-unused: 192
+ data-unused: 0.156%
+#endif
+
+ $ f --size .hg/store/00changelog.n
+ .hg/store/00changelog.n: size=70
+
+(The pure code uses the debug code that performs incremental updates, the C code reencodes from scratch)
+
+#if pure
+ $ f --sha256 .hg/store/00changelog-*.nd --size
+ .hg/store/00changelog-????????????????.nd: size=123072, sha256=136472751566c8198ff09e306a7d2f9bd18bd32298d614752b73da4d6df23340 (glob)
+#endif
+
+#if rust
+ $ f --sha256 .hg/store/00changelog-*.nd --size
+ .hg/store/00changelog-????????????????.nd: size=123072, sha256=ccc8a43310ace13812fcc648683e259346754ef934c12dd238cf9b7fadfe9a4b (glob)
+#endif
+
+#if no-pure no-rust
+ $ f --sha256 .hg/store/00changelog-*.nd --size
+ .hg/store/00changelog-????????????????.nd: size=122880, sha256=bfafebd751c4f6d116a76a37a1dee2a251747affe7efbcc4f4842ccc746d4db9 (glob)
+#endif
+
+ $ hg debugnodemap --check
+ revision in index: 5002
+ revision in nodemap: 5002
+
+Test code path without mmap
+---------------------------
+
+ $ echo bar > bar
+ $ hg add bar
+ $ hg ci -m 'bar' --config experimental.exp-persistent-nodemap.mmap=no
+
+ $ hg debugnodemap --check --config experimental.exp-persistent-nodemap.mmap=yes
+ revision in index: 5003
+ revision in nodemap: 5003
+ $ hg debugnodemap --check --config experimental.exp-persistent-nodemap.mmap=no
+ revision in index: 5003
+ revision in nodemap: 5003
+
+
+#if pure
+ $ hg debugnodemap --metadata
+ uid: ???????????????? (glob)
+ tip-rev: 5002
+ tip-node: 6ce944fafcee85af91f29ea5b51654cc6101ad7e
+ data-length: 123328
+ data-unused: 384
+ data-unused: 0.311%
+ $ f --sha256 .hg/store/00changelog-*.nd --size
+ .hg/store/00changelog-????????????????.nd: size=123328, sha256=10d26e9776b6596af0f89143a54eba8cc581e929c38242a02a7b0760698c6c70 (glob)
+#endif
+#if rust
+ $ hg debugnodemap --metadata
+ uid: ???????????????? (glob)
+ tip-rev: 5002
+ tip-node: 6ce944fafcee85af91f29ea5b51654cc6101ad7e
+ data-length: 123328
+ data-unused: 384
+ data-unused: 0.311%
+ $ f --sha256 .hg/store/00changelog-*.nd --size
+ .hg/store/00changelog-????????????????.nd: size=123328, sha256=081eec9eb6708f2bf085d939b4c97bc0b6762bc8336bc4b93838f7fffa1516bf (glob)
+#endif
+#if no-pure no-rust
+ $ hg debugnodemap --metadata
+ uid: ???????????????? (glob)
+ tip-rev: 5002
+ tip-node: 6ce944fafcee85af91f29ea5b51654cc6101ad7e
+ data-length: 122944
+ data-unused: 0
+ data-unused: 0.000%
+ $ f --sha256 .hg/store/00changelog-*.nd --size
+ .hg/store/00changelog-????????????????.nd: size=122944, sha256=755976b22b64ab680401b45395953504e64e7fa8c31ac570f58dee21e15f9bc0 (glob)
+#endif
+
+Test force warming the cache
+
+ $ rm .hg/store/00changelog.n
+ $ hg debugnodemap --metadata
+ $ hg debugupdatecache
+#if pure
+ $ hg debugnodemap --metadata
+ uid: ???????????????? (glob)
+ tip-rev: 5002
+ tip-node: 6ce944fafcee85af91f29ea5b51654cc6101ad7e
+ data-length: 122944
+ data-unused: 0
+ data-unused: 0.000%
+#else
+ $ hg debugnodemap --metadata
+ uid: ???????????????? (glob)
+ tip-rev: 5002
+ tip-node: 6ce944fafcee85af91f29ea5b51654cc6101ad7e
+ data-length: 122944
+ data-unused: 0
+ data-unused: 0.000%
+#endif
+
+Check out of sync nodemap
+=========================
+
+First copy old data on the side.
+
+ $ mkdir ../tmp-copies
+ $ cp .hg/store/00changelog-????????????????.nd .hg/store/00changelog.n ../tmp-copies
+
+Nodemap lagging behind
+----------------------
+
+make a new commit
+
+ $ echo bar2 > bar
+ $ hg ci -m 'bar2'
+ $ NODE=`hg log -r tip -T '{node}\n'`
+ $ hg log -r "$NODE" -T '{rev}\n'
+ 5003
+
+If the nodemap is lagging behind, it can catch up fine
+
+ $ hg debugnodemap --metadata
+ uid: ???????????????? (glob)
+ tip-rev: 5003
+ tip-node: 5c049e9c4a4af159bdcd65dce1b6bf303a0da6cf
+ data-length: 123200 (pure !)
+ data-length: 123200 (rust !)
+ data-length: 122944 (no-rust no-pure !)
+ data-unused: 256 (pure !)
+ data-unused: 256 (rust !)
+ data-unused: 0 (no-rust no-pure !)
+ data-unused: 0.208% (pure !)
+ data-unused: 0.208% (rust !)
+ data-unused: 0.000% (no-rust no-pure !)
+ $ cp -f ../tmp-copies/* .hg/store/
+ $ hg debugnodemap --metadata
+ uid: ???????????????? (glob)
+ tip-rev: 5002
+ tip-node: 6ce944fafcee85af91f29ea5b51654cc6101ad7e
+ data-length: 122944
+ data-unused: 0
+ data-unused: 0.000%
+ $ hg log -r "$NODE" -T '{rev}\n'
+ 5003
+
+changelog altered
+-----------------
+
+If the nodemap is not gated behind a requirement, an unaware client can alter
+the repository so that the revlog used to generate the nodemap is no longer
+compatible with the persistent nodemap. We need to detect that.
+
+ $ hg up "$NODE~5"
+ 0 files updated, 0 files merged, 2 files removed, 0 files unresolved
+ $ echo bar > babar
+ $ hg add babar
+ $ hg ci -m 'babar'
+ created new head
+ $ OTHERNODE=`hg log -r tip -T '{node}\n'`
+ $ hg log -r "$OTHERNODE" -T '{rev}\n'
+ 5004
+
+ $ hg --config extensions.strip= strip --rev "$NODE~1" --no-backup
+
+the nodemap should detect that the changelog has been tampered with and recover.
+
+ $ hg debugnodemap --metadata
+ uid: ???????????????? (glob)
+ tip-rev: 5002
+ tip-node: 42bf3068c7ddfdfded53c4eb11d02266faeebfee
+ data-length: 123456 (pure !)
+ data-length: 123008 (rust !)
+ data-length: 123008 (no-pure no-rust !)
+ data-unused: 448 (pure !)
+ data-unused: 0 (rust !)
+ data-unused: 0 (no-pure no-rust !)
+ data-unused: 0.000% (rust !)
+ data-unused: 0.363% (pure !)
+ data-unused: 0.000% (no-pure no-rust !)
+
+ $ cp -f ../tmp-copies/* .hg/store/
+ $ hg debugnodemap --metadata
+ uid: ???????????????? (glob)
+ tip-rev: 5002
+ tip-node: 6ce944fafcee85af91f29ea5b51654cc6101ad7e
+ data-length: 122944
+ data-unused: 0
+ data-unused: 0.000%
+ $ hg log -r "$OTHERNODE" -T '{rev}\n'
+ 5002
+
+Check transaction related property
+==================================
+
+An up to date nodemap should be available to shell hooks,
+
+ $ echo dsljfl > a
+ $ hg add a
+ $ hg ci -m a
+ $ hg debugnodemap --metadata
+ uid: ???????????????? (glob)
+ tip-rev: 5003
+ tip-node: c91af76d172f1053cca41b83f7c2e4e514fe2bcf
+ data-length: 123008
+ data-unused: 0
+ data-unused: 0.000%
+ $ echo babar2 > babar
+ $ hg ci -m 'babar2' --config "hooks.pretxnclose.nodemap-test=hg debugnodemap --metadata"
+ uid: ???????????????? (glob)
+ tip-rev: 5004
+ tip-node: ba87cd9559559e4b91b28cb140d003985315e031
+ data-length: 123328 (pure !)
+ data-length: 123328 (rust !)
+ data-length: 123136 (no-pure no-rust !)
+ data-unused: 192 (pure !)
+ data-unused: 192 (rust !)
+ data-unused: 0 (no-pure no-rust !)
+ data-unused: 0.156% (pure !)
+ data-unused: 0.156% (rust !)
+ data-unused: 0.000% (no-pure no-rust !)
+ $ hg debugnodemap --metadata
+ uid: ???????????????? (glob)
+ tip-rev: 5004
+ tip-node: ba87cd9559559e4b91b28cb140d003985315e031
+ data-length: 123328 (pure !)
+ data-length: 123328 (rust !)
+ data-length: 123136 (no-pure no-rust !)
+ data-unused: 192 (pure !)
+ data-unused: 192 (rust !)
+ data-unused: 0 (no-pure no-rust !)
+ data-unused: 0.156% (pure !)
+ data-unused: 0.156% (rust !)
+ data-unused: 0.000% (no-pure no-rust !)
+
+Another process does not see the pending nodemap content during the run.
+
+ $ PATH=$RUNTESTDIR/testlib/:$PATH
+ $ echo qpoasp > a
+ $ hg ci -m a2 \
+ > --config "hooks.pretxnclose=wait-on-file 20 sync-repo-read sync-txn-pending" \
+ > --config "hooks.txnclose=touch sync-txn-close" > output.txt 2>&1 &
+
+(read the repository while the commit transaction is pending)
+
+ $ wait-on-file 20 sync-txn-pending && \
+ > hg debugnodemap --metadata && \
+ > wait-on-file 20 sync-txn-close sync-repo-read
+ uid: ???????????????? (glob)
+ tip-rev: 5004
+ tip-node: ba87cd9559559e4b91b28cb140d003985315e031
+ data-length: 123328 (pure !)
+ data-length: 123328 (rust !)
+ data-length: 123136 (no-pure no-rust !)
+ data-unused: 192 (pure !)
+ data-unused: 192 (rust !)
+ data-unused: 0 (no-pure no-rust !)
+ data-unused: 0.156% (pure !)
+ data-unused: 0.156% (rust !)
+ data-unused: 0.000% (no-pure no-rust !)
+ $ hg debugnodemap --metadata
+ uid: ???????????????? (glob)
+ tip-rev: 5005
+ tip-node: bae4d45c759e30f1cb1a40e1382cf0e0414154db
+ data-length: 123584 (pure !)
+ data-length: 123584 (rust !)
+ data-length: 123136 (no-pure no-rust !)
+ data-unused: 448 (pure !)
+ data-unused: 448 (rust !)
+ data-unused: 0 (no-pure no-rust !)
+ data-unused: 0.363% (pure !)
+ data-unused: 0.363% (rust !)
+ data-unused: 0.000% (no-pure no-rust !)
+
+ $ cat output.txt
+
+Check that a failing transaction will properly revert the data
+
+ $ echo plakfe > a
+ $ f --size --sha256 .hg/store/00changelog-*.nd
+ .hg/store/00changelog-????????????????.nd: size=123584, sha256=8c6cef6fd3d3fac291968793ee19a4be6d0b8375e9508bd5c7d4a8879e8df180 (glob) (pure !)
+ .hg/store/00changelog-????????????????.nd: size=123584, sha256=eb9e9a4bcafdb5e1344bc8a0cbb3288b2106413b8efae6265fb8a7973d7e97f9 (glob) (rust !)
+ .hg/store/00changelog-????????????????.nd: size=123136, sha256=4f504f5a834db3811ced50ab3e9e80bcae3581bb0f9b13a7a9f94b7fc34bcebe (glob) (no-pure no-rust !)
+ $ hg ci -m a3 --config "extensions.abort=$RUNTESTDIR/testlib/crash_transaction_late.py"
+ transaction abort!
+ rollback completed
+ abort: This is a late abort
+ [255]
+ $ hg debugnodemap --metadata
+ uid: ???????????????? (glob)
+ tip-rev: 5005
+ tip-node: bae4d45c759e30f1cb1a40e1382cf0e0414154db
+ data-length: 123584 (pure !)
+ data-length: 123584 (rust !)
+ data-length: 123136 (no-pure no-rust !)
+ data-unused: 448 (pure !)
+ data-unused: 448 (rust !)
+ data-unused: 0 (no-pure no-rust !)
+ data-unused: 0.363% (pure !)
+ data-unused: 0.363% (rust !)
+ data-unused: 0.000% (no-pure no-rust !)
+ $ f --size --sha256 .hg/store/00changelog-*.nd
+ .hg/store/00changelog-????????????????.nd: size=123584, sha256=8c6cef6fd3d3fac291968793ee19a4be6d0b8375e9508bd5c7d4a8879e8df180 (glob) (pure !)
+ .hg/store/00changelog-????????????????.nd: size=123584, sha256=eb9e9a4bcafdb5e1344bc8a0cbb3288b2106413b8efae6265fb8a7973d7e97f9 (glob) (rust !)
+ .hg/store/00changelog-????????????????.nd: size=123136, sha256=4f504f5a834db3811ced50ab3e9e80bcae3581bb0f9b13a7a9f94b7fc34bcebe (glob) (no-pure no-rust !)
--- a/tests/test-phabricator.t Mon Apr 13 16:30:13 2020 +0300
+++ b/tests/test-phabricator.t Thu Apr 16 22:51:09 2020 +0530
@@ -2,13 +2,6 @@
$ cat >> $HGRCPATH <<EOF
> [extensions]
> phabricator =
- > EOF
- $ hg init repo
- $ cd repo
- $ cat >> .hg/hgrc <<EOF
- > [phabricator]
- > url = https://phab.mercurial-scm.org/
- > callsign = HG
>
> [auth]
> hgphab.schemes = https
@@ -18,6 +11,16 @@
> # it back. The VCR transcripts will be auto-sanitised to replace your real
> # token with this value.
> hgphab.phabtoken = cli-hahayouwish
+ >
+ > [phabricator]
+ > debug = True
+ > EOF
+ $ hg init repo
+ $ cd repo
+ $ cat >> .hg/hgrc <<EOF
+ > [phabricator]
+ > url = https://phab.mercurial-scm.org/
+ > callsign = HG
> EOF
$ VCR="$TESTDIR/phabricator"
@@ -29,6 +32,22 @@
> --test-vcr "$VCR/phabread-conduit-error.json" D4480 | head
abort: Conduit Error (ERR-INVALID-AUTH): API token "cli-notavalidtoken" has the wrong length. API tokens should be 32 characters long.
+Missing arguments don't crash, and may print the command help
+
+ $ hg debugcallconduit
+ hg debugcallconduit: invalid arguments
+ hg debugcallconduit METHOD
+
+ call Conduit API
+
+ options:
+
+ (use 'hg debugcallconduit -h' to show more help)
+ [255]
+ $ hg phabread
+ abort: empty DREVSPEC set
+ [255]
+
Basic phabread:
$ hg phabread --test-vcr "$VCR/phabread-4480.json" D4480 | head
# HG changeset patch
@@ -42,6 +61,22 @@
This commit establishes a new exchangev2 module for holding
+Phabread with multiple DREVSPEC
+
+TODO: attempt to order related revisions like --stack?
+ $ hg phabread --test-vcr "$VCR/phabread-multi-drev.json" D8205 8206 D8207 \
+ > | grep '^Differential Revision'
+ Differential Revision: https://phab.mercurial-scm.org/D8205
+ Differential Revision: https://phab.mercurial-scm.org/D8206
+ Differential Revision: https://phab.mercurial-scm.org/D8207
+
+Empty DREVSPECs don't crash
+
+ $ hg phabread --test-vcr "$VCR/phabread-empty-drev.json" D7917-D7917
+ abort: empty DREVSPEC set
+ [255]
+
+
phabupdate with an accept:
$ hg phabupdate --accept D4564 \
> -m 'I think I like where this is headed. Will read rest of series later.'\
@@ -58,6 +93,7 @@
adding alpha
$ hg phabsend -r . --test-vcr "$VCR/phabsend-create-alpha.json"
D7915 - created - d386117f30e6: create alpha for phabricator test \xe2\x82\xac (esc)
+ new commits: ['347bf67801e5']
saved backup bundle to $TESTTMP/repo/.hg/strip-backup/d386117f30e6-24ffe649-phabsend.hg
$ echo more >> alpha
$ HGEDITOR=true hg ci --amend
@@ -66,8 +102,10 @@
$ hg ci --addremove -m 'create beta for phabricator test'
adding beta
$ hg phabsend -r ".^::" --test-vcr "$VCR/phabsend-update-alpha-create-beta.json"
+ c44b38f24a45 mapped to old nodes []
D7915 - updated - c44b38f24a45: create alpha for phabricator test \xe2\x82\xac (esc)
D7916 - created - 9e6901f21d5b: create beta for phabricator test
+ new commits: ['a692622e6937']
saved backup bundle to $TESTTMP/repo/.hg/strip-backup/9e6901f21d5b-1fcd4f0e-phabsend.hg
$ unset HGENCODING
@@ -83,6 +121,7 @@
D7917 - created - 7b4185ab5d16: create public change for phabricator testing
D7918 - created - 251c1c333fc6: create draft change for phabricator testing
warning: not updating public commit 2:7b4185ab5d16
+ new commits: ['3244dc4a3334']
saved backup bundle to $TESTTMP/repo/.hg/strip-backup/251c1c333fc6-41cb7c3b-phabsend.hg
$ hg tags -v
tip 3:3244dc4a3334
@@ -128,17 +167,193 @@
adding comment
$ hg phabsend -r . -m "For default branch" --test-vcr "$VCR/phabsend-comment-created.json"
D7919 - created - d5dddca9023d: create comment for phabricator test
+ new commits: ['f7db812bbe1d']
saved backup bundle to $TESTTMP/repo/.hg/strip-backup/d5dddca9023d-adf673ba-phabsend.hg
$ echo comment2 >> comment
$ hg ci --amend
saved backup bundle to $TESTTMP/repo/.hg/strip-backup/f7db812bbe1d-8fcded77-amend.hg
$ hg phabsend -r . -m "Address review comments" --test-vcr "$VCR/phabsend-comment-updated.json"
+ 1849d7828727 mapped to old nodes []
D7919 - updated - 1849d7828727: create comment for phabricator test
Phabsending a skipped commit:
$ hg phabsend --no-amend -r . --test-vcr "$VCR/phabsend-skipped.json"
+ 1849d7828727 mapped to old nodes ['1849d7828727']
D7919 - skipped - 1849d7828727: create comment for phabricator test
+Phabsend doesn't create an instability when rebasing existing revisions on top
+of new revisions.
+
+ $ hg init reorder
+ $ cd reorder
+ $ cat >> .hg/hgrc <<EOF
+ > [phabricator]
+ > url = https://phab.mercurial-scm.org/
+ > callsign = HG
+ > [experimental]
+ > evolution = all
+ > EOF
+
+ $ echo "add" > file1.txt
+ $ hg ci -Aqm 'added'
+ $ echo "mod1" > file1.txt
+ $ hg ci -m 'modified 1'
+ $ echo "mod2" > file1.txt
+ $ hg ci -m 'modified 2'
+ $ hg phabsend -r . --test-vcr "$VCR/phabsend-add-parent-setup.json"
+ D8433 - created - 5d3959e20d1d: modified 2
+ new commits: ['2b4aa8a88d61']
+ $ hg log -G -T compact
+ @ 3[tip]:1 2b4aa8a88d61 1970-01-01 00:00 +0000 test
+ | modified 2
+ |
+ o 1 d549263bcb2d 1970-01-01 00:00 +0000 test
+ | modified 1
+ |
+ o 0 5cbade24e0fa 1970-01-01 00:00 +0000 test
+ added
+
+Also check that it doesn't create more orphans outside of the stack
+
+ $ hg up -q 1
+ $ echo "mod3" > file1.txt
+ $ hg ci -m 'modified 3'
+ created new head
+ $ hg up -q 3
+ $ hg phabsend -r ".^ + ." --test-vcr "$VCR/phabsend-add-parent.json"
+ 2b4aa8a88d61 mapped to old nodes ['2b4aa8a88d61']
+ D8434 - created - d549263bcb2d: modified 1
+ D8433 - updated - 2b4aa8a88d61: modified 2
+ new commits: ['876a60d024de']
+ new commits: ['0c6523cb1d0f']
+ restabilizing 1eda4bf55021 as d2c78c3a3e01
+ $ hg log -G -T compact
+ o 7[tip]:5 d2c78c3a3e01 1970-01-01 00:00 +0000 test
+ | modified 3
+ |
+ | @ 6 0c6523cb1d0f 1970-01-01 00:00 +0000 test
+ |/ modified 2
+ |
+ o 5:0 876a60d024de 1970-01-01 00:00 +0000 test
+ | modified 1
+ |
+ o 0 5cbade24e0fa 1970-01-01 00:00 +0000 test
+ added
+
+Posting obsolete commits is disallowed
+
+ $ echo "mod3" > file1.txt
+ $ hg ci -m 'modified A'
+ $ echo "mod4" > file1.txt
+ $ hg ci -m 'modified B'
+
+ $ hg up '.^'
+ 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ $ echo 'obsolete' > file1.txt
+ $ hg amend --config extensions.amend=
+ 1 new orphan changesets
+ $ hg log -G
+ @ changeset: 10:082be6c94150
+ | tag: tip
+ | parent: 6:0c6523cb1d0f
+ | user: test
+ | date: Thu Jan 01 00:00:00 1970 +0000
+ | summary: modified A
+ |
+ | * changeset: 9:a67643f48146
+ | | user: test
+ | | date: Thu Jan 01 00:00:00 1970 +0000
+ | | instability: orphan
+ | | summary: modified B
+ | |
+ | x changeset: 8:db79727cb2f7
+ |/ parent: 6:0c6523cb1d0f
+ | user: test
+ | date: Thu Jan 01 00:00:00 1970 +0000
+ | obsolete: rewritten using amend as 10:082be6c94150
+ | summary: modified A
+ |
+ | o changeset: 7:d2c78c3a3e01
+ | | parent: 5:876a60d024de
+ | | user: test
+ | | date: Thu Jan 01 00:00:00 1970 +0000
+ | | summary: modified 3
+ | |
+ o | changeset: 6:0c6523cb1d0f
+ |/ user: test
+ | date: Thu Jan 01 00:00:00 1970 +0000
+ | summary: modified 2
+ |
+ o changeset: 5:876a60d024de
+ | parent: 0:5cbade24e0fa
+ | user: test
+ | date: Thu Jan 01 00:00:00 1970 +0000
+ | summary: modified 1
+ |
+ o changeset: 0:5cbade24e0fa
+ user: test
+ date: Thu Jan 01 00:00:00 1970 +0000
+ summary: added
+
+ $ hg phabsend -r 5::
+ abort: obsolete commits cannot be posted for review
+ [255]
+
+Don't restack existing orphans
+
+ $ hg phabsend -r 5::tip --test-vcr "$VCR/phabsend-no-restack-orphan.json"
+ 876a60d024de mapped to old nodes ['876a60d024de']
+ 0c6523cb1d0f mapped to old nodes ['0c6523cb1d0f']
+ D8434 - updated - 876a60d024de: modified 1
+ D8433 - updated - 0c6523cb1d0f: modified 2
+ D8435 - created - 082be6c94150: modified A
+ new commits: ['b5913193c805']
+ not restabilizing unchanged d2c78c3a3e01
+ $ hg log -G
+ @ changeset: 11:b5913193c805
+ | tag: tip
+ | parent: 6:0c6523cb1d0f
+ | user: test
+ | date: Thu Jan 01 00:00:00 1970 +0000
+ | summary: modified A
+ |
+ | * changeset: 9:a67643f48146
+ | | user: test
+ | | date: Thu Jan 01 00:00:00 1970 +0000
+ | | instability: orphan
+ | | summary: modified B
+ | |
+ | x changeset: 8:db79727cb2f7
+ |/ parent: 6:0c6523cb1d0f
+ | user: test
+ | date: Thu Jan 01 00:00:00 1970 +0000
+ | obsolete: rewritten using amend, phabsend as 11:b5913193c805
+ | summary: modified A
+ |
+ | o changeset: 7:d2c78c3a3e01
+ | | parent: 5:876a60d024de
+ | | user: test
+ | | date: Thu Jan 01 00:00:00 1970 +0000
+ | | summary: modified 3
+ | |
+ o | changeset: 6:0c6523cb1d0f
+ |/ user: test
+ | date: Thu Jan 01 00:00:00 1970 +0000
+ | summary: modified 2
+ |
+ o changeset: 5:876a60d024de
+ | parent: 0:5cbade24e0fa
+ | user: test
+ | date: Thu Jan 01 00:00:00 1970 +0000
+ | summary: modified 1
+ |
+ o changeset: 0:5cbade24e0fa
+ user: test
+ date: Thu Jan 01 00:00:00 1970 +0000
+ summary: added
+
+ $ cd ..
+
Phabesending a new binary, a modified binary, and a removed binary
>>> open('bin', 'wb').write(b'\0a') and None
@@ -154,6 +369,9 @@
uploading bin@d8d62a881b54
D8008 - created - d8d62a881b54: modify binary
D8009 - created - af55645b2e29: remove binary
+ new commits: ['b8139fbb4a57']
+ new commits: ['c88ce4c2d2ad']
+ new commits: ['75dbbc901145']
saved backup bundle to $TESTTMP/repo/.hg/strip-backup/aa24a81f55de-a3a0cf24-phabsend.hg
Phabsend a renamed binary and a copied binary, with and without content changes
@@ -202,6 +420,11 @@
uploading bin2_moved_copied@1b87b363a5e4
uploading bin2_moved@1b87b363a5e4
D8132 - created - 1b87b363a5e4: copy+mod moved binary
+ new commits: ['90437c20312a']
+ new commits: ['f391f4da4c61']
+ new commits: ['da86a9f3268c']
+ new commits: ['003ffc16ba66']
+ new commits: ['13bd750c36fa']
saved backup bundle to $TESTTMP/repo/.hg/strip-backup/f42f9195e00c-e82a0769-phabsend.hg
Phabreading a DREV with a local:commits time as a string:
@@ -280,6 +503,7 @@
$ hg config phabricator --debug
invalid JSON in $TESTTMP/repo/.arcconfig
read config from: */.hgrc (glob)
+ */.hgrc:*: phabricator.debug=True (glob)
$TESTTMP/repo/.hg/hgrc:*: phabricator.url=https://phab.mercurial-scm.org/ (glob)
$TESTTMP/repo/.hg/hgrc:*: phabricator.callsign=HG (glob)
@@ -293,6 +517,7 @@
$ mv .hg/hgrc .hg/hgrc.bak
$ hg config phabricator --debug
read config from: */.hgrc (glob)
+ */.hgrc:*: phabricator.debug=True (glob)
$TESTTMP/repo/.arcconfig: phabricator.callsign=HG
$TESTTMP/repo/.arcconfig: phabricator.url=https://phab.mercurial-scm.org/
@@ -304,8 +529,433 @@
> EOF
$ hg config phabricator --debug
read config from: */.hgrc (glob)
+ */.hgrc:*: phabricator.debug=True (glob)
$TESTTMP/repo/.hg/hgrc:*: phabricator.url=local (glob)
$TESTTMP/repo/.hg/hgrc:*: phabricator.callsign=local (glob)
$ mv .hg/hgrc.bak .hg/hgrc
+Phabimport works with a stack
+
$ cd ..
+ $ hg clone repo repo2 -qr 1
+ $ cp repo/.hg/hgrc repo2/.hg/
+ $ cd repo2
+ $ hg phabimport --stack 'D7918' --test-vcr "$VCR/phabimport-stack.json"
+ applying patch from D7917
+ applying patch from D7918
+ $ hg log -r .: -G -Tcompact
+ o 3[tip] aaef04066140 1970-01-01 00:00 +0000 test
+ | create draft change for phabricator testing
+ |
+ o 2 8de3712202d1 1970-01-01 00:00 +0000 test
+ | create public change for phabricator testing
+ |
+ @ 1 a692622e6937 1970-01-01 00:00 +0000 test
+ | create beta for phabricator test
+ ~
+Phabimport can create secret commits
+
+ $ hg rollback --config ui.rollback=True
+ repository tip rolled back to revision 1 (undo phabimport)
+ $ hg phabimport --stack 'D7918' --test-vcr "$VCR/phabimport-stack.json" \
+ > --config phabimport.secret=True
+ applying patch from D7917
+ applying patch from D7918
+ $ hg log -r 'reverse(.:)' -T phases
+ changeset: 3:aaef04066140
+ tag: tip
+ phase: secret
+ user: test
+ date: Thu Jan 01 00:00:00 1970 +0000
+ summary: create draft change for phabricator testing
+
+ changeset: 2:8de3712202d1
+ phase: secret
+ user: test
+ date: Thu Jan 01 00:00:00 1970 +0000
+ summary: create public change for phabricator testing
+
+ changeset: 1:a692622e6937
+ phase: public
+ user: test
+ date: Thu Jan 01 00:00:00 1970 +0000
+ summary: create beta for phabricator test
+
+Phabimport accepts multiple DREVSPECs
+
+ $ hg rollback --config ui.rollback=True
+ repository tip rolled back to revision 1 (undo phabimport)
+ $ hg phabimport --no-stack D7917 D7918 --test-vcr "$VCR/phabimport-multi-drev.json"
+ applying patch from D7917
+ applying patch from D7918
+
+Validate arguments with --fold
+
+ $ hg phabsend --fold -r 1
+ abort: cannot fold a single revision
+ [255]
+ $ hg phabsend --fold --no-amend -r 1::
+ abort: cannot fold with --no-amend
+ [255]
+ $ hg phabsend --fold -r 0+3
+ abort: cannot fold non-linear revisions
+ [255]
+ $ hg phabsend --fold -r 1::
+ abort: cannot fold revisions with different DREV values
+ [255]
+
+Setup a series of commits to be folded, and include the Test Plan field multiple
+times to test the concatenation logic. No Test Plan field in the last one to
+ensure missing fields are skipped.
+
+ $ hg init ../folded
+ $ cd ../folded
+ $ cat >> .hg/hgrc <<EOF
+ > [phabricator]
+ > url = https://phab.mercurial-scm.org/
+ > callsign = HG
+ > EOF
+
+ $ echo 'added' > file.txt
+ $ hg ci -Aqm 'added file'
+
+ $ cat > log.txt <<EOF
+ > one: first commit to review
+ >
+ > This file was modified with 'mod1' as its contents.
+ >
+ > Test Plan:
+ > LOL! What testing?!
+ > EOF
+ $ echo mod1 > file.txt
+ $ hg ci -l log.txt
+
+ $ cat > log.txt <<EOF
+ > two: second commit to review
+ >
+ > This file was modified with 'mod2' as its contents.
+ >
+ > Test Plan:
+ > Haha! yeah, right.
+ >
+ > EOF
+ $ echo mod2 > file.txt
+ $ hg ci -l log.txt
+
+ $ echo mod3 > file.txt
+ $ hg ci -m '3: a commit with no detailed message'
+
+The folding of immutable commits works...
+
+ $ hg phase -r tip --public
+ $ hg phabsend --fold -r 1:: --test-vcr "$VCR/phabsend-fold-immutable.json"
+ D8386 - created - a959a3f69d8d: one: first commit to review
+ D8386 - created - 24a4438154ba: two: second commit to review
+ D8386 - created - d235829e802c: 3: a commit with no detailed message
+ warning: not updating public commit 1:a959a3f69d8d
+ warning: not updating public commit 2:24a4438154ba
+ warning: not updating public commit 3:d235829e802c
+ no newnodes to update
+
+ $ hg phase -r 0 --draft --force
+
+... as does the initial mutable fold...
+
+ $ echo y | hg phabsend --fold --confirm -r 1:: \
+ > --test-vcr "$VCR/phabsend-fold-initial.json"
+ NEW - a959a3f69d8d: one: first commit to review
+ NEW - 24a4438154ba: two: second commit to review
+ NEW - d235829e802c: 3: a commit with no detailed message
+ Send the above changes to https://phab.mercurial-scm.org/ (yn)? y
+ D8387 - created - a959a3f69d8d: one: first commit to review
+ D8387 - created - 24a4438154ba: two: second commit to review
+ D8387 - created - d235829e802c: 3: a commit with no detailed message
+ updating local commit list for D8387
+ new commits: ['602c4e738243', '832553266fe8', '921f8265efbd']
+ saved backup bundle to $TESTTMP/folded/.hg/strip-backup/a959a3f69d8d-a4a24136-phabsend.hg
+
+... and doesn't mangle the local commits.
+
+ $ hg log -T '{rev}:{node|short}\n{indent(desc, " ")}\n'
+ 3:921f8265efbd
+ 3: a commit with no detailed message
+
+ Differential Revision: https://phab.mercurial-scm.org/D8387
+ 2:832553266fe8
+ two: second commit to review
+
+ This file was modified with 'mod2' as its contents.
+
+ Test Plan:
+ Haha! yeah, right.
+
+ Differential Revision: https://phab.mercurial-scm.org/D8387
+ 1:602c4e738243
+ one: first commit to review
+
+ This file was modified with 'mod1' as its contents.
+
+ Test Plan:
+ LOL! What testing?!
+
+ Differential Revision: https://phab.mercurial-scm.org/D8387
+ 0:98d480e0d494
+ added file
+
+Setup some obsmarkers by adding a file to the middle commit. This stress tests
+getoldnodedrevmap() in later phabsends.
+
+ $ hg up '.^'
+ 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ $ echo 'modified' > file2.txt
+ $ hg add file2.txt
+ $ hg amend --config experimental.evolution=all --config extensions.amend=
+ 1 new orphan changesets
+ $ hg up 3
+ obsolete feature not enabled but 1 markers found!
+ 1 files updated, 0 files merged, 1 files removed, 0 files unresolved
+ $ hg rebase --config experimental.evolution=all --config extensions.rebase=
+ note: not rebasing 2:832553266fe8 "two: second commit to review", already in destination as 4:0124e5474c88 "two: second commit to review" (tip)
+ rebasing 3:921f8265efbd "3: a commit with no detailed message"
+
+When commits have changed locally, the local commit list on Phabricator is
+updated.
+
+ $ echo y | hg phabsend --fold --confirm -r 1:: \
+ > --test-vcr "$VCR/phabsend-fold-updated.json"
+ obsolete feature not enabled but 2 markers found!
+ 602c4e738243 mapped to old nodes ['602c4e738243']
+ 0124e5474c88 mapped to old nodes ['832553266fe8']
+ e4edb1fe3565 mapped to old nodes ['921f8265efbd']
+ D8387 - 602c4e738243: one: first commit to review
+ D8387 - 0124e5474c88: two: second commit to review
+ D8387 - e4edb1fe3565: 3: a commit with no detailed message
+ Send the above changes to https://phab.mercurial-scm.org/ (yn)? y
+ D8387 - updated - 602c4e738243: one: first commit to review
+ D8387 - updated - 0124e5474c88: two: second commit to review
+ D8387 - updated - e4edb1fe3565: 3: a commit with no detailed message
+ obsolete feature not enabled but 2 markers found! (?)
+ updating local commit list for D8387
+ new commits: ['602c4e738243', '0124e5474c88', 'e4edb1fe3565']
+ $ hg log -Tcompact
+ obsolete feature not enabled but 2 markers found!
+ 5[tip] e4edb1fe3565 1970-01-01 00:00 +0000 test
+ 3: a commit with no detailed message
+
+ 4:1 0124e5474c88 1970-01-01 00:00 +0000 test
+ two: second commit to review
+
+ 1 602c4e738243 1970-01-01 00:00 +0000 test
+ one: first commit to review
+
+ 0 98d480e0d494 1970-01-01 00:00 +0000 test
+ added file
+
+When nothing has changed locally since the last phabsend, the commit list isn't
+updated, and nothing is changed locally afterward.
+
+ $ hg phabsend --fold -r 1:: --test-vcr "$VCR/phabsend-fold-no-changes.json"
+ obsolete feature not enabled but 2 markers found!
+ 602c4e738243 mapped to old nodes ['602c4e738243']
+ 0124e5474c88 mapped to old nodes ['0124e5474c88']
+ e4edb1fe3565 mapped to old nodes ['e4edb1fe3565']
+ D8387 - updated - 602c4e738243: one: first commit to review
+ D8387 - updated - 0124e5474c88: two: second commit to review
+ D8387 - updated - e4edb1fe3565: 3: a commit with no detailed message
+ obsolete feature not enabled but 2 markers found! (?)
+ local commit list for D8387 is already up-to-date
+ $ hg log -Tcompact
+ obsolete feature not enabled but 2 markers found!
+ 5[tip] e4edb1fe3565 1970-01-01 00:00 +0000 test
+ 3: a commit with no detailed message
+
+ 4:1 0124e5474c88 1970-01-01 00:00 +0000 test
+ two: second commit to review
+
+ 1 602c4e738243 1970-01-01 00:00 +0000 test
+ one: first commit to review
+
+ 0 98d480e0d494 1970-01-01 00:00 +0000 test
+ added file
+
+Fold will accept new revisions at the end...
+
+ $ echo 'another mod' > file2.txt
+ $ hg ci -m 'four: extend the fold range'
+ obsolete feature not enabled but 2 markers found!
+ $ hg phabsend --fold -r 1:: --test-vcr "$VCR/phabsend-fold-extend-end.json" \
+ > --config experimental.evolution=all
+ 602c4e738243 mapped to old nodes ['602c4e738243']
+ 0124e5474c88 mapped to old nodes ['0124e5474c88']
+ e4edb1fe3565 mapped to old nodes ['e4edb1fe3565']
+ D8387 - updated - 602c4e738243: one: first commit to review
+ D8387 - updated - 0124e5474c88: two: second commit to review
+ D8387 - updated - e4edb1fe3565: 3: a commit with no detailed message
+ D8387 - created - 94aaae213b23: four: extend the fold range
+ updating local commit list for D8387
+ new commits: ['602c4e738243', '0124e5474c88', 'e4edb1fe3565', '51a04fea8707']
+ $ hg log -r . -T '{desc}\n'
+ four: extend the fold range
+
+ Differential Revision: https://phab.mercurial-scm.org/D8387
+ $ hg log -T'{rev} {if(phabreview, "{phabreview.url} {phabreview.id}")}\n' -r 1::
+ obsolete feature not enabled but 3 markers found!
+ 1 https://phab.mercurial-scm.org/D8387 D8387
+ 4 https://phab.mercurial-scm.org/D8387 D8387
+ 5 https://phab.mercurial-scm.org/D8387 D8387
+ 7 https://phab.mercurial-scm.org/D8387 D8387
+
+... and also accepts new revisions at the beginning of the range
+
+It's a bit unfortunate that not having a Differential URL on the first commit
+causes a new Differential Revision to be created, though it isn't *entirely*
+unreasonable. At least this updates the subsequent commits.
+
+TODO: See if it can reuse the existing Differential.
+
+ $ hg phabsend --fold -r 0:: --test-vcr "$VCR/phabsend-fold-extend-front.json" \
+ > --config experimental.evolution=all
+ 602c4e738243 mapped to old nodes ['602c4e738243']
+ 0124e5474c88 mapped to old nodes ['0124e5474c88']
+ e4edb1fe3565 mapped to old nodes ['e4edb1fe3565']
+ 51a04fea8707 mapped to old nodes ['51a04fea8707']
+ D8388 - created - 98d480e0d494: added file
+ D8388 - updated - 602c4e738243: one: first commit to review
+ D8388 - updated - 0124e5474c88: two: second commit to review
+ D8388 - updated - e4edb1fe3565: 3: a commit with no detailed message
+ D8388 - updated - 51a04fea8707: four: extend the fold range
+ updating local commit list for D8388
+ new commits: ['15e9b14b4b4c', '6320b7d714cf', '3ee132d41dbc', '30682b960804', 'ac7db67f0991']
+
+ $ hg log -T '{rev}:{node|short}\n{indent(desc, " ")}\n'
+ obsolete feature not enabled but 8 markers found!
+ 12:ac7db67f0991
+ four: extend the fold range
+
+ Differential Revision: https://phab.mercurial-scm.org/D8388
+ 11:30682b960804
+ 3: a commit with no detailed message
+
+ Differential Revision: https://phab.mercurial-scm.org/D8388
+ 10:3ee132d41dbc
+ two: second commit to review
+
+ This file was modified with 'mod2' as its contents.
+
+ Test Plan:
+ Haha! yeah, right.
+
+ Differential Revision: https://phab.mercurial-scm.org/D8388
+ 9:6320b7d714cf
+ one: first commit to review
+
+ This file was modified with 'mod1' as its contents.
+
+ Test Plan:
+ LOL! What testing?!
+
+ Differential Revision: https://phab.mercurial-scm.org/D8388
+ 8:15e9b14b4b4c
+ added file
+
+ Differential Revision: https://phab.mercurial-scm.org/D8388
+
+Test phabsend --fold with an `hg split` at the end of the range
+
+ $ echo foo > file3.txt
+ $ hg add file3.txt
+
+ $ hg log -r . -T '{desc}' > log.txt
+ $ echo 'amended mod' > file2.txt
+ $ hg ci --amend -l log.txt --config experimental.evolution=all
+
+ $ cat <<EOF | hg --config extensions.split= --config ui.interactive=True \
+ > --config experimental.evolution=all split -r .
+ > n
+ > y
+ > y
+ > y
+ > y
+ > EOF
+ diff --git a/file2.txt b/file2.txt
+ 1 hunks, 1 lines changed
+ examine changes to 'file2.txt'?
+ (enter ? for help) [Ynesfdaq?] n
+
+ diff --git a/file3.txt b/file3.txt
+ new file mode 100644
+ examine changes to 'file3.txt'?
+ (enter ? for help) [Ynesfdaq?] y
+
+ @@ -0,0 +1,1 @@
+ +foo
+ record change 2/2 to 'file3.txt'?
+ (enter ? for help) [Ynesfdaq?] y
+
+ created new head
+ diff --git a/file2.txt b/file2.txt
+ 1 hunks, 1 lines changed
+ examine changes to 'file2.txt'?
+ (enter ? for help) [Ynesfdaq?] y
+
+ @@ -1,1 +1,1 @@
+ -modified
+ +amended mod
+ record this change to 'file2.txt'?
+ (enter ? for help) [Ynesfdaq?] y
+
+ $ hg phabsend --fold -r 8:: --test-vcr "$VCR/phabsend-fold-split-end.json" \
+ > --config experimental.evolution=all
+ 15e9b14b4b4c mapped to old nodes ['15e9b14b4b4c']
+ 6320b7d714cf mapped to old nodes ['6320b7d714cf']
+ 3ee132d41dbc mapped to old nodes ['3ee132d41dbc']
+ 30682b960804 mapped to old nodes ['30682b960804']
+ 6bc15dc99efd mapped to old nodes ['ac7db67f0991']
+ b50946d5e490 mapped to old nodes ['ac7db67f0991']
+ D8388 - updated - 15e9b14b4b4c: added file
+ D8388 - updated - 6320b7d714cf: one: first commit to review
+ D8388 - updated - 3ee132d41dbc: two: second commit to review
+ D8388 - updated - 30682b960804: 3: a commit with no detailed message
+ D8388 - updated - 6bc15dc99efd: four: extend the fold range
+ D8388 - updated - b50946d5e490: four: extend the fold range
+ updating local commit list for D8388
+ new commits: ['15e9b14b4b4c', '6320b7d714cf', '3ee132d41dbc', '30682b960804', '6bc15dc99efd', 'b50946d5e490']
+
+Test phabsend --fold with an `hg fold` at the end of the range
+
+ $ hg --config experimental.evolution=all --config extensions.rebase= \
+ > rebase -r '.^' -r . -d '.^^' --collapse -l log.txt
+ rebasing 14:6bc15dc99efd "four: extend the fold range"
+ rebasing 15:b50946d5e490 "four: extend the fold range" (tip)
+
+ $ hg phabsend --fold -r 8:: --test-vcr "$VCR/phabsend-fold-fold-end.json" \
+ > --config experimental.evolution=all
+ 15e9b14b4b4c mapped to old nodes ['15e9b14b4b4c']
+ 6320b7d714cf mapped to old nodes ['6320b7d714cf']
+ 3ee132d41dbc mapped to old nodes ['3ee132d41dbc']
+ 30682b960804 mapped to old nodes ['30682b960804']
+ e919cdf3d4fe mapped to old nodes ['6bc15dc99efd', 'b50946d5e490']
+ D8388 - updated - 15e9b14b4b4c: added file
+ D8388 - updated - 6320b7d714cf: one: first commit to review
+ D8388 - updated - 3ee132d41dbc: two: second commit to review
+ D8388 - updated - 30682b960804: 3: a commit with no detailed message
+ D8388 - updated - e919cdf3d4fe: four: extend the fold range
+ updating local commit list for D8388
+ new commits: ['15e9b14b4b4c', '6320b7d714cf', '3ee132d41dbc', '30682b960804', 'e919cdf3d4fe']
+
+ $ hg log -r tip -v
+ obsolete feature not enabled but 12 markers found!
+ changeset: 16:e919cdf3d4fe
+ tag: tip
+ parent: 11:30682b960804
+ user: test
+ date: Thu Jan 01 00:00:00 1970 +0000
+ files: file2.txt file3.txt
+ description:
+ four: extend the fold range
+
+ Differential Revision: https://phab.mercurial-scm.org/D8388
+
+
+
+ $ cd ..
--- a/tests/test-phases-exchange.t Mon Apr 13 16:30:13 2020 +0300
+++ b/tests/test-phases-exchange.t Thu Apr 16 22:51:09 2020 +0530
@@ -326,12 +326,18 @@
o 0 public a-A - 054250a37db4
$ cd ../mu
- $ hg pull ../nu
+ $ hg pull ../nu --confirm --config ui.interactive=True<<EOF
+ > y
+ > EOF
pulling from ../nu
searching for changes
adding changesets
adding manifests
adding file changes
+ adding 2 changesets with 2 changes to 2 files
+ new changesets d6bcb4f74035:145e75495359 (2 drafts)
+ 4 local changesets will be published
+ accept incoming changes (yn)? y
added 2 changesets with 2 changes to 2 files
new changesets d6bcb4f74035:145e75495359 (2 drafts)
4 local changesets published
--- a/tests/test-phases.t Mon Apr 13 16:30:13 2020 +0300
+++ b/tests/test-phases.t Thu Apr 16 22:51:09 2020 +0530
@@ -896,11 +896,13 @@
$ hg --config "phases.new-commit=internal" commit -m "my test internal commit" 2>&1 | grep ProgrammingError
** ProgrammingError: this repository does not support the internal phase
raise error.ProgrammingError(msg)
- mercurial.error.ProgrammingError: this repository does not support the internal phase
+ mercurial.error.ProgrammingError: this repository does not support the internal phase (no-chg !)
+ ProgrammingError: this repository does not support the internal phase (chg !)
$ hg --config "phases.new-commit=archived" commit -m "my test archived commit" 2>&1 | grep ProgrammingError
** ProgrammingError: this repository does not support the archived phase
raise error.ProgrammingError(msg)
- mercurial.error.ProgrammingError: this repository does not support the archived phase
+ mercurial.error.ProgrammingError: this repository does not support the archived phase (no-chg !)
+ ProgrammingError: this repository does not support the archived phase (chg !)
$ cd ..
--- a/tests/test-pull-r.t Mon Apr 13 16:30:13 2020 +0300
+++ b/tests/test-pull-r.t Thu Apr 16 22:51:09 2020 +0530
@@ -1,3 +1,9 @@
+ $ cat <<EOF >> $HGRCPATH
+ > [ui]
+ > interactive = true
+ > EOF
+
+
$ hg init repo
$ cd repo
$ echo foo > foo
@@ -42,12 +48,47 @@
$ hg heads -q --closed
2:effea6de0384
1:ed1b79f46b9a
- $ hg pull
+ $ hg pull --confirm << EOF
+ > n
+ > EOF
pulling from $TESTTMP/repo2
searching for changes
adding changesets
adding manifests
adding file changes
+ adding 2 changesets with 1 changes to 1 files
+ new changesets 8c900227dd5d:00cfe9073916
+ accept incoming changes (yn)? n
+ transaction abort!
+ rollback completed
+ abort: user aborted
+ [255]
+ $ hg pull --config pull.confirm=true << EOF
+ > n
+ > EOF
+ pulling from $TESTTMP/repo2
+ searching for changes
+ adding changesets
+ adding manifests
+ adding file changes
+ adding 2 changesets with 1 changes to 1 files
+ new changesets 8c900227dd5d:00cfe9073916
+ accept incoming changes (yn)? n
+ transaction abort!
+ rollback completed
+ abort: user aborted
+ [255]
+ $ hg pull --confirm << EOF
+ > y
+ > EOF
+ pulling from $TESTTMP/repo2
+ searching for changes
+ adding changesets
+ adding manifests
+ adding file changes
+ adding 2 changesets with 1 changes to 1 files
+ new changesets 8c900227dd5d:00cfe9073916
+ accept incoming changes (yn)? y
added 2 changesets with 1 changes to 1 files
new changesets 8c900227dd5d:00cfe9073916
(run 'hg update' to get a working copy)
@@ -56,6 +97,12 @@
2:effea6de0384
1:ed1b79f46b9a
+pull--confirm config option should be ignored if HGPLAIN is set
+ $ HGPLAIN=1 hg pull --config pull.confirm=True
+ pulling from $TESTTMP/repo2
+ searching for changes
+ no changes found
+
$ cd ..
$ hg init copy
--- a/tests/test-purge.t Mon Apr 13 16:30:13 2020 +0300
+++ b/tests/test-purge.t Thu Apr 16 22:51:09 2020 +0530
@@ -25,7 +25,9 @@
empty_dir
$ hg purge -v
removing directory empty_dir
- $ ls
+ $ ls -A
+ .hg
+ .hgignore
directory
r1
@@ -41,7 +43,9 @@
removing file untracked_dir/untracked_file1
removing file untracked_dir/untracked_file2
removing directory untracked_dir
- $ ls
+ $ ls -A
+ .hg
+ .hgignore
directory
r1
@@ -61,7 +65,9 @@
$ hg purge -v
removing file untracked_file
removing file untracked_file_readonly
- $ ls
+ $ ls -A
+ .hg
+ .hgignore
directory
r1
@@ -72,7 +78,9 @@
directory/untracked_file
$ hg purge -v
removing file directory/untracked_file
- $ ls
+ $ ls -A
+ .hg
+ .hgignore
directory
r1
@@ -84,7 +92,9 @@
$ hg purge -v
removing directory untracked_directory/nested_directory
removing directory untracked_directory
- $ ls
+ $ ls -A
+ .hg
+ .hgignore
directory
r1
@@ -98,7 +108,9 @@
removing directory untracked_directory/nested_directory
removing directory untracked_directory
$ cd ..
- $ ls
+ $ ls -A
+ .hg
+ .hgignore
directory
r1
@@ -113,26 +125,43 @@
removing directory untracked_directory/nested_directory
removing directory untracked_directory
$ cd ..
- $ ls
+ $ ls -A
+ .hg
+ .hgignore
directory
r1
$ ls directory/untracked_file
directory/untracked_file
$ rm directory/untracked_file
-skip ignored files if --all not specified
+skip ignored files if -i or --all not specified
$ touch ignored
$ hg purge -p
$ hg purge -v
+ $ touch untracked_file
$ ls
directory
ignored
r1
+ untracked_file
+ $ hg purge -p -i
+ ignored
+ $ hg purge -v -i
+ removing file ignored
+ $ ls -A
+ .hg
+ .hgignore
+ directory
+ r1
+ untracked_file
+ $ touch ignored
$ hg purge -p --all
ignored
+ untracked_file
$ hg purge -v --all
removing file ignored
+ removing file untracked_file
$ ls
directory
r1
@@ -176,7 +205,9 @@
$ touch excluded_file
$ hg purge -p -X excluded_file
$ hg purge -v -X excluded_file
- $ ls
+ $ ls -A
+ .hg
+ .hgignore
directory
excluded_file
r1
@@ -188,7 +219,9 @@
$ touch excluded_dir/file
$ hg purge -p -X excluded_dir
$ hg purge -v -X excluded_dir
- $ ls
+ $ ls -A
+ .hg
+ .hgignore
directory
excluded_dir
r1
@@ -201,7 +234,9 @@
$ mkdir excluded_dir
$ hg purge -p -X excluded_dir
$ hg purge -v -X excluded_dir
- $ ls
+ $ ls -A
+ .hg
+ .hgignore
directory
excluded_dir
r1
@@ -228,7 +263,9 @@
$ hg purge -v --files
removing file dir/untracked_file
removing file untracked_file
- $ ls
+ $ ls -A
+ .hg
+ .hgignore
dir
empty_dir
$ ls dir
@@ -241,7 +278,9 @@
empty_dir
$ hg purge -v --dirs
removing directory empty_dir
- $ ls
+ $ ls -A
+ .hg
+ .hgignore
dir
untracked_file
$ ls dir
@@ -260,6 +299,8 @@
removing file untracked_file
removing directory empty_dir
removing directory dir
- $ ls
+ $ ls -A
+ .hg
+ .hgignore
$ cd ..
--- a/tests/test-push-race.t Mon Apr 13 16:30:13 2020 +0300
+++ b/tests/test-push-race.t Thu Apr 16 22:51:09 2020 +0530
@@ -119,11 +119,11 @@
#testcases strict unrelated
-#if unrelated
+#if strict
$ cat >> $HGRCPATH << EOF
> [server]
- > concurrent-push-mode = check-related
+ > concurrent-push-mode = strict
> EOF
#endif
--- a/tests/test-rebase-abort.t Mon Apr 13 16:30:13 2020 +0300
+++ b/tests/test-rebase-abort.t Thu Apr 16 22:51:09 2020 +0530
@@ -88,18 +88,13 @@
$ hg --config extensions.fakemergerecord=$TESTDIR/fakemergerecord.py fakemergerecord -x
$ hg debugmergestate
- * version 2 records
- local: 3e046f2ecedb793b97ed32108086edd1a162f8bc
- other: 46f0b057b5c061d276b91491c22151f78698abd2
- labels:
- local: dest
- other: source
- unrecognized entry: x advisory record
- file extras: common (ancestorlinknode = 3163e20567cc93074fbb7a53c8b93312e59dbf2c)
- file: common (record type "F", state "u", hash 94c8c21d08740f5da9eaa38d1f175c592692f0d1)
- local path: common (flags "")
+ local (dest): 3e046f2ecedb793b97ed32108086edd1a162f8bc
+ other (source): 46f0b057b5c061d276b91491c22151f78698abd2
+ file: common (state "u")
+ local path: common (hash 94c8c21d08740f5da9eaa38d1f175c592692f0d1, flags "")
ancestor path: common (node de0a666fdd9c1a0b0698b90d85064d8bd34f74b6)
other path: common (node 2f6411de53677f6f1048fef5bf888d67a342e0a5)
+ extra: ancestorlinknode = 3163e20567cc93074fbb7a53c8b93312e59dbf2c
$ hg resolve -l
U common
@@ -107,18 +102,9 @@
$ hg --config extensions.fakemergerecord=$TESTDIR/fakemergerecord.py fakemergerecord -X
$ hg debugmergestate
- * version 2 records
- local: 3e046f2ecedb793b97ed32108086edd1a162f8bc
- other: 46f0b057b5c061d276b91491c22151f78698abd2
- labels:
- local: dest
- other: source
- file extras: common (ancestorlinknode = 3163e20567cc93074fbb7a53c8b93312e59dbf2c)
- file: common (record type "F", state "u", hash 94c8c21d08740f5da9eaa38d1f175c592692f0d1)
- local path: common (flags "")
- ancestor path: common (node de0a666fdd9c1a0b0698b90d85064d8bd34f74b6)
- other path: common (node 2f6411de53677f6f1048fef5bf888d67a342e0a5)
- unrecognized entry: X mandatory record
+ abort: unsupported merge state records: X
+ (see https://mercurial-scm.org/wiki/MergeStateRecords for more information)
+ [255]
$ hg resolve -l
abort: unsupported merge state records: X
(see https://mercurial-scm.org/wiki/MergeStateRecords for more information)
@@ -236,7 +222,7 @@
[1]
$ hg tglog
- @ 4:draft 'C1'
+ % 4:draft 'C1'
|
o 3:draft 'B bis'
|
--- a/tests/test-rebase-collapse.t Mon Apr 13 16:30:13 2020 +0300
+++ b/tests/test-rebase-collapse.t Thu Apr 16 22:51:09 2020 +0530
@@ -184,33 +184,33 @@
Rebase and collapse - E onto H:
- $ hg rebase -s E --dest I --collapse # root (E) is not a merge
- abort: unknown revision 'I'!
- [255]
+ $ hg rebase -s E --dest H --collapse # root (E) is not a merge
+ rebasing 5:49cb92066bfd "E" (E)
+ rebasing 6:11abe3fb10b8 "F" (F)
+ rebasing 7:64e264db77f0 "G" (G tip)
+ saved backup bundle to $TESTTMP/multiple-external-parents/.hg/strip-backup/49cb92066bfd-ee8a8a79-rebase.hg
$ hg tglog
- o 7: 64e264db77f0 'G'
- |\
- | o 6: 11abe3fb10b8 'F'
- | |
- | o 5: 49cb92066bfd 'E'
+ o 5: 8b2315790719 'Collapsed revision
+ |\ * E
+ | | * F
+ | | * G'
+ | o 4: 4e4f9194f9f1 'D'
+ | |\
+ o | | 3: 575c4b5ec114 'H'
+ | | |
+ +---o 2: dc0947a82db8 'C'
| |
- o | 4: 4e4f9194f9f1 'D'
- |\|
- | | o 3: 575c4b5ec114 'H'
- | | |
- o---+ 2: dc0947a82db8 'C'
- / /
- o / 1: 112478962961 'B'
+ | o 1: 112478962961 'B'
|/
o 0: 426bada5c675 'A'
$ hg manifest --rev tip
A
- B
C
E
F
+ H
$ cd ..
@@ -486,61 +486,6 @@
abort: cannot collapse multiple named branches
[255]
- $ repeatchange() {
- > hg checkout $1
- > hg cp d z
- > echo blah >> z
- > hg commit -Am "$2" --user "$3"
- > }
- $ repeatchange 3 "E" "user1"
- 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
- $ repeatchange 3 "E" "user2"
- 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
- created new head
- $ hg tglog
- @ 5: fbfb97b1089a 'E'
- |
- | o 4: f338eb3c2c7c 'E'
- |/
- o 3: 41acb9dca9eb 'D'
- |
- | o 2: 8ac4a08debf1 'C' two
- | |
- | o 1: 1ba175478953 'B' one
- |/
- o 0: 1994f17a630e 'A'
-
- $ hg rebase -s 5 -d 4
- rebasing 5:fbfb97b1089a "E" (tip)
- note: not rebasing 5:fbfb97b1089a "E" (tip), its destination already has all its changes
- saved backup bundle to $TESTTMP/e/.hg/strip-backup/fbfb97b1089a-553e1d85-rebase.hg
- $ hg tglog
- @ 4: f338eb3c2c7c 'E'
- |
- o 3: 41acb9dca9eb 'D'
- |
- | o 2: 8ac4a08debf1 'C' two
- | |
- | o 1: 1ba175478953 'B' one
- |/
- o 0: 1994f17a630e 'A'
-
- $ hg export tip
- # HG changeset patch
- # User user1
- # Date 0 0
- # Thu Jan 01 00:00:00 1970 +0000
- # Node ID f338eb3c2c7cc5b5915676a2376ba7ac558c5213
- # Parent 41acb9dca9eb976e84cd21fcb756b4afa5a35c09
- E
-
- diff -r 41acb9dca9eb -r f338eb3c2c7c z
- --- /dev/null Thu Jan 01 00:00:00 1970 +0000
- +++ b/z Thu Jan 01 00:00:00 1970 +0000
- @@ -0,0 +1,2 @@
- +d
- +blah
-
$ cd ..
Rebase, collapse and copies
@@ -767,7 +712,7 @@
|
| @ 2: 82b8abf9c185 'D'
| |
- @ | 1: f899f3910ce7 'B'
+ % | 1: f899f3910ce7 'B'
|/
o 0: 4a2df7238c3b 'A'
@@ -791,7 +736,7 @@
unresolved conflicts (see hg resolve, then hg rebase --continue)
[1]
$ hg tglog
- @ 3: 63668d570d21 'C'
+ % 3: 63668d570d21 'C'
|
| @ 2: 82b8abf9c185 'D'
| |
@@ -817,7 +762,7 @@
abort: edit failed: false exited with status 1
[255]
$ hg tglog
- o 3: 63668d570d21 'C'
+ % 3: 63668d570d21 'C'
|
| @ 2: 82b8abf9c185 'D'
| |
--- a/tests/test-rebase-conflicts.t Mon Apr 13 16:30:13 2020 +0300
+++ b/tests/test-rebase-conflicts.t Thu Apr 16 22:51:09 2020 +0530
@@ -429,3 +429,72 @@
|/
o 0:draft 'A'
+
+Test where the conflict happens when rebasing a merge commit
+
+ $ cd $TESTTMP
+ $ hg init conflict-in-merge
+ $ cd conflict-in-merge
+ $ hg debugdrawdag <<'EOS'
+ > F # F/conflict = foo\n
+ > |\
+ > D E
+ > |/
+ > C B # B/conflict = bar\n
+ > |/
+ > A
+ > EOS
+
+ $ hg co F
+ 5 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ $ hg rebase -d B
+ rebasing 2:dc0947a82db8 "C" (C)
+ rebasing 3:e7b3f00ed42e "D" (D)
+ rebasing 4:03ca77807e91 "E" (E)
+ rebasing 5:9a6b91dc2044 "F" (F tip)
+ merging conflict
+ warning: conflicts while merging conflict! (edit, then use 'hg resolve --mark')
+ unresolved conflicts (see hg resolve, then hg rebase --continue)
+ [1]
+ $ hg tglog
+ @ 8:draft 'E'
+ |
+ | @ 7:draft 'D'
+ |/
+ o 6:draft 'C'
+ |
+ | % 5:draft 'F'
+ | |\
+ | | o 4:draft 'E'
+ | | |
+ | o | 3:draft 'D'
+ | |/
+ | o 2:draft 'C'
+ | |
+ o | 1:draft 'B'
+ |/
+ o 0:draft 'A'
+
+ $ echo baz > conflict
+ $ hg resolve -m
+ (no more unresolved files)
+ continue: hg rebase --continue
+ $ hg rebase -c
+ already rebased 2:dc0947a82db8 "C" (C) as 0199610c343e
+ already rebased 3:e7b3f00ed42e "D" (D) as f0dd538aaa63
+ already rebased 4:03ca77807e91 "E" (E) as cbf25af8347d
+ rebasing 5:9a6b91dc2044 "F" (F)
+ saved backup bundle to $TESTTMP/conflict-in-merge/.hg/strip-backup/dc0947a82db8-ca7e7d5b-rebase.hg
+ $ hg tglog
+ @ 5:draft 'F'
+ |\
+ | o 4:draft 'E'
+ | |
+ o | 3:draft 'D'
+ |/
+ o 2:draft 'C'
+ |
+ o 1:draft 'B'
+ |
+ o 0:draft 'A'
+
--- a/tests/test-rebase-dest.t Mon Apr 13 16:30:13 2020 +0300
+++ b/tests/test-rebase-dest.t Thu Apr 16 22:51:09 2020 +0530
@@ -156,7 +156,7 @@
Multiple destinations cannot be used with --base:
- $ rebasewithdag -b B+E -d 'SRC^^' --collapse <<'EOS'
+ $ rebasewithdag -b B -b E -d 'SRC^^' --collapse <<'EOS'
> B E
> | |
> A D
@@ -256,7 +256,7 @@
> EOS
rebasing 3:a4256619d830 "B" (B)
rebasing 6:8e139e245220 "C" (C tip)
- o 8: 51e2ce92e06a C
+ o 8: d7d1169e9b1c C
|\
| o 7: 2ed0c8546285 B
| |\
@@ -272,7 +272,7 @@
Move to a previous parent:
- $ rebasewithdag -s E+F+G -d 'SRC^^' <<'EOS'
+ $ rebasewithdag -s E -s F -s G -d 'SRC^^' <<'EOS'
> H
> |
> D G
--- a/tests/test-rebase-interruptions.t Mon Apr 13 16:30:13 2020 +0300
+++ b/tests/test-rebase-interruptions.t Thu Apr 16 22:51:09 2020 +0530
@@ -281,6 +281,66 @@
$ cd ..
+Continue rebase after upgrading from an hg version before 9c9cfecd4600:
+
+ $ hg clone -q -u . a a4
+ $ cd a4
+
+ $ hg tglog
+ @ 4: ae36e8e3dfd7 'E'
+ |
+ o 3: 46b37eabc604 'D'
+ |
+ | o 2: 965c486023db 'C'
+ | |
+ | o 1: 27547f69f254 'B'
+ |/
+ o 0: 4a2df7238c3b 'A'
+
+ $ hg rebase -s 1 -d 4
+ rebasing 1:27547f69f254 "B"
+ rebasing 2:965c486023db "C"
+ merging A
+ warning: conflicts while merging A! (edit, then use 'hg resolve --mark')
+ unresolved conflicts (see hg resolve, then hg rebase --continue)
+ [1]
+ $ hg tglog
+ @ 5: 45396c49d53b 'B'
+ |
+ o 4: ae36e8e3dfd7 'E'
+ |
+ o 3: 46b37eabc604 'D'
+ |
+ | % 2: 965c486023db 'C'
+ | |
+ | o 1: 27547f69f254 'B'
+ |/
+ o 0: 4a2df7238c3b 'A'
+
+Simulate having run the above with an older hg version by manually setting
+two dirstate parents. We should not get a merge commit when we continue.
+ $ hg debugsetparents 5 2
+ $ echo 'conflict solved' > A
+ $ hg resolve -m A
+ (no more unresolved files)
+ continue: hg rebase --continue
+ $ hg rebase --continue
+ already rebased 1:27547f69f254 "B" as 45396c49d53b
+ rebasing 2:965c486023db "C"
+ saved backup bundle to $TESTTMP/a4/.hg/strip-backup/27547f69f254-359abdd7-rebase.hg
+ $ hg tglog
+ o 4: d2d25e26288e 'C'
+ |
+ o 3: 45396c49d53b 'B'
+ |
+ @ 2: ae36e8e3dfd7 'E'
+ |
+ o 1: 46b37eabc604 'D'
+ |
+ o 0: 4a2df7238c3b 'A'
+
+ $ cd ..
+
(precommit version)
$ cp -R a3 hook-precommit
@@ -294,7 +354,7 @@
$ hg tglogp
@ 7: 401ccec5e39f secret 'C'
|
- | @ 6: a0b2430ebfb8 secret 'F'
+ | o 6: a0b2430ebfb8 secret 'F'
| |
o | 5: 45396c49d53b public 'B'
| |
@@ -345,7 +405,7 @@
$ hg tglogp
@ 7: 401ccec5e39f secret 'C'
|
- | @ 6: a0b2430ebfb8 secret 'F'
+ | o 6: a0b2430ebfb8 secret 'F'
| |
o | 5: 45396c49d53b public 'B'
| |
@@ -395,7 +455,7 @@
$ hg tglogp
@ 7: 401ccec5e39f secret 'C'
|
- | @ 6: a0b2430ebfb8 secret 'F'
+ | o 6: a0b2430ebfb8 secret 'F'
| |
o | 5: 45396c49d53b public 'B'
| |
--- a/tests/test-rebase-newancestor.t Mon Apr 13 16:30:13 2020 +0300
+++ b/tests/test-rebase-newancestor.t Thu Apr 16 22:51:09 2020 +0530
@@ -68,11 +68,6 @@
that is mixed up with the actual merge stuff and there is in general no way to
separate them.
-Note: The dev branch contains _no_ changes to f-default. It might be unclear
-how rebasing of ancestor merges should be handled, but the current behavior
-with spurious prompts for conflicts in files that didn't change seems very
-wrong.
-
$ hg init ancestor-merge
$ cd ancestor-merge
@@ -133,16 +128,11 @@
note: not rebasing 1:1d1a643d390e "dev: create branch", its destination already has all its changes
rebasing 2:ec2c14fb2984 "dev: f-dev stuff"
rebasing 4:4b019212aaf6 "dev: merge default"
- file 'f-default' was deleted in local [dest] but was modified in other [source].
- You can use (c)hanged version, leave (d)eleted, or leave (u)nresolved.
- What do you want to do? c
+ note: not rebasing 4:4b019212aaf6 "dev: merge default", its destination already has all its changes
rebasing 6:010ced67e558 "dev: merge default"
+ note: not rebasing 6:010ced67e558 "dev: merge default", its destination already has all its changes
saved backup bundle to $TESTTMP/ancestor-merge/.hg/strip-backup/1d1a643d390e-4a6f6d17-rebase.hg
$ hg tglog
- o 6: de147e4f69cf 'dev: merge default'
- |
- o 5: eda7b7f46f5d 'dev: merge default'
- |
o 4: 3e075b1c0a40 'dev: f-dev stuff'
|
@ 3: e08089805d82 'default: f-other stuff'
@@ -163,28 +153,8 @@
> EOF
rebasing 2:ec2c14fb2984 "dev: f-dev stuff"
rebasing 4:4b019212aaf6 "dev: merge default"
- file 'f-default' was deleted in local [dest] but was modified in other [source].
- You can use (c)hanged version, leave (d)eleted, or leave (u)nresolved.
- What do you want to do? c
- rebasing 6:010ced67e558 "dev: merge default"
- saved backup bundle to $TESTTMP/ancestor-merge-2/.hg/strip-backup/ec2c14fb2984-827d7a44-rebase.hg
- $ hg tglog
- o 7: de147e4f69cf 'dev: merge default'
- |
- o 6: eda7b7f46f5d 'dev: merge default'
- |
- o 5: 3e075b1c0a40 'dev: f-dev stuff'
- |
- o 4: e08089805d82 'default: f-other stuff'
- |
- o 3: 462860db70a1 'default: remove f-default'
- |
- o 2: f157ecfd2b6b 'default: f-default stuff'
- |
- | o 1: 1d1a643d390e 'dev: create branch' dev
- |/
- o 0: e90e8eb90b6f 'default: create f-default'
-
+ abort: rebasing 4:4b019212aaf6 will include unwanted changes from 1:1d1a643d390e
+ [255]
$ cd ..
@@ -284,18 +254,7 @@
rebasing 6:4c5f12f25ebe "merge rebase ancestors" (tip)
resolving manifests
removing other
- note: merging f9daf77ffe76+ and 4c5f12f25ebe using bids from ancestors a60552eb93fb and f59da8fc0fcf
-
- calculating bids for ancestor a60552eb93fb
resolving manifests
-
- calculating bids for ancestor f59da8fc0fcf
- resolving manifests
-
- auction for merging merge bids
- other: consensus for g
- end of auction
-
getting other
committing files:
other
--- a/tests/test-rebase-obsolete.t Mon Apr 13 16:30:13 2020 +0300
+++ b/tests/test-rebase-obsolete.t Thu Apr 16 22:51:09 2020 +0530
@@ -1795,19 +1795,15 @@
$ hg log -G
@ 2:b18e25de2cf5 D
|
- | @ 1:2ec65233581b B (pruned using prune)
- |/
o 0:426bada5c675 A
$ hg summary
parent: 2:b18e25de2cf5 tip
D
- parent: 1:2ec65233581b (obsolete)
- B
branch: default
- commit: 2 modified, 1 unknown, 1 unresolved (merge)
+ commit: 1 modified, 1 added, 1 unknown, 1 unresolved
update: (current)
- phases: 3 draft
+ phases: 2 draft
rebase: 0 rebased, 2 remaining (rebase --continue)
$ hg rebase --abort
--- a/tests/test-rebase-parameters.t Mon Apr 13 16:30:13 2020 +0300
+++ b/tests/test-rebase-parameters.t Thu Apr 16 22:51:09 2020 +0530
@@ -92,6 +92,18 @@
empty "rev" revision set - nothing to rebase
[1]
+ $ hg rebase --rev 'wdir()' --dest 6
+ abort: cannot rebase the working copy
+ [255]
+
+ $ hg rebase --source 'wdir()' --dest 6
+ abort: cannot rebase the working copy
+ [255]
+
+ $ hg rebase --source 1 --source 'wdir()' --dest 6
+ abort: cannot rebase the working copy
+ [255]
+
$ hg rebase --source '1 & !1' --dest 8
empty "source" revision set - nothing to rebase
[1]
@@ -473,11 +485,9 @@
$ hg summary
parent: 1:56daeba07f4b
c2
- parent: 2:e4e3f3546619 tip
- c2b
branch: default
- commit: 1 modified, 1 unresolved (merge)
- update: (current)
+ commit: 1 unresolved (clean)
+ update: 1 new changesets, 2 branch heads (merge)
phases: 3 draft
rebase: 0 rebased, 1 remaining (rebase --continue)
--- a/tests/test-rebase-rename.t Mon Apr 13 16:30:13 2020 +0300
+++ b/tests/test-rebase-rename.t Thu Apr 16 22:51:09 2020 +0530
@@ -108,6 +108,62 @@
+ $ repeatchange() {
+ > hg checkout $1
+ > hg cp a z
+ > echo blah >> z
+ > hg commit -Am "$2" --user "$3"
+ > }
+ $ repeatchange 1 "E" "user1"
+ 2 files updated, 0 files merged, 3 files removed, 0 files unresolved
+ created new head
+ $ repeatchange 1 "E" "user2"
+ 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
+ created new head
+ $ hg tglog
+ @ 5: af8ad1f97097 'E'
+ |
+ | o 4: 60f545c27784 'E'
+ |/
+ | o 3: 032a9b75e83b 'rename A'
+ | |
+ | o 2: 220d0626d185 'rename B'
+ |/
+ o 1: 3ab5da9a5c01 'B'
+ |
+ o 0: 1994f17a630e 'A'
+
+ $ hg rebase -s 5 -d 4
+ rebasing 5:af8ad1f97097 "E" (tip)
+ note: not rebasing 5:af8ad1f97097 "E" (tip), its destination already has all its changes
+ saved backup bundle to $TESTTMP/a/.hg/strip-backup/af8ad1f97097-c3e90708-rebase.hg
+ $ hg tglog
+ @ 4: 60f545c27784 'E'
+ |
+ | o 3: 032a9b75e83b 'rename A'
+ | |
+ | o 2: 220d0626d185 'rename B'
+ |/
+ o 1: 3ab5da9a5c01 'B'
+ |
+ o 0: 1994f17a630e 'A'
+
+ $ hg export tip
+ # HG changeset patch
+ # User user1
+ # Date 0 0
+ # Thu Jan 01 00:00:00 1970 +0000
+ # Node ID 60f545c277846e6bad309919bae3ae106f59cb39
+ # Parent 3ab5da9a5c01faa02c20f2ec4870a4f689c92da6
+ E
+
+ diff -r 3ab5da9a5c01 -r 60f545c27784 z
+ --- /dev/null Thu Jan 01 00:00:00 1970 +0000
+ +++ b/z Thu Jan 01 00:00:00 1970 +0000
+ @@ -0,0 +1,2 @@
+ +a
+ +blah
+
$ cd ..
--- a/tests/test-rebase-transaction.t Mon Apr 13 16:30:13 2020 +0300
+++ b/tests/test-rebase-transaction.t Thu Apr 16 22:51:09 2020 +0530
@@ -114,7 +114,7 @@
|
| @ 4: Z
| |
- @ | 3: C
+ % | 3: C
| |
| o 2: Y
| |
@@ -123,9 +123,9 @@
o 0: A
$ hg st
- M C
M conflict
A B
+ A C
? conflict.orig
$ echo resolved > conflict
$ hg resolve -m
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-remotefilelog-hgweb.t Thu Apr 16 22:51:09 2020 +0530
@@ -0,0 +1,38 @@
+#require no-windows serve
+
+ $ . "$TESTDIR/remotefilelog-library.sh"
+
+ $ cat >> $HGRCPATH <<EOF
+ > [extensions]
+ > remotefilelog=
+ > share=
+ > EOF
+
+ $ hg init master
+ $ cd master
+ $ cat >> .hg/hgrc <<EOF
+ > [remotefilelog]
+ > server=True
+ > EOF
+ $ echo x > x
+ $ hg commit -qAm x
+
+ $ cd ..
+
+
+ $ hgcloneshallow ssh://user@dummy/master wdir --noupdate -q
+ $ cd wdir
+ $ hg serve -n test -p $HGPORT -d --pid-file=hg.pid -E errors.log
+ $ cat hg.pid >> $DAEMON_PIDS
+ $ get-with-headers.py localhost:$HGPORT 'file/tip/x' | head -n 10
+ 200 Script output follows
+
+ <!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.1//EN" "http://www.w3.org/TR/xhtml11/DTD/xhtml11.dtd">
+ <html xmlns="http://www.w3.org/1999/xhtml" xml:lang="en-US">
+ <head>
+ <link rel="icon" href="/static/hgicon.png" type="image/png" />
+ <meta name="robots" content="index, nofollow" />
+ <link rel="stylesheet" href="/static/style-paper.css" type="text/css" />
+ <script type="text/javascript" src="/static/mercurial.js"></script>
+
+
--- a/tests/test-remotefilelog-local.t Mon Apr 13 16:30:13 2020 +0300
+++ b/tests/test-remotefilelog-local.t Thu Apr 16 22:51:09 2020 +0530
@@ -121,7 +121,8 @@
# unbundle
$ clearcache
- $ ls
+ $ ls -A
+ .hg
w
x
y
--- a/tests/test-rename-after-merge.t Mon Apr 13 16:30:13 2020 +0300
+++ b/tests/test-rename-after-merge.t Thu Apr 16 22:51:09 2020 +0530
@@ -120,4 +120,14 @@
$ hg log -r tip -C -v | grep copies
copies: b2 (b1)
+Test marking/unmarking copies in merge commit
+
+ $ hg copy --forget --at-rev . b2
+ abort: cannot mark/unmark copy in merge commit
+ [255]
+
+ $ hg copy --after --at-rev . b1 b2
+ abort: cannot mark/unmark copy in merge commit
+ [255]
+
$ cd ..
--- a/tests/test-rename-dir-merge.t Mon Apr 13 16:30:13 2020 +0300
+++ b/tests/test-rename-dir-merge.t Thu Apr 16 22:51:09 2020 +0530
@@ -30,8 +30,9 @@
b/a
b/b
all copies found (* = to merge, ! = divergent, % = renamed and deleted):
- src: 'a/a' -> dst: 'b/a'
- src: 'a/b' -> dst: 'b/b'
+ on remote side:
+ src: 'a/a' -> dst: 'b/a'
+ src: 'a/b' -> dst: 'b/b'
checking for directory renames
discovered dir src: 'a/' -> dst: 'b/'
pending file src: 'a/c' -> dst: 'b/c'
@@ -75,8 +76,9 @@
unmatched files in other:
a/c
all copies found (* = to merge, ! = divergent, % = renamed and deleted):
- src: 'a/a' -> dst: 'b/a'
- src: 'a/b' -> dst: 'b/b'
+ on local side:
+ src: 'a/a' -> dst: 'b/a'
+ src: 'a/b' -> dst: 'b/b'
checking for directory renames
discovered dir src: 'a/' -> dst: 'b/'
pending file src: 'a/c' -> dst: 'b/c'
--- a/tests/test-rename-merge1.t Mon Apr 13 16:30:13 2020 +0300
+++ b/tests/test-rename-merge1.t Thu Apr 16 22:51:09 2020 +0530
@@ -28,9 +28,11 @@
b
b2
all copies found (* = to merge, ! = divergent, % = renamed and deleted):
- src: 'a' -> dst: 'b' *
- src: 'a2' -> dst: 'b2' !
- src: 'a2' -> dst: 'c2' !
+ on local side:
+ src: 'a2' -> dst: 'c2' !
+ on remote side:
+ src: 'a' -> dst: 'b' *
+ src: 'a2' -> dst: 'b2' !
checking for directory renames
resolving manifests
branchmerge: True, force: False, partial: False
@@ -170,7 +172,8 @@
unmatched files in other:
newfile
all copies found (* = to merge, ! = divergent, % = renamed and deleted):
- src: 'file' -> dst: 'newfile' %
+ on remote side:
+ src: 'file' -> dst: 'newfile' %
checking for directory renames
resolving manifests
branchmerge: True, force: False, partial: False
@@ -184,3 +187,50 @@
$ hg status
M newfile
$ cd ..
+
+Create x and y, then modify y and rename x to z on one side of merge, and
+modify x and rename y to z on the other side.
+ $ hg init conflicting-target
+ $ cd conflicting-target
+ $ echo x > x
+ $ echo y > y
+ $ hg ci -Aqm 'add x and y'
+ $ hg mv x z
+ $ echo foo >> y
+ $ hg ci -qm 'modify y, rename x to z'
+ $ hg co -q 0
+ $ hg mv y z
+ $ echo foo >> x
+ $ hg ci -qm 'modify x, rename y to z'
+# We should probably tell the user about the conflicting rename sources.
+# Depending on which side they pick, we should take that rename and get
+# the changes to the source from the other side. The unchanged file should
+# remain.
+ $ hg merge --debug 1 -t :merge3
+ all copies found (* = to merge, ! = divergent, % = renamed and deleted):
+ on local side:
+ src: 'y' -> dst: 'z' *
+ on remote side:
+ src: 'x' -> dst: 'z' *
+ checking for directory renames
+ resolving manifests
+ branchmerge: True, force: False, partial: False
+ ancestor: 5151c134577e, local: 07fcbc9a74ed+, remote: f21419739508
+ preserving z for resolve of z
+ starting 4 threads for background file closing (?)
+ z: both renamed from y -> m (premerge)
+ picked tool ':merge3' for z (binary False symlink False changedelete False)
+ merging z
+ my z@07fcbc9a74ed+ other z@f21419739508 ancestor y@5151c134577e
+ premerge successful
+ 0 files updated, 1 files merged, 0 files removed, 0 files unresolved
+ (branch merge, don't forget to commit)
+ $ ls
+ x
+ z
+ $ cat x
+ x
+ foo
+# 'z' should have had the added 'foo' line
+ $ cat z
+ x
--- a/tests/test-rename-merge2.t Mon Apr 13 16:30:13 2020 +0300
+++ b/tests/test-rename-merge2.t Thu Apr 16 22:51:09 2020 +0530
@@ -79,7 +79,8 @@
unmatched files in other:
b
all copies found (* = to merge, ! = divergent, % = renamed and deleted):
- src: 'a' -> dst: 'b' *
+ on remote side:
+ src: 'a' -> dst: 'b' *
checking for directory renames
resolving manifests
branchmerge: True, force: False, partial: False
@@ -117,7 +118,8 @@
unmatched files in local:
b
all copies found (* = to merge, ! = divergent, % = renamed and deleted):
- src: 'a' -> dst: 'b' *
+ on local side:
+ src: 'a' -> dst: 'b' *
checking for directory renames
resolving manifests
branchmerge: True, force: False, partial: False
@@ -156,7 +158,8 @@
unmatched files in other:
b
all copies found (* = to merge, ! = divergent, % = renamed and deleted):
- src: 'a' -> dst: 'b' *
+ on remote side:
+ src: 'a' -> dst: 'b' *
checking for directory renames
resolving manifests
branchmerge: True, force: False, partial: False
@@ -194,7 +197,8 @@
unmatched files in local:
b
all copies found (* = to merge, ! = divergent, % = renamed and deleted):
- src: 'a' -> dst: 'b' *
+ on local side:
+ src: 'a' -> dst: 'b' *
checking for directory renames
resolving manifests
branchmerge: True, force: False, partial: False
@@ -231,7 +235,8 @@
unmatched files in other:
b
all copies found (* = to merge, ! = divergent, % = renamed and deleted):
- src: 'a' -> dst: 'b'
+ on remote side:
+ src: 'a' -> dst: 'b'
checking for directory renames
resolving manifests
branchmerge: True, force: False, partial: False
@@ -263,7 +268,8 @@
unmatched files in local:
b
all copies found (* = to merge, ! = divergent, % = renamed and deleted):
- src: 'a' -> dst: 'b'
+ on local side:
+ src: 'a' -> dst: 'b'
checking for directory renames
resolving manifests
branchmerge: True, force: False, partial: False
@@ -294,7 +300,8 @@
unmatched files in other:
b
all copies found (* = to merge, ! = divergent, % = renamed and deleted):
- src: 'a' -> dst: 'b'
+ on remote side:
+ src: 'a' -> dst: 'b'
checking for directory renames
resolving manifests
branchmerge: True, force: False, partial: False
@@ -327,7 +334,8 @@
unmatched files in local:
b
all copies found (* = to merge, ! = divergent, % = renamed and deleted):
- src: 'a' -> dst: 'b'
+ on local side:
+ src: 'a' -> dst: 'b'
checking for directory renames
resolving manifests
branchmerge: True, force: False, partial: False
@@ -355,7 +363,10 @@
test L:um a b R:um a b W: - 9 do merge with ancestor in a
--------------
all copies found (* = to merge, ! = divergent, % = renamed and deleted):
- src: 'a' -> dst: 'b' *
+ on local side:
+ src: 'a' -> dst: 'b' *
+ on remote side:
+ src: 'a' -> dst: 'b' *
checking for directory renames
resolving manifests
branchmerge: True, force: False, partial: False
@@ -400,8 +411,10 @@
unmatched files in other:
c
all copies found (* = to merge, ! = divergent, % = renamed and deleted):
- src: 'a' -> dst: 'b' !
- src: 'a' -> dst: 'c' !
+ on local side:
+ src: 'a' -> dst: 'b' !
+ on remote side:
+ src: 'a' -> dst: 'c' !
checking for directory renames
resolving manifests
branchmerge: True, force: False, partial: False
@@ -434,7 +447,8 @@
test L:nc a b R:up b W: - 12 merge b no ancestor
--------------
all copies found (* = to merge, ! = divergent, % = renamed and deleted):
- src: 'a' -> dst: 'b'
+ on local side:
+ src: 'a' -> dst: 'b'
checking for directory renames
resolving manifests
branchmerge: True, force: False, partial: False
@@ -473,7 +487,8 @@
test L:up b R:nm a b W: - 13 merge b no ancestor
--------------
all copies found (* = to merge, ! = divergent, % = renamed and deleted):
- src: 'a' -> dst: 'b'
+ on remote side:
+ src: 'a' -> dst: 'b'
checking for directory renames
resolving manifests
branchmerge: True, force: False, partial: False
@@ -513,7 +528,8 @@
test L:nc a b R:up a b W: - 14 merge b no ancestor
--------------
all copies found (* = to merge, ! = divergent, % = renamed and deleted):
- src: 'a' -> dst: 'b'
+ on local side:
+ src: 'a' -> dst: 'b' *
checking for directory renames
resolving manifests
branchmerge: True, force: False, partial: False
@@ -522,19 +538,15 @@
preserving rev for resolve of rev
a: remote is newer -> g
getting a
- b: both created -> m (premerge)
+ b: both renamed from a -> m (premerge)
picked tool '* ../merge' for b (binary False symlink False changedelete False) (glob)
merging b
- my b@86a2aa42fc76+ other b@8dbce441892a ancestor b@000000000000
+ my b@86a2aa42fc76+ other b@8dbce441892a ancestor a@924404dff337
+ premerge successful
rev: versions differ -> m (premerge)
picked tool '* ../merge' for rev (binary False symlink False changedelete False) (glob)
merging rev
my rev@86a2aa42fc76+ other rev@8dbce441892a ancestor rev@924404dff337
- b: both created -> m (merge)
- picked tool '* ../merge' for b (binary False symlink False changedelete False) (glob)
- my b@86a2aa42fc76+ other b@8dbce441892a ancestor b@000000000000
- launching merge tool: * ../merge *$TESTTMP/t/t/b* * * (glob)
- merge tool returned: 0
rev: versions differ -> m (merge)
picked tool '* ../merge' for rev (binary False symlink False changedelete False) (glob)
my rev@86a2aa42fc76+ other rev@8dbce441892a ancestor rev@924404dff337
@@ -553,7 +565,8 @@
test L:up b R:nm a b W: - 15 merge b no ancestor, remove a
--------------
all copies found (* = to merge, ! = divergent, % = renamed and deleted):
- src: 'a' -> dst: 'b'
+ on remote side:
+ src: 'a' -> dst: 'b'
checking for directory renames
resolving manifests
branchmerge: True, force: False, partial: False
@@ -593,7 +606,8 @@
test L:nc a b R:up a b W: - 16 get a, merge b no ancestor
--------------
all copies found (* = to merge, ! = divergent, % = renamed and deleted):
- src: 'a' -> dst: 'b'
+ on local side:
+ src: 'a' -> dst: 'b' *
checking for directory renames
resolving manifests
branchmerge: True, force: False, partial: False
@@ -602,19 +616,15 @@
preserving rev for resolve of rev
a: remote is newer -> g
getting a
- b: both created -> m (premerge)
+ b: both renamed from a -> m (premerge)
picked tool '* ../merge' for b (binary False symlink False changedelete False) (glob)
merging b
- my b@86a2aa42fc76+ other b@8dbce441892a ancestor b@000000000000
+ my b@86a2aa42fc76+ other b@8dbce441892a ancestor a@924404dff337
+ premerge successful
rev: versions differ -> m (premerge)
picked tool '* ../merge' for rev (binary False symlink False changedelete False) (glob)
merging rev
my rev@86a2aa42fc76+ other rev@8dbce441892a ancestor rev@924404dff337
- b: both created -> m (merge)
- picked tool '* ../merge' for b (binary False symlink False changedelete False) (glob)
- my b@86a2aa42fc76+ other b@8dbce441892a ancestor b@000000000000
- launching merge tool: * ../merge *$TESTTMP/t/t/b* * * (glob)
- merge tool returned: 0
rev: versions differ -> m (merge)
picked tool '* ../merge' for rev (binary False symlink False changedelete False) (glob)
my rev@86a2aa42fc76+ other rev@8dbce441892a ancestor rev@924404dff337
@@ -633,7 +643,8 @@
test L:up a b R:nc a b W: - 17 keep a, merge b no ancestor
--------------
all copies found (* = to merge, ! = divergent, % = renamed and deleted):
- src: 'a' -> dst: 'b'
+ on remote side:
+ src: 'a' -> dst: 'b' *
checking for directory renames
resolving manifests
branchmerge: True, force: False, partial: False
@@ -641,19 +652,15 @@
preserving b for resolve of b
preserving rev for resolve of rev
starting 4 threads for background file closing (?)
- b: both created -> m (premerge)
+ b: both renamed from a -> m (premerge)
picked tool '* ../merge' for b (binary False symlink False changedelete False) (glob)
merging b
- my b@0b76e65c8289+ other b@4ce40f5aca24 ancestor b@000000000000
+ my b@0b76e65c8289+ other b@4ce40f5aca24 ancestor a@924404dff337
+ premerge successful
rev: versions differ -> m (premerge)
picked tool '* ../merge' for rev (binary False symlink False changedelete False) (glob)
merging rev
my rev@0b76e65c8289+ other rev@4ce40f5aca24 ancestor rev@924404dff337
- b: both created -> m (merge)
- picked tool '* ../merge' for b (binary False symlink False changedelete False) (glob)
- my b@0b76e65c8289+ other b@4ce40f5aca24 ancestor b@000000000000
- launching merge tool: * ../merge *$TESTTMP/t/t/b* * * (glob)
- merge tool returned: 0
rev: versions differ -> m (merge)
picked tool '* ../merge' for rev (binary False symlink False changedelete False) (glob)
my rev@0b76e65c8289+ other rev@4ce40f5aca24 ancestor rev@924404dff337
@@ -672,7 +679,8 @@
test L:nm a b R:up a b W: - 18 merge b no ancestor
--------------
all copies found (* = to merge, ! = divergent, % = renamed and deleted):
- src: 'a' -> dst: 'b'
+ on local side:
+ src: 'a' -> dst: 'b' *
checking for directory renames
resolving manifests
branchmerge: True, force: False, partial: False
@@ -680,35 +688,24 @@
preserving b for resolve of b
preserving rev for resolve of rev
starting 4 threads for background file closing (?)
- a: prompt deleted/changed -> m (premerge)
- picked tool ':prompt' for a (binary False symlink False changedelete True)
- file 'a' was deleted in local [working copy] but was modified in other [merge rev].
- You can use (c)hanged version, leave (d)eleted, or leave (u)nresolved.
- What do you want to do? u
- b: both created -> m (premerge)
+ b: both renamed from a -> m (premerge)
picked tool '* ../merge' for b (binary False symlink False changedelete False) (glob)
merging b
- my b@02963e448370+ other b@8dbce441892a ancestor b@000000000000
+ my b@02963e448370+ other b@8dbce441892a ancestor a@924404dff337
+ premerge successful
rev: versions differ -> m (premerge)
picked tool '* ../merge' for rev (binary False symlink False changedelete False) (glob)
merging rev
my rev@02963e448370+ other rev@8dbce441892a ancestor rev@924404dff337
- b: both created -> m (merge)
- picked tool '* ../merge' for b (binary False symlink False changedelete False) (glob)
- my b@02963e448370+ other b@8dbce441892a ancestor b@000000000000
- launching merge tool: * ../merge *$TESTTMP/t/t/b* * * (glob)
- merge tool returned: 0
rev: versions differ -> m (merge)
picked tool '* ../merge' for rev (binary False symlink False changedelete False) (glob)
my rev@02963e448370+ other rev@8dbce441892a ancestor rev@924404dff337
launching merge tool: * ../merge *$TESTTMP/t/t/rev* * * (glob)
merge tool returned: 0
- 0 files updated, 2 files merged, 0 files removed, 1 files unresolved
- use 'hg resolve' to retry unresolved file merges or 'hg merge --abort' to abandon
+ 0 files updated, 2 files merged, 0 files removed, 0 files unresolved
+ (branch merge, don't forget to commit)
--------------
- M a
M b
- abort: unresolved merge conflicts (see 'hg help resolve')
--------------
$ tm "up a b" "nm a b" " " "19 merge b no ancestor, prompt remove a"
@@ -717,44 +714,34 @@
test L:up a b R:nm a b W: - 19 merge b no ancestor, prompt remove a
--------------
all copies found (* = to merge, ! = divergent, % = renamed and deleted):
- src: 'a' -> dst: 'b'
+ on remote side:
+ src: 'a' -> dst: 'b' *
checking for directory renames
resolving manifests
branchmerge: True, force: False, partial: False
ancestor: 924404dff337, local: 0b76e65c8289+, remote: bdb19105162a
- preserving a for resolve of a
preserving b for resolve of b
preserving rev for resolve of rev
starting 4 threads for background file closing (?)
- a: prompt changed/deleted -> m (premerge)
- picked tool ':prompt' for a (binary False symlink False changedelete True)
- file 'a' was deleted in other [merge rev] but was modified in local [working copy].
- You can use (c)hanged version, (d)elete, or leave (u)nresolved.
- What do you want to do? u
- b: both created -> m (premerge)
+ b: both renamed from a -> m (premerge)
picked tool '* ../merge' for b (binary False symlink False changedelete False) (glob)
merging b
- my b@0b76e65c8289+ other b@bdb19105162a ancestor b@000000000000
+ my b@0b76e65c8289+ other b@bdb19105162a ancestor a@924404dff337
+ premerge successful
rev: versions differ -> m (premerge)
picked tool '* ../merge' for rev (binary False symlink False changedelete False) (glob)
merging rev
my rev@0b76e65c8289+ other rev@bdb19105162a ancestor rev@924404dff337
- b: both created -> m (merge)
- picked tool '* ../merge' for b (binary False symlink False changedelete False) (glob)
- my b@0b76e65c8289+ other b@bdb19105162a ancestor b@000000000000
- launching merge tool: * ../merge *$TESTTMP/t/t/b* * * (glob)
- merge tool returned: 0
rev: versions differ -> m (merge)
picked tool '* ../merge' for rev (binary False symlink False changedelete False) (glob)
my rev@0b76e65c8289+ other rev@bdb19105162a ancestor rev@924404dff337
launching merge tool: * ../merge *$TESTTMP/t/t/rev* * * (glob)
merge tool returned: 0
- 0 files updated, 2 files merged, 0 files removed, 1 files unresolved
- use 'hg resolve' to retry unresolved file merges or 'hg merge --abort' to abandon
+ 0 files updated, 2 files merged, 0 files removed, 0 files unresolved
+ (branch merge, don't forget to commit)
--------------
M b
C a
- abort: unresolved merge conflicts (see 'hg help resolve')
--------------
$ tm "up a " "um a b" " " "20 merge a and b to b, remove a"
@@ -765,7 +752,8 @@
unmatched files in other:
b
all copies found (* = to merge, ! = divergent, % = renamed and deleted):
- src: 'a' -> dst: 'b' *
+ on remote side:
+ src: 'a' -> dst: 'b' *
checking for directory renames
resolving manifests
branchmerge: True, force: False, partial: False
@@ -807,7 +795,8 @@
unmatched files in local:
b
all copies found (* = to merge, ! = divergent, % = renamed and deleted):
- src: 'a' -> dst: 'b' *
+ on local side:
+ src: 'a' -> dst: 'b' *
checking for directory renames
resolving manifests
branchmerge: True, force: False, partial: False
@@ -853,7 +842,8 @@
unmatched files in other:
c
all copies found (* = to merge, ! = divergent, % = renamed and deleted):
- src: 'a' -> dst: 'b' *
+ on local side:
+ src: 'a' -> dst: 'b' *
checking for directory renames
resolving manifests
branchmerge: True, force: False, partial: False
@@ -936,11 +926,14 @@
4/g
7/f
all copies found (* = to merge, ! = divergent, % = renamed and deleted):
- src: '1/f' -> dst: '1/g' *
- src: '3/f' -> dst: '3/g' *
- src: '4/f' -> dst: '4/g' *
- src: '5/f' -> dst: '5/g' *
- src: '6/f' -> dst: '6/g' *
+ on local side:
+ src: '1/f' -> dst: '1/g' *
+ src: '5/f' -> dst: '5/g' *
+ src: '6/f' -> dst: '6/g' *
+ on remote side:
+ src: '1/f' -> dst: '1/g' *
+ src: '3/f' -> dst: '3/g' *
+ src: '4/f' -> dst: '4/g' *
checking for directory renames
$ hg mani
0/f
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-rename-rev.t Thu Apr 16 22:51:09 2020 +0530
@@ -0,0 +1,56 @@
+ $ hg init
+ $ mkdir d1 d1/d11 d2
+ $ echo d1/a > d1/a
+ $ echo d1/ba > d1/ba
+ $ echo d1/a1 > d1/d11/a1
+ $ echo d1/b > d1/b
+ $ echo d2/b > d2/b
+ $ hg add d1/a d1/b d1/ba d1/d11/a1 d2/b
+ $ hg commit -m "intial"
+
+
+Test single file
+
+# One recoded copy, one copy to record after commit
+ $ hg cp d1/b d1/c
+ $ cp d1/b d1/d
+ $ hg add d1/d
+ $ hg ci -m 'copy d1/b to d1/c and d1/d'
+ $ hg st -C --change .
+ A d1/c
+ d1/b
+ A d1/d
+# Errors out without --after for now
+ $ hg cp --at-rev . d1/b d1/d
+ abort: --at-rev requires --after
+ [255]
+# Errors out with non-existent destination
+ $ hg cp -A --at-rev . d1/b d1/non-existent
+ abort: d1/non-existent: copy destination does not exist in 8a9d70fa20c9
+ [255]
+# Successful invocation
+ $ hg cp -A --at-rev . d1/b d1/d
+ saved backup bundle to $TESTTMP/.hg/strip-backup/8a9d70fa20c9-973ae357-copy.hg
+# New copy is recorded, and previously recorded copy is also still there
+ $ hg st -C --change .
+ A d1/c
+ d1/b
+ A d1/d
+ d1/b
+
+Test using directory as destination
+
+ $ hg co 0
+ 0 files updated, 0 files merged, 2 files removed, 0 files unresolved
+ $ cp -R d1 d3
+ $ hg add d3
+ adding d3/a
+ adding d3/b
+ adding d3/ba
+ adding d3/d11/a1
+ $ hg ci -m 'copy d1/ to d3/'
+ created new head
+ $ hg cp -A --at-rev . d1 d3
+ abort: d3: --at-rev does not support a directory as destination
+ [255]
+
--- a/tests/test-repair-strip.t Mon Apr 13 16:30:13 2020 +0300
+++ b/tests/test-repair-strip.t Thu Apr 16 22:51:09 2020 +0530
@@ -25,7 +25,9 @@
> else
> echo "(no journal)"
> fi
- > ls .hg/store/journal >/dev/null 2>&1 && hg recover
+ > if ls .hg/store/journal >/dev/null 2>&1; then
+ > hg recover --verify
+ > fi
> ls .hg/strip-backup/* >/dev/null 2>&1 && hg unbundle -q .hg/strip-backup/*
> rm -rf .hg/strip-backup
> }
--- a/tests/test-repo-compengines.t Mon Apr 13 16:30:13 2020 +0300
+++ b/tests/test-repo-compengines.t Thu Apr 16 22:51:09 2020 +0530
@@ -22,10 +22,15 @@
Unknown compression engine to format.compression aborts
$ hg --config format.revlog-compression=unknown init unknown
- abort: compression engine unknown defined by format.revlog-compression not available
+ abort: compression engines "unknown" defined by format.revlog-compression not available
(run "hg debuginstall" to list available compression engines)
[255]
+unknown compression engine in a list with known one works fine
+
+ $ hg --config format.revlog-compression=zlib,unknown init zlib-before-unknow
+ $ hg --config format.revlog-compression=unknown,zlib init unknown-before-zlib
+
A requirement specifying an unknown compression engine results in bail
$ hg init unknownrequirement
--- a/tests/test-repo-filters-tiptoe.t Mon Apr 13 16:30:13 2020 +0300
+++ b/tests/test-repo-filters-tiptoe.t Thu Apr 16 22:51:09 2020 +0530
@@ -67,6 +67,12 @@
R a
! b
+ $ hg status --copies
+ M c
+ A d
+ R a
+ ! b
+
Getting data about the working copy parent
$ hg log -r '.' -T "{node}\n{date}\n"
--- a/tests/test-resolve.t Mon Apr 13 16:30:13 2020 +0300
+++ b/tests/test-resolve.t Thu Apr 16 22:51:09 2020 +0530
@@ -306,48 +306,40 @@
$ hg --config extensions.fakemergerecord=$TESTDIR/fakemergerecord.py fakemergerecord -x
$ hg debugmergestate
- * version 2 records
- local: 57653b9f834a4493f7240b0681efcb9ae7cab745
- other: dc77451844e37f03f5c559e3b8529b2b48d381d1
- labels:
- local: working copy
- other: merge rev
- unrecognized entry: x advisory record
- file extras: file1 (ancestorlinknode = 99726c03216e233810a2564cbc0adfe395007eac)
- file: file1 (record type "F", state "r", hash 60b27f004e454aca81b0480209cce5081ec52390)
- local path: file1 (flags "")
+ local (working copy): 57653b9f834a4493f7240b0681efcb9ae7cab745
+ other (merge rev): dc77451844e37f03f5c559e3b8529b2b48d381d1
+ file: file1 (state "r")
+ local path: file1 (hash 60b27f004e454aca81b0480209cce5081ec52390, flags "")
ancestor path: file1 (node 2ed2a3912a0b24502043eae84ee4b279c18b90dd)
other path: file1 (node 6f4310b00b9a147241b071a60c28a650827fb03d)
- file extras: file2 (ancestorlinknode = 99726c03216e233810a2564cbc0adfe395007eac)
- file: file2 (record type "F", state "u", hash cb99b709a1978bd205ab9dfd4c5aaa1fc91c7523)
- local path: file2 (flags "")
+ extra: ancestorlinknode = 99726c03216e233810a2564cbc0adfe395007eac
+ file: file2 (state "u")
+ local path: file2 (hash cb99b709a1978bd205ab9dfd4c5aaa1fc91c7523, flags "")
ancestor path: file2 (node 2ed2a3912a0b24502043eae84ee4b279c18b90dd)
other path: file2 (node 6f4310b00b9a147241b071a60c28a650827fb03d)
+ extra: ancestorlinknode = 99726c03216e233810a2564cbc0adfe395007eac
$ hg resolve -l
R file1
U file2
+test json output
+
+ $ hg debugmergestate -T json
+ [
+ {
+ "commits": [{"label": "working copy", "name": "local", "node": "57653b9f834a4493f7240b0681efcb9ae7cab745"}, {"label": "merge rev", "name": "other", "node": "dc77451844e37f03f5c559e3b8529b2b48d381d1"}],
+ "files": [{"ancestor_node": "2ed2a3912a0b24502043eae84ee4b279c18b90dd", "ancestor_path": "file1", "extras": [{"key": "ancestorlinknode", "value": "99726c03216e233810a2564cbc0adfe395007eac"}], "local_flags": "", "local_key": "60b27f004e454aca81b0480209cce5081ec52390", "local_path": "file1", "other_node": "6f4310b00b9a147241b071a60c28a650827fb03d", "other_path": "file1", "path": "file1", "state": "r"}, {"ancestor_node": "2ed2a3912a0b24502043eae84ee4b279c18b90dd", "ancestor_path": "file2", "extras": [{"key": "ancestorlinknode", "value": "99726c03216e233810a2564cbc0adfe395007eac"}], "local_flags": "", "local_key": "cb99b709a1978bd205ab9dfd4c5aaa1fc91c7523", "local_path": "file2", "other_node": "6f4310b00b9a147241b071a60c28a650827fb03d", "other_path": "file2", "path": "file2", "state": "u"}]
+ }
+ ]
+
+
insert unsupported mandatory merge record
$ hg --config extensions.fakemergerecord=$TESTDIR/fakemergerecord.py fakemergerecord -X
$ hg debugmergestate
- * version 2 records
- local: 57653b9f834a4493f7240b0681efcb9ae7cab745
- other: dc77451844e37f03f5c559e3b8529b2b48d381d1
- labels:
- local: working copy
- other: merge rev
- file extras: file1 (ancestorlinknode = 99726c03216e233810a2564cbc0adfe395007eac)
- file: file1 (record type "F", state "r", hash 60b27f004e454aca81b0480209cce5081ec52390)
- local path: file1 (flags "")
- ancestor path: file1 (node 2ed2a3912a0b24502043eae84ee4b279c18b90dd)
- other path: file1 (node 6f4310b00b9a147241b071a60c28a650827fb03d)
- file extras: file2 (ancestorlinknode = 99726c03216e233810a2564cbc0adfe395007eac)
- file: file2 (record type "F", state "u", hash cb99b709a1978bd205ab9dfd4c5aaa1fc91c7523)
- local path: file2 (flags "")
- ancestor path: file2 (node 2ed2a3912a0b24502043eae84ee4b279c18b90dd)
- other path: file2 (node 6f4310b00b9a147241b071a60c28a650827fb03d)
- unrecognized entry: X mandatory record
+ abort: unsupported merge state records: X
+ (see https://mercurial-scm.org/wiki/MergeStateRecords for more information)
+ [255]
$ hg resolve -l
abort: unsupported merge state records: X
(see https://mercurial-scm.org/wiki/MergeStateRecords for more information)
--- a/tests/test-revert-interactive.t Mon Apr 13 16:30:13 2020 +0300
+++ b/tests/test-revert-interactive.t Thu Apr 16 22:51:09 2020 +0530
@@ -475,14 +475,16 @@
> EOF
add back removed file a (Yn)? y
undeleting a
- $ ls
+ $ ls -A
+ .hg
a
$ hg rm a
$ hg revert -i<<EOF
> n
> EOF
add back removed file a (Yn)? n
- $ ls
+ $ ls -A
+ .hg
$ hg revert -a
undeleting a
$ cd ..
--- a/tests/test-revert-unknown.t Mon Apr 13 16:30:13 2020 +0300
+++ b/tests/test-revert-unknown.t Thu Apr 16 22:51:09 2020 +0530
@@ -24,6 +24,7 @@
Should show a and unknown
- $ ls
+ $ ls -A
+ .hg
a
unknown
--- a/tests/test-revert.t Mon Apr 13 16:30:13 2020 +0300
+++ b/tests/test-revert.t Thu Apr 16 22:51:09 2020 +0530
@@ -73,7 +73,8 @@
$ hg status b
b: * (glob)
- $ ls
+ $ ls -A
+ .hg
a
c
e
--- a/tests/test-revlog-ancestry.py Mon Apr 13 16:30:13 2020 +0300
+++ b/tests/test-revlog-ancestry.py Thu Apr 16 22:51:09 2020 +0530
@@ -25,11 +25,11 @@
def update(rev):
- merge.update(repo, rev, branchmerge=False, force=True)
+ merge.clean_update(repo[rev])
def merge_(rev):
- merge.update(repo, rev, branchmerge=True, force=False)
+ merge.merge(repo[rev])
if __name__ == '__main__':
--- a/tests/test-revlog-raw.py Mon Apr 13 16:30:13 2020 +0300
+++ b/tests/test-revlog-raw.py Thu Apr 16 22:51:09 2020 +0530
@@ -229,7 +229,7 @@
# Gray Code. See https://en.wikipedia.org/wiki/Gray_code
gray = lambda x: x ^ (x >> 1)
- reversegray = dict((gray(i), i) for i in range(m))
+ reversegray = {gray(i): i for i in range(m)}
# Generate (n * 2) bit gray code, yield lower n bits as X, and look for
# the next unused gray code where higher n bits equal to X.
--- a/tests/test-revset.t Mon Apr 13 16:30:13 2020 +0300
+++ b/tests/test-revset.t Thu Apr 16 22:51:09 2020 +0530
@@ -1274,6 +1274,36 @@
5
7
+test ancestors/descendants relation:
+
+ $ log 'tip#generations'
+ 0
+ 1
+ 2
+ 4
+ 8
+ 9
+
+ $ log '3#g'
+ 0
+ 1
+ 3
+ 5
+ 6
+ 7
+
+ $ hg debugrevspec -p parsed 'tip#g'
+ * parsed:
+ (relation
+ (symbol 'tip')
+ (symbol 'g'))
+ 0
+ 1
+ 2
+ 4
+ 8
+ 9
+
test ancestors/descendants relation subscript:
$ log 'tip#generations[0]'
--- a/tests/test-rollback.t Mon Apr 13 16:30:13 2020 +0300
+++ b/tests/test-rollback.t Thu Apr 16 22:51:09 2020 +0530
@@ -190,7 +190,7 @@
corrupt journal test
$ echo "foo" > .hg/store/journal
- $ hg recover
+ $ hg recover --verify
rolling back interrupted transaction
couldn't read journal entry 'foo\n'!
checking changesets
--- a/tests/test-shelve.t Mon Apr 13 16:30:13 2020 +0300
+++ b/tests/test-shelve.t Thu Apr 16 22:51:09 2020 +0530
@@ -171,6 +171,8 @@
$ hg mv b b.rename
moving b/b to b.rename/b
$ hg cp c c.copy
+ $ hg mv d ghost
+ $ rm ghost
$ hg status -C
M a/a
A b.rename/b
@@ -178,12 +180,15 @@
A c.copy
c
R b/b
+ R d
+ ! ghost
+ d
the common case - no options or filenames
$ hg shelve
shelved as default-01
- 2 files updated, 0 files merged, 2 files removed, 0 files unresolved
+ 3 files updated, 0 files merged, 2 files removed, 0 files unresolved
$ hg status -C
ensure that our shelved changes exist
@@ -254,6 +259,7 @@
A c.copy
c
R b/b
+ R d
$ hg shelve -l
(both of default.hg and default-1.hg should be still kept, because it
@@ -287,6 +293,7 @@
A c.copy
c
R b/b
+ R d
$ HGEDITOR=cat hg shelve -q -n wibble -m wat -e a
wat
@@ -306,6 +313,7 @@
A c.copy
c
R b/b
+ R d
$ hg shelve -l --stat
wibble (*) wat (glob)
a/a | 1 +
@@ -323,6 +331,7 @@
A c.copy
c
R b/b
+ R d
ensure old shelve backups are being deleted automatically
@@ -363,6 +372,7 @@
M b.rename/b
M c.copy
R b/b
+ R d
? a/a.orig
# The repository is in an unfinished *unshelve* state.
@@ -401,6 +411,7 @@
M b.rename/b
M c.copy
R b/b
+ R d
? a/a.orig
$ hg diff
diff --git a/a/a b/a/a
@@ -412,13 +423,19 @@
c
+=======
+a
- +>>>>>>> shelve: a68ec3400638 - shelve: changes to: [mq]: second.patch
+ +>>>>>>> shelve: 203c9f771d2b - shelve: changes to: [mq]: second.patch
diff --git a/b/b b/b.rename/b
rename from b/b
rename to b.rename/b
diff --git a/c b/c.copy
copy from c
copy to c.copy
+ diff --git a/d b/d
+ deleted file mode 100644
+ --- a/d
+ +++ /dev/null
+ @@ -1,1 +0,0 @@
+ -d
$ hg resolve -l
U a/a
@@ -434,6 +451,7 @@
M b.rename/b
M c.copy
R b/b
+ R d
? a/a.orig
$ hg unshelve -a
unshelve of 'default' aborted
@@ -512,6 +530,7 @@
c
A foo/foo
R b/b
+ R d
? a/a.orig
there should be no shelves left
@@ -1230,7 +1249,8 @@
record change 2/2 to 'd'?
(enter ? for help) [Ynesfdaq?] n
- $ ls
+ $ ls -A
+ .hg
b
c
e
@@ -1265,7 +1285,8 @@
$ hg status -v
A c
A d
- $ ls
+ $ ls -A
+ .hg
b
c
d
--- a/tests/test-sparse-clear.t Mon Apr 13 16:30:13 2020 +0300
+++ b/tests/test-sparse-clear.t Thu Apr 16 22:51:09 2020 +0530
@@ -28,10 +28,12 @@
Clear rules when there are includes
$ hg debugsparse --include *.py
- $ ls
+ $ ls -A
+ .hg
data.py
$ hg debugsparse --clear-rules
- $ ls
+ $ ls -A
+ .hg
base.sparse
data.py
index.html
@@ -41,12 +43,14 @@
Clear rules when there are excludes
$ hg debugsparse --exclude *.sparse
- $ ls
+ $ ls -A
+ .hg
data.py
index.html
readme.txt
$ hg debugsparse --clear-rules
- $ ls
+ $ ls -A
+ .hg
base.sparse
data.py
index.html
@@ -56,18 +60,21 @@
Clearing rules should not alter profiles
$ hg debugsparse --enable-profile webpage.sparse
- $ ls
+ $ ls -A
+ .hg
base.sparse
index.html
webpage.sparse
$ hg debugsparse --include *.py
- $ ls
+ $ ls -A
+ .hg
base.sparse
data.py
index.html
webpage.sparse
$ hg debugsparse --clear-rules
- $ ls
+ $ ls -A
+ .hg
base.sparse
index.html
webpage.sparse
--- a/tests/test-sparse-clone.t Mon Apr 13 16:30:13 2020 +0300
+++ b/tests/test-sparse-clone.t Thu Apr 16 22:51:09 2020 +0530
@@ -34,7 +34,8 @@
warning: sparse profile 'webpage.sparse' not found in rev 000000000000 - ignoring it
1 files updated, 0 files merged, 0 files removed, 0 files unresolved
$ cd clone1
- $ ls
+ $ ls -A
+ .hg
index.html
$ cd ..
@@ -44,7 +45,8 @@
updating to branch default
2 files updated, 0 files merged, 0 files removed, 0 files unresolved
$ cd clone2
- $ ls
+ $ ls -A
+ .hg
backend.sparse
webpage.sparse
$ cd ..
@@ -55,7 +57,8 @@
updating to branch default
4 files updated, 0 files merged, 0 files removed, 0 files unresolved
$ cd clone3
- $ ls
+ $ ls -A
+ .hg
backend.sparse
index.html
readme.txt
@@ -67,6 +70,7 @@
$ hg clone -q --enable-profile webpage.sparse ssh://user@dummy/myrepo clone4
warning: sparse profile 'webpage.sparse' not found in rev 000000000000 - ignoring it
$ cd clone4
- $ ls
+ $ ls -A
+ .hg
index.html
$ cd ..
--- a/tests/test-sparse-import.t Mon Apr 13 16:30:13 2020 +0300
+++ b/tests/test-sparse-import.t Thu Apr 16 22:51:09 2020 +0530
@@ -32,7 +32,8 @@
> *.py
> EOF
$ hg debugsparse --import-rules $TESTTMP/rules_to_import
- $ ls
+ $ ls -A
+ .hg
data.py
$ hg debugsparse --reset
@@ -44,7 +45,8 @@
> *.py
> EOF
$ hg debugsparse --import-rules $TESTTMP/rules_to_import
- $ ls
+ $ ls -A
+ .hg
base.sparse
data.py
webpage.sparse
@@ -65,7 +67,8 @@
> *.py
> EOF
$ hg debugsparse --import-rules $TESTTMP/rules_to_import
- $ ls
+ $ ls -A
+ .hg
base.sparse
index.html
readme.txt
--- a/tests/test-sparse-merges.t Mon Apr 13 16:30:13 2020 +0300
+++ b/tests/test-sparse-merges.t Thu Apr 16 22:51:09 2020 +0530
@@ -28,7 +28,8 @@
Verify bar was merged temporarily
- $ ls
+ $ ls -A
+ .hg
bar
foo
$ hg status
@@ -39,7 +40,8 @@
$ hg commit -m "merged"
cleaned up 1 temporarily added file(s) from the sparse checkout
$ hg status
- $ ls
+ $ ls -A
+ .hg
foo
$ hg cat -r . bar
@@ -108,7 +110,9 @@
o 0:53f3774ed939 added .hgignore
$ hg debugsparse --exclude "d"
- $ ls
+ $ ls -A
+ .hg
+ .hgignore
a
$ hg merge
@@ -168,7 +172,9 @@
o 0:53f3774ed939 added .hgignore
$ hg debugsparse --exclude "a"
- $ ls
+ $ ls -A
+ .hg
+ .hgignore
d
$ hg merge
--- a/tests/test-sparse-profiles.t Mon Apr 13 16:30:13 2020 +0300
+++ b/tests/test-sparse-profiles.t Thu Apr 16 22:51:09 2020 +0530
@@ -42,7 +42,8 @@
Verify enabling a single profile works
$ hg debugsparse --enable-profile webpage.sparse
- $ ls
+ $ ls -A
+ .hg
backend.sparse
index.html
webpage.sparse
@@ -50,7 +51,8 @@
Verify enabling two profiles works
$ hg debugsparse --enable-profile backend.sparse
- $ ls
+ $ ls -A
+ .hg
backend.sparse
data.py
index.html
@@ -59,7 +61,8 @@
Verify disabling a profile works
$ hg debugsparse --disable-profile webpage.sparse
- $ ls
+ $ ls -A
+ .hg
backend.sparse
data.py
webpage.sparse
@@ -81,20 +84,23 @@
$ echo foo >> data.py
$ hg ci -m 'edit profile'
- $ ls
+ $ ls -A
+ .hg
backend.sparse
data.py
readme.txt
webpage.sparse
$ hg up -q 0
- $ ls
+ $ ls -A
+ .hg
backend.sparse
data.py
webpage.sparse
$ hg up -q 1
- $ ls
+ $ ls -A
+ .hg
backend.sparse
data.py
readme.txt
@@ -111,7 +117,8 @@
$ echo bar >> data.py
$ hg ci -qAm "edit profile other"
- $ ls
+ $ ls -A
+ .hg
backend.sparse
index.html
webpage.sparse
@@ -129,7 +136,8 @@
[1]
$ rm *.orig
- $ ls
+ $ ls -A
+ .hg
backend.sparse
data.py
index.html
@@ -154,7 +162,8 @@
(no more unresolved files)
$ hg ci -qAm "merge profiles"
- $ ls
+ $ ls -A
+ .hg
backend.sparse
index.html
readme.txt
@@ -168,7 +177,8 @@
Verify stripping refreshes dirstate
$ hg strip -q -r .
- $ ls
+ $ ls -A
+ .hg
backend.sparse
index.html
webpage.sparse
@@ -176,7 +186,8 @@
Verify rebase conflicts pulls in the conflicting changes
$ hg up -q 1
- $ ls
+ $ ls -A
+ .hg
backend.sparse
data.py
readme.txt
@@ -192,7 +203,8 @@
unresolved conflicts (see hg resolve, then hg rebase --continue)
[1]
$ rm *.orig
- $ ls
+ $ ls -A
+ .hg
backend.sparse
data.py
index.html
@@ -217,7 +229,8 @@
continue: hg rebase --continue
$ hg rebase -q --continue
- $ ls
+ $ ls -A
+ .hg
backend.sparse
index.html
readme.txt
@@ -237,13 +250,15 @@
$ hg commit -m "delete profiles"
$ hg up -q ".^"
$ hg debugsparse --enable-profile backend.sparse
- $ ls
+ $ ls -A
+ .hg
index.html
readme.txt
$ hg up tip | grep warning
warning: sparse profile 'backend.sparse' not found in rev bfcb76de99cc - ignoring it
[1]
- $ ls
+ $ ls -A
+ .hg
data.py
index.html
readme.txt
--- a/tests/test-sparse-requirement.t Mon Apr 13 16:30:13 2020 +0300
+++ b/tests/test-sparse-requirement.t Thu Apr 16 22:51:09 2020 +0530
@@ -26,7 +26,8 @@
testonly-simplestore (reposimplestore !)
$ hg debugsparse --config extensions.sparse= --enable-profile frontend.sparse
- $ ls
+ $ ls -A
+ .hg
a.html
b.html
--- a/tests/test-sparse.t Mon Apr 13 16:30:13 2020 +0300
+++ b/tests/test-sparse.t Thu Apr 16 22:51:09 2020 +0530
@@ -22,7 +22,8 @@
$ hg up -q 0
$ hg debugsparse --include 'hide'
- $ ls
+ $ ls -A
+ .hg
hide
Absolute paths outside the repo should just be rejected
@@ -77,7 +78,8 @@
$ echo z > hide
$ hg ci -Aqm 'edit hide'
- $ ls
+ $ ls -A
+ .hg
hide
$ hg manifest
hide
@@ -86,7 +88,8 @@
Verify --reset brings files back
$ hg debugsparse --reset
- $ ls
+ $ ls -A
+ .hg
hide
show
$ cat hide
@@ -106,11 +109,13 @@
Verify update only writes included files
$ hg up -q 0
- $ ls
+ $ ls -A
+ .hg
show
$ hg up -q 1
- $ ls
+ $ ls -A
+ .hg
show
show2
@@ -144,7 +149,8 @@
$ hg debugsparse --delete -f 'show*'
pending changes to 'hide'
- $ ls
+ $ ls -A
+ .hg
hide
hide2
hide3
@@ -166,7 +172,8 @@
$ hg debugsparse --exclude -f 'hide*'
pending changes to 'hide'
- $ ls
+ $ ls -A
+ .hg
hide
hide3
show
@@ -177,7 +184,8 @@
$ hg up -qC .
TODO: add an option to purge to also purge files outside the sparse config?
$ hg purge --all --config extensions.purge=
- $ ls
+ $ ls -A
+ .hg
hide
hide3
show
@@ -216,7 +224,8 @@
rebase aborted
$ rm hide.orig
- $ ls
+ $ ls -A
+ .hg
show
show2
--- a/tests/test-ssh-bundle1.t Mon Apr 13 16:30:13 2020 +0300
+++ b/tests/test-ssh-bundle1.t Thu Apr 16 22:51:09 2020 +0530
@@ -482,7 +482,7 @@
sending upgrade request: * proto=exp-ssh-v2-0003 (glob) (sshv2 !)
sending hello command
sending between command
- remote: 440 (sshv1 !)
+ remote: 463 (sshv1 !)
protocol upgraded to exp-ssh-v2-0003 (sshv2 !)
remote: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash
remote: 1 (sshv1 !)
--- a/tests/test-ssh-proto-unbundle.t Mon Apr 13 16:30:13 2020 +0300
+++ b/tests/test-ssh-proto-unbundle.t Thu Apr 16 22:51:09 2020 +0530
@@ -56,8 +56,8 @@
i> 0000000000000000000000000000000000000000-0000000000000000000000000000000000000000
i> flush() -> None
o> readline() -> 4:
- o> 440\n
- o> readline() -> 440:
+ o> 463\n
+ o> readline() -> 463:
o> capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash\n
o> readline() -> 2:
o> 1\n
@@ -109,8 +109,8 @@
o> readline() -> 62:
o> upgraded * exp-ssh-v2-0003\n (glob)
o> readline() -> 4:
- o> 439\n
- o> read(439) -> 439: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash
+ o> 462\n
+ o> read(462) -> 462: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash
o> read(1) -> 1:
o> \n
sending unbundle command
@@ -235,8 +235,8 @@
i> 0000000000000000000000000000000000000000-0000000000000000000000000000000000000000
i> flush() -> None
o> readline() -> 4:
- o> 440\n
- o> readline() -> 440:
+ o> 463\n
+ o> readline() -> 463:
o> capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash\n
o> readline() -> 2:
o> 1\n
@@ -293,8 +293,8 @@
o> readline() -> 62:
o> upgraded * exp-ssh-v2-0003\n (glob)
o> readline() -> 4:
- o> 439\n
- o> read(439) -> 439: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash
+ o> 462\n
+ o> read(462) -> 462: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash
o> read(1) -> 1:
o> \n
sending unbundle command
@@ -359,8 +359,8 @@
i> 0000000000000000000000000000000000000000-0000000000000000000000000000000000000000
i> flush() -> None
o> readline() -> 4:
- o> 440\n
- o> readline() -> 440:
+ o> 463\n
+ o> readline() -> 463:
o> capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash\n
o> readline() -> 2:
o> 1\n
@@ -418,8 +418,8 @@
o> readline() -> 62:
o> upgraded * exp-ssh-v2-0003\n (glob)
o> readline() -> 4:
- o> 439\n
- o> read(439) -> 439: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash
+ o> 462\n
+ o> read(462) -> 462: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash
o> read(1) -> 1:
o> \n
sending unbundle command
@@ -485,8 +485,8 @@
i> 0000000000000000000000000000000000000000-0000000000000000000000000000000000000000
i> flush() -> None
o> readline() -> 4:
- o> 440\n
- o> readline() -> 440:
+ o> 463\n
+ o> readline() -> 463:
o> capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash\n
o> readline() -> 2:
o> 1\n
@@ -543,8 +543,8 @@
o> readline() -> 62:
o> upgraded * exp-ssh-v2-0003\n (glob)
o> readline() -> 4:
- o> 439\n
- o> read(439) -> 439: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash
+ o> 462\n
+ o> read(462) -> 462: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash
o> read(1) -> 1:
o> \n
sending unbundle command
@@ -609,8 +609,8 @@
i> 0000000000000000000000000000000000000000-0000000000000000000000000000000000000000
i> flush() -> None
o> readline() -> 4:
- o> 440\n
- o> readline() -> 440:
+ o> 463\n
+ o> readline() -> 463:
o> capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash\n
o> readline() -> 2:
o> 1\n
@@ -668,8 +668,8 @@
o> readline() -> 62:
o> upgraded * exp-ssh-v2-0003\n (glob)
o> readline() -> 4:
- o> 439\n
- o> read(439) -> 439: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash
+ o> 462\n
+ o> read(462) -> 462: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash
o> read(1) -> 1:
o> \n
sending unbundle command
@@ -735,8 +735,8 @@
i> 0000000000000000000000000000000000000000-0000000000000000000000000000000000000000
i> flush() -> None
o> readline() -> 4:
- o> 440\n
- o> readline() -> 440:
+ o> 463\n
+ o> readline() -> 463:
o> capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash\n
o> readline() -> 2:
o> 1\n
@@ -796,8 +796,8 @@
o> readline() -> 62:
o> upgraded * exp-ssh-v2-0003\n (glob)
o> readline() -> 4:
- o> 439\n
- o> read(439) -> 439: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash
+ o> 462\n
+ o> read(462) -> 462: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash
o> read(1) -> 1:
o> \n
sending unbundle command
@@ -865,8 +865,8 @@
i> 0000000000000000000000000000000000000000-0000000000000000000000000000000000000000
i> flush() -> None
o> readline() -> 4:
- o> 440\n
- o> readline() -> 440:
+ o> 463\n
+ o> readline() -> 463:
o> capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash\n
o> readline() -> 2:
o> 1\n
@@ -923,8 +923,8 @@
o> readline() -> 62:
o> upgraded * exp-ssh-v2-0003\n (glob)
o> readline() -> 4:
- o> 439\n
- o> read(439) -> 439: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash
+ o> 462\n
+ o> read(462) -> 462: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash
o> read(1) -> 1:
o> \n
sending unbundle command
@@ -989,8 +989,8 @@
i> 0000000000000000000000000000000000000000-0000000000000000000000000000000000000000
i> flush() -> None
o> readline() -> 4:
- o> 440\n
- o> readline() -> 440:
+ o> 463\n
+ o> readline() -> 463:
o> capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash\n
o> readline() -> 2:
o> 1\n
@@ -1050,8 +1050,8 @@
o> readline() -> 62:
o> upgraded * exp-ssh-v2-0003\n (glob)
o> readline() -> 4:
- o> 439\n
- o> read(439) -> 439: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash
+ o> 462\n
+ o> read(462) -> 462: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash
o> read(1) -> 1:
o> \n
sending unbundle command
@@ -1119,8 +1119,8 @@
i> 0000000000000000000000000000000000000000-0000000000000000000000000000000000000000
i> flush() -> None
o> readline() -> 4:
- o> 440\n
- o> readline() -> 440:
+ o> 463\n
+ o> readline() -> 463:
o> capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash\n
o> readline() -> 2:
o> 1\n
@@ -1180,8 +1180,8 @@
o> readline() -> 62:
o> upgraded * exp-ssh-v2-0003\n (glob)
o> readline() -> 4:
- o> 439\n
- o> read(439) -> 439: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash
+ o> 462\n
+ o> read(462) -> 462: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash
o> read(1) -> 1:
o> \n
sending unbundle command
@@ -1255,8 +1255,8 @@
i> 0000000000000000000000000000000000000000-0000000000000000000000000000000000000000
i> flush() -> None
o> readline() -> 4:
- o> 440\n
- o> readline() -> 440:
+ o> 463\n
+ o> readline() -> 463:
o> capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash\n
o> readline() -> 2:
o> 1\n
@@ -1314,8 +1314,8 @@
o> readline() -> 62:
o> upgraded * exp-ssh-v2-0003\n (glob)
o> readline() -> 4:
- o> 439\n
- o> read(439) -> 439: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash
+ o> 462\n
+ o> read(462) -> 462: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash
o> read(1) -> 1:
o> \n
sending unbundle command
@@ -1382,8 +1382,8 @@
i> 0000000000000000000000000000000000000000-0000000000000000000000000000000000000000
i> flush() -> None
o> readline() -> 4:
- o> 440\n
- o> readline() -> 440:
+ o> 463\n
+ o> readline() -> 463:
o> capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash\n
o> readline() -> 2:
o> 1\n
@@ -1441,8 +1441,8 @@
o> readline() -> 62:
o> upgraded * exp-ssh-v2-0003\n (glob)
o> readline() -> 4:
- o> 439\n
- o> read(439) -> 439: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash
+ o> 462\n
+ o> read(462) -> 462: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash
o> read(1) -> 1:
o> \n
sending unbundle command
@@ -1511,8 +1511,8 @@
i> 0000000000000000000000000000000000000000-0000000000000000000000000000000000000000
i> flush() -> None
o> readline() -> 4:
- o> 440\n
- o> readline() -> 440:
+ o> 463\n
+ o> readline() -> 463:
o> capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash\n
o> readline() -> 2:
o> 1\n
@@ -1572,8 +1572,8 @@
o> readline() -> 62:
o> upgraded * exp-ssh-v2-0003\n (glob)
o> readline() -> 4:
- o> 439\n
- o> read(439) -> 439: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash
+ o> 462\n
+ o> read(462) -> 462: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash
o> read(1) -> 1:
o> \n
sending unbundle command
@@ -1650,8 +1650,8 @@
i> 0000000000000000000000000000000000000000-0000000000000000000000000000000000000000
i> flush() -> None
o> readline() -> 4:
- o> 440\n
- o> readline() -> 440:
+ o> 463\n
+ o> readline() -> 463:
o> capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash\n
o> readline() -> 2:
o> 1\n
@@ -1715,8 +1715,8 @@
o> readline() -> 62:
o> upgraded * exp-ssh-v2-0003\n (glob)
o> readline() -> 4:
- o> 439\n
- o> read(439) -> 439: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash
+ o> 462\n
+ o> read(462) -> 462: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash
o> read(1) -> 1:
o> \n
sending unbundle command
@@ -1788,8 +1788,8 @@
i> 0000000000000000000000000000000000000000-0000000000000000000000000000000000000000
i> flush() -> None
o> readline() -> 4:
- o> 440\n
- o> readline() -> 440:
+ o> 463\n
+ o> readline() -> 463:
o> capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash\n
o> readline() -> 2:
o> 1\n
@@ -1843,8 +1843,8 @@
o> readline() -> 62:
o> upgraded * exp-ssh-v2-0003\n (glob)
o> readline() -> 4:
- o> 439\n
- o> read(439) -> 439: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash
+ o> 462\n
+ o> read(462) -> 462: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash
o> read(1) -> 1:
o> \n
sending unbundle command
@@ -1918,8 +1918,8 @@
i> 0000000000000000000000000000000000000000-0000000000000000000000000000000000000000
i> flush() -> None
o> readline() -> 4:
- o> 440\n
- o> readline() -> 440:
+ o> 463\n
+ o> readline() -> 463:
o> capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash\n
o> readline() -> 2:
o> 1\n
@@ -1977,8 +1977,8 @@
o> readline() -> 62:
o> upgraded * exp-ssh-v2-0003\n (glob)
o> readline() -> 4:
- o> 439\n
- o> read(439) -> 439: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash
+ o> 462\n
+ o> read(462) -> 462: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash
o> read(1) -> 1:
o> \n
sending unbundle command
--- a/tests/test-ssh-proto.t Mon Apr 13 16:30:13 2020 +0300
+++ b/tests/test-ssh-proto.t Thu Apr 16 22:51:09 2020 +0530
@@ -64,7 +64,7 @@
devel-peer-request: pairs: 81 bytes
sending hello command
sending between command
- remote: 440
+ remote: 463
remote: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash
remote: 1
devel-peer-request: protocaps
@@ -86,8 +86,8 @@
i> write(6) -> 6:
i> hello\n
o> readline() -> 4:
- o> 440\n
- o> readline() -> 440:
+ o> 463\n
+ o> readline() -> 463:
o> capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash\n
`hg debugserve --sshstdio` works
@@ -96,7 +96,7 @@
$ hg debugserve --sshstdio << EOF
> hello
> EOF
- 440
+ 463
capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash
I/O logging works
@@ -106,24 +106,24 @@
> EOF
e> flush() -> None
o> write(4) -> 4:
- o> 440\n
- o> write(440) -> 440:
+ o> 463\n
+ o> write(463) -> 463:
o> capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash\n
- 440
+ 463
capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash
o> flush() -> None
$ hg debugserve --sshstdio --logiofile $TESTTMP/io << EOF
> hello
> EOF
- 440
+ 463
capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash
$ cat $TESTTMP/io
e> flush() -> None
o> write(4) -> 4:
- o> 440\n
- o> write(440) -> 440:
+ o> 463\n
+ o> write(463) -> 463:
o> capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash\n
o> flush() -> None
@@ -149,8 +149,8 @@
i> write(6) -> 6:
i> hello\n
o> readline() -> 4:
- o> 440\n
- o> readline() -> 440:
+ o> 463\n
+ o> readline() -> 463:
o> capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash\n
i> write(98) -> 98:
i> between\n
@@ -187,7 +187,7 @@
remote: banner: line 7
remote: banner: line 8
remote: banner: line 9
- remote: 440
+ remote: 463
remote: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash
remote: 1
devel-peer-request: protocaps
@@ -245,8 +245,8 @@
o> readline() -> 15:
o> banner: line 9\n
o> readline() -> 4:
- o> 440\n
- o> readline() -> 440:
+ o> 463\n
+ o> readline() -> 463:
o> capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash\n
i> write(98) -> 98:
i> between\n
@@ -297,12 +297,12 @@
i> write(6) -> 6:
i> hello\n
o> readline() -> 4:
- o> 440\n
+ o> 463\n
i> write(98) -> 98:
i> between\n
i> pairs 81\n
i> 0000000000000000000000000000000000000000-0000000000000000000000000000000000000000
- o> readline() -> 440:
+ o> readline() -> 463:
o> capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash\n
o> readline() -> 2:
o> 1\n
@@ -316,7 +316,7 @@
sending hello command
sending between command
remote: 0
- remote: 440
+ remote: 463
remote: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash
remote: 1
devel-peer-request: protocaps
@@ -365,8 +365,8 @@
i> write(6) -> 6:
i> hello\n
o> readline() -> 4:
- o> 440\n
- o> readline() -> 440:
+ o> 463\n
+ o> readline() -> 463:
o> capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash\n
i> write(98) -> 98:
i> between\n
@@ -390,7 +390,7 @@
remote: 0
remote: 0
remote: 0
- remote: 440
+ remote: 463
remote: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash
remote: 1
devel-peer-request: protocaps
@@ -447,8 +447,8 @@
i> write(6) -> 6:
i> hello\n
o> readline() -> 4:
- o> 440\n
- o> readline() -> 440:
+ o> 463\n
+ o> readline() -> 463:
o> capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash\n
i> write(98) -> 98:
i> between\n
@@ -494,8 +494,8 @@
i> write(6) -> 6:
i> hello\n
o> readline() -> 4:
- o> 440\n
- o> readline() -> 440:
+ o> 463\n
+ o> readline() -> 463:
o> capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash\n
i> write(98) -> 98:
i> between\n
@@ -539,8 +539,8 @@
i> write(6) -> 6:
i> hello\n
o> readline() -> 4:
- o> 440\n
- o> readline() -> 440:
+ o> 463\n
+ o> readline() -> 463:
o> capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash\n
i> write(98) -> 98:
i> between\n
@@ -609,8 +609,8 @@
i> write(6) -> 6:
i> hello\n
o> readline() -> 4:
- o> 440\n
- o> readline() -> 440:
+ o> 463\n
+ o> readline() -> 463:
o> capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash\n
Incomplete dictionary send
@@ -691,8 +691,8 @@
i> write(6) -> 6:
i> hello\n
o> readline() -> 4:
- o> 440\n
- o> readline() -> 440:
+ o> 463\n
+ o> readline() -> 463:
o> capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash\n
i> write(98) -> 98:
i> between\n
@@ -725,8 +725,8 @@
i> write(6) -> 6:
i> hello\n
o> readline() -> 4:
- o> 440\n
- o> readline() -> 440:
+ o> 463\n
+ o> readline() -> 463:
o> capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash\n
i> write(98) -> 98:
i> between\n
@@ -768,8 +768,8 @@
i> write(6) -> 6:
i> hello\n
o> readline() -> 4:
- o> 440\n
- o> readline() -> 440:
+ o> 463\n
+ o> readline() -> 463:
o> capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash\n
i> write(98) -> 98:
i> between\n
@@ -797,8 +797,8 @@
i> write(6) -> 6:
i> hello\n
o> readline() -> 4:
- o> 440\n
- o> readline() -> 440:
+ o> 463\n
+ o> readline() -> 463:
o> capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash\n
i> write(105) -> 105:
i> between\n
@@ -838,8 +838,8 @@
i> pairs 81\n
i> 0000000000000000000000000000000000000000-0000000000000000000000000000000000000000
o> readline() -> 4:
- o> 440\n
- o> readline() -> 440:
+ o> 463\n
+ o> readline() -> 463:
o> capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash\n
o> readline() -> 2:
o> 1\n
@@ -887,8 +887,8 @@
o> readline() -> 41:
o> 68986213bd4485ea51533535e3fc9e78007a711f\n
o> readline() -> 4:
- o> 440\n
- o> readline() -> 440:
+ o> 463\n
+ o> readline() -> 463:
o> capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash\n
o> readline() -> 2:
o> 1\n
@@ -914,7 +914,7 @@
o> readline() -> 41:
o> 68986213bd4485ea51533535e3fc9e78007a711f\n
o> readline() -> 4:
- o> 440\n
+ o> 463\n
Send an upgrade request to a server that doesn't support that command
@@ -943,8 +943,8 @@
i> pairs 81\n
i> 0000000000000000000000000000000000000000-0000000000000000000000000000000000000000
o> readline() -> 4:
- o> 440\n
- o> readline() -> 440:
+ o> 463\n
+ o> readline() -> 463:
o> capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash\n
o> readline() -> 2:
o> 1\n
@@ -962,7 +962,7 @@
sending hello command
sending between command
remote: 0
- remote: 440
+ remote: 463
remote: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash
remote: 1
devel-peer-request: protocaps
@@ -1005,8 +1005,8 @@
o> readline() -> 44:
o> upgraded this-is-some-token exp-ssh-v2-0003\n
o> readline() -> 4:
- o> 439\n
- o> readline() -> 440:
+ o> 462\n
+ o> readline() -> 463:
o> capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash\n
$ cd ..
@@ -1062,6 +1062,8 @@
changegroup
01
02
+ checkheads
+ related
digests
md5
sha1
@@ -1112,14 +1114,14 @@
o> readline() -> 44:
o> upgraded this-is-some-token exp-ssh-v2-0003\n
o> readline() -> 4:
- o> 439\n
- o> readline() -> 440:
+ o> 462\n
+ o> readline() -> 463:
o> capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash\n
i> write(6) -> 6:
i> hello\n
o> readline() -> 4:
- o> 424\n
- o> readline() -> 424:
+ o> 447\n
+ o> readline() -> 447:
o> capabilities: branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash\n
Multiple upgrades is not allowed
@@ -1150,8 +1152,8 @@
o> readline() -> 44:
o> upgraded this-is-some-token exp-ssh-v2-0003\n
o> readline() -> 4:
- o> 439\n
- o> readline() -> 440:
+ o> 462\n
+ o> readline() -> 463:
o> capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash\n
i> write(45) -> 45:
i> upgrade another-token proto=irrelevant\n
@@ -1222,8 +1224,8 @@
i> write(6) -> 6:
i> hello\n
o> readline() -> 4:
- o> 440\n
- o> readline() -> 440:
+ o> 463\n
+ o> readline() -> 463:
o> capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash\n
i> write(98) -> 98:
i> between\n
@@ -1341,8 +1343,8 @@
i> 0000000000000000000000000000000000000000-0000000000000000000000000000000000000000
i> flush() -> None
o> readline() -> 4:
- o> 440\n
- o> readline() -> 440:
+ o> 463\n
+ o> readline() -> 463:
o> capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash\n
o> readline() -> 2:
o> 1\n
@@ -1379,8 +1381,8 @@
o> readline() -> 62:
o> upgraded * exp-ssh-v2-0003\n (glob)
o> readline() -> 4:
- o> 439\n
- o> read(439) -> 439: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash
+ o> 462\n
+ o> read(462) -> 462: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash
o> read(1) -> 1:
o> \n
sending listkeys command
@@ -1429,8 +1431,8 @@
i> 0000000000000000000000000000000000000000-0000000000000000000000000000000000000000
i> flush() -> None
o> readline() -> 4:
- o> 440\n
- o> readline() -> 440:
+ o> 463\n
+ o> readline() -> 463:
o> capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash\n
o> readline() -> 2:
o> 1\n
@@ -1459,8 +1461,8 @@
o> readline() -> 62:
o> upgraded * exp-ssh-v2-0003\n (glob)
o> readline() -> 4:
- o> 439\n
- o> read(439) -> 439: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash
+ o> 462\n
+ o> read(462) -> 462: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash
o> read(1) -> 1:
o> \n
sending listkeys command
@@ -1490,8 +1492,8 @@
i> 0000000000000000000000000000000000000000-0000000000000000000000000000000000000000
i> flush() -> None
o> readline() -> 4:
- o> 440\n
- o> readline() -> 440:
+ o> 463\n
+ o> readline() -> 463:
o> capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash\n
o> readline() -> 2:
o> 1\n
@@ -1523,8 +1525,8 @@
o> readline() -> 62:
o> upgraded * exp-ssh-v2-0003\n (glob)
o> readline() -> 4:
- o> 439\n
- o> read(439) -> 439: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash
+ o> 462\n
+ o> read(462) -> 462: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash
o> read(1) -> 1:
o> \n
sending listkeys command
@@ -1557,8 +1559,8 @@
i> 0000000000000000000000000000000000000000-0000000000000000000000000000000000000000
i> flush() -> None
o> readline() -> 4:
- o> 440\n
- o> readline() -> 440:
+ o> 463\n
+ o> readline() -> 463:
o> capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash\n
o> readline() -> 2:
o> 1\n
@@ -1593,8 +1595,8 @@
o> readline() -> 62:
o> upgraded * exp-ssh-v2-0003\n (glob)
o> readline() -> 4:
- o> 439\n
- o> read(439) -> 439: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash
+ o> 462\n
+ o> read(462) -> 462: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash
o> read(1) -> 1:
o> \n
sending listkeys command
@@ -1632,8 +1634,8 @@
i> 0000000000000000000000000000000000000000-0000000000000000000000000000000000000000
i> flush() -> None
o> readline() -> 4:
- o> 440\n
- o> readline() -> 440:
+ o> 463\n
+ o> readline() -> 463:
o> capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash\n
o> readline() -> 2:
o> 1\n
@@ -1672,8 +1674,8 @@
o> readline() -> 62:
o> upgraded * exp-ssh-v2-0003\n (glob)
o> readline() -> 4:
- o> 439\n
- o> read(439) -> 439: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash
+ o> 462\n
+ o> read(462) -> 462: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash
o> read(1) -> 1:
o> \n
sending pushkey command
@@ -1724,8 +1726,8 @@
i> 0000000000000000000000000000000000000000-0000000000000000000000000000000000000000
i> flush() -> None
o> readline() -> 4:
- o> 440\n
- o> readline() -> 440:
+ o> 463\n
+ o> readline() -> 463:
o> capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash\n
o> readline() -> 2:
o> 1\n
@@ -1757,8 +1759,8 @@
o> readline() -> 62:
o> upgraded * exp-ssh-v2-0003\n (glob)
o> readline() -> 4:
- o> 439\n
- o> read(439) -> 439: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash
+ o> 462\n
+ o> read(462) -> 462: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash
o> read(1) -> 1:
o> \n
sending listkeys command
@@ -1807,8 +1809,8 @@
i> 0000000000000000000000000000000000000000-0000000000000000000000000000000000000000
i> flush() -> None
o> readline() -> 4:
- o> 440\n
- o> readline() -> 440:
+ o> 463\n
+ o> readline() -> 463:
o> capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash\n
o> readline() -> 2:
o> 1\n
@@ -1845,8 +1847,8 @@
o> readline() -> 62:
o> upgraded * exp-ssh-v2-0003\n (glob)
o> readline() -> 4:
- o> 439\n
- o> read(439) -> 439: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash
+ o> 462\n
+ o> read(462) -> 462: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash
o> read(1) -> 1:
o> \n
sending listkeys command
@@ -1884,8 +1886,8 @@
i> 0000000000000000000000000000000000000000-0000000000000000000000000000000000000000
i> flush() -> None
o> readline() -> 4:
- o> 440\n
- o> readline() -> 440:
+ o> 463\n
+ o> readline() -> 463:
o> capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash\n
o> readline() -> 2:
o> 1\n
@@ -1920,8 +1922,8 @@
o> readline() -> 62:
o> upgraded * exp-ssh-v2-0003\n (glob)
o> readline() -> 4:
- o> 439\n
- o> read(439) -> 439: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash
+ o> 462\n
+ o> read(462) -> 462: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash
o> read(1) -> 1:
o> \n
sending listkeys command
@@ -1957,8 +1959,8 @@
i> 0000000000000000000000000000000000000000-0000000000000000000000000000000000000000
i> flush() -> None
o> readline() -> 4:
- o> 440\n
- o> readline() -> 440:
+ o> 463\n
+ o> readline() -> 463:
o> capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash\n
o> readline() -> 2:
o> 1\n
@@ -1990,8 +1992,8 @@
o> readline() -> 62:
o> upgraded * exp-ssh-v2-0003\n (glob)
o> readline() -> 4:
- o> 439\n
- o> read(439) -> 439: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash
+ o> 462\n
+ o> read(462) -> 462: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash
o> read(1) -> 1:
o> \n
sending listkeys command
@@ -2028,8 +2030,8 @@
i> 0000000000000000000000000000000000000000-0000000000000000000000000000000000000000
i> flush() -> None
o> readline() -> 4:
- o> 440\n
- o> readline() -> 440:
+ o> 463\n
+ o> readline() -> 463:
o> capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash\n
o> readline() -> 2:
o> 1\n
@@ -2069,8 +2071,8 @@
o> readline() -> 62:
o> upgraded * exp-ssh-v2-0003\n (glob)
o> readline() -> 4:
- o> 439\n
- o> read(439) -> 439: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash
+ o> 462\n
+ o> read(462) -> 462: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash
o> read(1) -> 1:
o> \n
sending pushkey command
@@ -2135,8 +2137,8 @@
i> 0000000000000000000000000000000000000000-0000000000000000000000000000000000000000
i> flush() -> None
o> readline() -> 4:
- o> 440\n
- o> readline() -> 440:
+ o> 463\n
+ o> readline() -> 463:
o> capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash\n
o> readline() -> 2:
o> 1\n
@@ -2175,8 +2177,8 @@
o> readline() -> 62:
o> upgraded * exp-ssh-v2-0003\n (glob)
o> readline() -> 4:
- o> 439\n
- o> read(439) -> 439: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash
+ o> 462\n
+ o> read(462) -> 462: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash
o> read(1) -> 1:
o> \n
sending batch with 3 sub-commands
--- a/tests/test-ssh.t Mon Apr 13 16:30:13 2020 +0300
+++ b/tests/test-ssh.t Thu Apr 16 22:51:09 2020 +0530
@@ -327,9 +327,9 @@
remote: adding changesets
remote: adding manifests
remote: adding file changes
- remote: added 1 changesets with 1 changes to 1 files
remote: KABOOM
remote: KABOOM IN PROCESS
+ remote: added 1 changesets with 1 changes to 1 files
#endif
@@ -513,7 +513,7 @@
devel-peer-request: pairs: 81 bytes
sending hello command
sending between command
- remote: 440 (sshv1 !)
+ remote: 463 (sshv1 !)
protocol upgraded to exp-ssh-v2-0003 (sshv2 !)
remote: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash
remote: 1 (sshv1 !)
@@ -532,7 +532,7 @@
no changes found
devel-peer-request: getbundle
devel-peer-request: bookmarks: 1 bytes
- devel-peer-request: bundlecaps: 266 bytes
+ devel-peer-request: bundlecaps: 289 bytes
devel-peer-request: cg: 1 bytes
devel-peer-request: common: 122 bytes
devel-peer-request: heads: 122 bytes
--- a/tests/test-strip.t Mon Apr 13 16:30:13 2020 +0300
+++ b/tests/test-strip.t Thu Apr 16 22:51:09 2020 +0530
@@ -591,6 +591,18 @@
phases: 2 draft
mq: 3 unapplied
+ $ hg log --graph
+ @ changeset: 1:76dcf9fab855
+ | tag: tip
+ | user: test
+ | date: Thu Jan 01 00:00:00 1970 +0000
+ | summary: b
+ |
+ % changeset: 0:9ab35a2d17cb
+ user: test
+ date: Thu Jan 01 00:00:00 1970 +0000
+ summary: a
+
$ echo c > b
$ hg strip tip
abort: uncommitted changes
--- a/tests/test-subrepo-deep-nested-change.t Mon Apr 13 16:30:13 2020 +0300
+++ b/tests/test-subrepo-deep-nested-change.t Thu Apr 16 22:51:09 2020 +0530
@@ -355,6 +355,11 @@
R sub1/sub2/folder/test.txt
! sub1/.hgsub
? sub1/x.hgsub
+ $ hg status -R sub1
+ warning: subrepo spec file 'sub1/.hgsub' not found
+ R .hgsubstate
+ ! .hgsub
+ ? x.hgsub
$ mv sub1/x.hgsub sub1/.hgsub
$ hg update -Cq
$ touch sub1/foo
--- a/tests/test-subrepo-missing.t Mon Apr 13 16:30:13 2020 +0300
+++ b/tests/test-subrepo-missing.t Thu Apr 16 22:51:09 2020 +0530
@@ -50,7 +50,8 @@
$ hg st
warning: subrepo spec file '.hgsub' not found
! .hgsub
- $ ls subrepo
+ $ ls -A subrepo
+ .hg
a
delete .hgsubstate and update
@@ -65,7 +66,8 @@
use (c)hanged version or leave (d)eleted? c
1 files updated, 0 files merged, 0 files removed, 0 files unresolved
$ hg st
- $ ls subrepo
+ $ ls -A subrepo
+ .hg
a
Enable obsolete
@@ -134,7 +136,10 @@
1: repository $TESTTMP/repo/subrepo not found
3: repository $TESTTMP/repo/subrepo not found
4: repository $TESTTMP/repo/subrepo not found
- $ ls
+ $ ls -A
+ .hg
+ .hgsub
+ .hgsubstate
b
$ mv b subrepo
--- a/tests/test-subrepo.t Mon Apr 13 16:30:13 2020 +0300
+++ b/tests/test-subrepo.t Thu Apr 16 22:51:09 2020 +0530
@@ -498,7 +498,10 @@
abort: subrepos not enabled
(see 'hg help config.subrepos' for details)
[255]
- $ ls tc2
+ $ ls -A tc2
+ .hg
+ .hgsub
+ .hgsubstate
a
$ hg clone t tc3 --config subrepos.allowed=false
@@ -506,7 +509,10 @@
abort: subrepos not enabled
(see 'hg help config.subrepos' for details)
[255]
- $ ls tc3
+ $ ls -A tc3
+ .hg
+ .hgsub
+ .hgsubstate
a
And again with just the hg type disabled
@@ -516,7 +522,10 @@
abort: hg subrepos not allowed
(see 'hg help config.subrepos' for details)
[255]
- $ ls tc4
+ $ ls -A tc4
+ .hg
+ .hgsub
+ .hgsubstate
a
$ hg clone t tc5 --config subrepos.hg:allowed=false
@@ -524,7 +533,10 @@
abort: hg subrepos not allowed
(see 'hg help config.subrepos' for details)
[255]
- $ ls tc5
+ $ ls -A tc5
+ .hg
+ .hgsub
+ .hgsubstate
a
push
--- a/tests/test-tags.t Mon Apr 13 16:30:13 2020 +0300
+++ b/tests/test-tags.t Thu Apr 16 22:51:09 2020 +0530
@@ -103,6 +103,9 @@
0000: ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff |................|
0010: ff ff ff ff ff ff ff ff b9 15 46 36 26 b7 b4 a7 |..........F6&...|
0020: 73 e0 9e e3 c5 2f 51 0e 19 e0 5e 1f f9 66 d8 59 |s..../Q...^..f.Y|
+ $ hg debugtagscache
+ 0 acb14030fe0a21b60322c440ad2d20cf7685a376 missing/invalid
+ 1 b9154636be938d3d431e75a7c906504a079bfe07 26b7b4a773e09ee3c52f510e19e05e1ff966d859
Repeat with cold tag cache:
@@ -368,6 +371,24 @@
1970/01/01 00:00:00 bob @8dbfe60eff306a54259cfe007db9e330e7ecf866 (5000)> tags exited 0 after * seconds (glob)
1970/01/01 00:00:00 bob @8dbfe60eff306a54259cfe007db9e330e7ecf866 (5000)> blackbox -l 6
+On junk data + missing cache entries, hg also overwrites the junk.
+
+ $ rm -f .hg/cache/tags2-visible
+ >>> import os
+ >>> with open(".hg/cache/hgtagsfnodes1", "ab+") as fp:
+ ... fp.seek(-10, os.SEEK_END) and None
+ ... fp.truncate() and None
+
+ $ hg debugtagscache | tail -2
+ 4 0c192d7d5e6b78a714de54a2e9627952a877e25a 0c04f2a8af31de17fab7422878ee5a2dadbc943d
+ 5 8dbfe60eff306a54259cfe007db9e330e7ecf866 missing/invalid
+ $ hg tags
+ tip 5:8dbfe60eff30
+ bar 1:78391a272241
+ $ hg debugtagscache | tail -2
+ 4 0c192d7d5e6b78a714de54a2e9627952a877e25a 0c04f2a8af31de17fab7422878ee5a2dadbc943d
+ 5 8dbfe60eff306a54259cfe007db9e330e7ecf866 0c04f2a8af31de17fab7422878ee5a2dadbc943d
+
#if unix-permissions no-root
Errors writing to .hgtags fnodes cache are silently ignored
--- a/tests/test-template-functions.t Mon Apr 13 16:30:13 2020 +0300
+++ b/tests/test-template-functions.t Thu Apr 16 22:51:09 2020 +0530
@@ -820,6 +820,8 @@
{"branch": "default"}
$ hg log -r0 -T '{date|json}\n'
[0, 0]
+ $ hg log -r0 -T '{revset(":")|json}\n'
+ [0, 1]
Test json filter applied to map result:
@@ -1263,6 +1265,41 @@
5:13207e5a10d9fd28ec424934298e176197f2c67f,
4:bbe44766e73d5f11ed2177f1838de10c53ef3e74
+for historical reasons, revset() supports old-style list template
+
+ $ hg log -T '{revset(":")}\n' -l1 \
+ > --config templates.start_revisions='"["' \
+ > --config templates.end_revisions='"]"' \
+ > --config templates.revision='"{revision}, "' \
+ > --config templates.last_revision='"{revision}"'
+ [0, 1, 2]
+ $ hg log -T '{revset(":") % " {revision}"}\n' -l1
+ 0 1 2
+
+but a filtered one doesn't
+
+ $ hg log -T '{filter(revset(":"), ifeq(rev, 1, "", "y"))}\n' -l1 \
+ > --config templates.start_revisions='"["' \
+ > --config templates.end_revisions='"]"' \
+ > --config templates.revision='"{revision}, "' \
+ > --config templates.last_revision='"{revision}"'
+ 0 2
+ $ hg log -T '{filter(revset(":"), ifeq(rev, 1, "", "y")) % "x{revision}"}\n' -l1
+ xx
+
+%d parameter handling:
+
+ $ hg log -T '{revset("%d", rev)}\n' -r'wdir()'
+ 2147483647
+ $ hg log -T '{revset("%d", rev)}\n' -r'null'
+ -1
+ $ hg log -T '{revset("%d", rev + 1)}\n' -r'tip'
+ abort: unknown revision '3'!
+ [255]
+ $ hg log -T '{revset("%d", rev - 1)}\n' -r'null'
+ abort: unknown revision '-2'!
+ [255]
+
Invalid arguments passed to revset()
$ hg log -T '{revset("%whatever", 0)}\n'
@@ -1305,6 +1342,13 @@
hg: parse error: invalid argument for revspec
[255]
+Invalid operation on revset()
+
+ $ hg log -T '{get(revset(":"), "foo")}\n'
+ hg: parse error: not a dictionary
+ (get() expects a dict as first argument)
+ [255]
+
Test files function
$ hg log -T "{rev}\n{join(files('*'), '\n')}\n"
@@ -1555,6 +1599,32 @@
}
]
+ $ hg log -T "{revset(':')|cbor}" -R a -l1 | "$PYTHON" "$TESTTMP/decodecbor.py"
+ [
+ [
+ 0,
+ 1,
+ 2,
+ 3,
+ 4,
+ 5,
+ 6,
+ 7,
+ 8,
+ 9,
+ 10
+ ]
+ ]
+
+ $ hg log -T "{dict(foo=revset('.'))|cbor}" -R a -l1 | "$PYTHON" "$TESTTMP/decodecbor.py"
+ [
+ {
+ 'foo': [
+ 10
+ ]
+ }
+ ]
+
json filter should escape HTML tags so that the output can be embedded in hgweb:
$ hg log -T "{'<foo@example.org>'|json}\n" -R a -l1
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-template-graph.t Thu Apr 16 22:51:09 2020 +0530
@@ -0,0 +1,440 @@
+Test graph-related template functions
+=====================================
+
+ $ cat <<'EOF' >> $HGRCPATH
+ > [extensions]
+ > drawdag = $RUNTESTDIR/drawdag.py
+ > EOF
+
+ $ hg init a
+ $ cd a
+
+ $ hg debugdrawdag <<'EOF'
+ > l
+ > / \
+ > | k
+ > | |\
+ > | | j
+ > | | |
+ > i | |
+ > |\ | |
+ > h | | |
+ > | | | |
+ > | g | |
+ > | | | |
+ > f | | |
+ > | |/ /
+ > | e |
+ > | |/
+ > | d
+ > |/|
+ > c |
+ > | |
+ > b |
+ > |
+ > a
+ > EOF
+
+ $ hg log -Gq -T'{rev} {tags}\n'
+ o 11 l tip
+ |\
+ | o 10 i
+ | |\
+ o \ \ 9 k
+ |\ \ \
+ +-----o 8 g
+ | | |
+ | o | 7 j
+ | | |
+ | | o 6 h
+ | | |
+ o | | 5 e
+ |/ /
+ | o 4 f
+ | |
+ o | 3 d
+ |\|
+ | o 2 c
+ | |
+ | o 1 b
+ |
+ o 0 a
+
+
+ $ cd ..
+
+Create repository containing merges of p1 > p2:
+
+ $ hg init named-branch-order
+ $ cd named-branch-order
+
+ $ hg branch -q b0
+ $ hg ci -m 0
+ $ hg up -q null
+ $ hg branch -q b1
+ $ hg ci -m 1
+ $ hg up -q null
+ $ hg branch -q b2
+ $ hg ci -m 2
+ $ hg merge -q 1
+ $ hg ci -m 3
+ $ hg ci -m 4 --config ui.allowemptycommit=true
+ $ hg merge -q 0
+ $ hg ci -m 5
+ $ hg ci -m 6 --config ui.allowemptycommit=true
+ $ hg up -q 1
+ $ hg branch -q b7
+ $ hg ci -m 7
+ $ hg ci -m 8 --config ui.allowemptycommit=true
+ $ hg up -q 6
+ $ hg ci -m 9 --config ui.allowemptycommit=true
+ $ hg up -q 8
+ $ hg merge -q 9
+ $ hg ci -m 10
+
+ $ hg log -Gq -T'{rev} {branch} -> {p1.rev} {p2.rev}\n'
+ @ 10 b7 -> 8 9
+ |\
+ | o 9 b2 -> 6 -1
+ | |
+ o | 8 b7 -> 7 -1
+ | |
+ o | 7 b7 -> 1 -1
+ | |
+ | o 6 b2 -> 5 -1
+ | |
+ | o 5 b2 -> 4 0
+ | |\
+ | | o 4 b2 -> 3 -1
+ | | |
+ +---o 3 b2 -> 2 1
+ | | |
+ | | o 2 b2 -> -1 -1
+ | |
+ o | 1 b1 -> -1 -1
+ /
+ o 0 b0 -> -1 -1
+
+
+ $ cd ..
+
+subsetparents
+-------------
+
+ $ cd a
+
+ $ hg log -Gq -T '{rev} {tags}: {subsetparents(rev, revset("c+i"))}\n' -r 'c+i'
+ o 10 i: 2
+ :
+ o 2 c:
+ |
+ ~
+
+ $ hg log -Gq -T '{rev} {tags}: {subsetparents(rev, revset("c+h+i"))}\n' -r 'c+h+i'
+ o 10 i: 6
+ |\
+ o : 6 h: 2
+ :/
+ o 2 c:
+ |
+ ~
+
+ $ hg log -Gq -T '{rev} {tags}: {subsetparents(rev, revset("c+h+l"))}\n' -r 'c+h+l'
+ o 11 l tip: 6
+ :\
+ : o 6 h: 2
+ :/
+ o 2 c:
+ |
+ ~
+
+ $ hg log -Gq -T '{rev} {tags}: {subsetparents(rev, revset("c+f+l"))}\n' -r 'c+f+l'
+ o 11 l tip: 4
+ :\
+ : o 4 f: 2
+ :/
+ o 2 c:
+ |
+ ~
+
+ $ hg log -Gq -T '{rev} {tags}: {subsetparents(rev, revset("c+h+i+k"))}\n' -r 'c+h+i+k'
+ o 10 i: 6
+ |\
+ | : o 9 k: 2
+ | :/
+ o : 6 h: 2
+ :/
+ o 2 c:
+ |
+ ~
+
+ $ hg log -Gq -T '{rev} {tags}: {subsetparents(rev, revset("c+d+h+i+k"))}\n' -r 'c+d+h+i+k'
+ o 10 i: 6 3
+ |\
+ | : o 9 k: 3
+ | :/
+ o : 6 h: 2
+ : :
+ : o 3 d: 2
+ :/|
+ : ~
+ o 2 c:
+ |
+ ~
+
+ $ hg log -Gq -T '{rev} {tags}: {subsetparents(rev, revset("c+j+k+i"))}\n' -r 'c+j+k+i'
+ o 10 i: 2
+ :
+ : o 9 k: 7
+ :/|
+ : o 7 j: 2
+ :/
+ o 2 c:
+ |
+ ~
+
+ $ hg log -Gq -T '{rev} {tags}: {subsetparents(rev, revset("c+e+f+j"))}\n' -r 'c+e+f+j'
+ o 7 j: 2
+ :
+ : o 5 e: 2
+ :/
+ : o 4 f: 2
+ :/
+ o 2 c:
+ |
+ ~
+
+ $ hg log -Gq -T '{rev} {tags}: {subsetparents(rev, revset("b+e+f+j"))}\n' -r 'b+e+f+j'
+ o 7 j: 1
+ :
+ : o 5 e: 1
+ :/
+ : o 4 f: 1
+ :/
+ o 1 b:
+
+
+ $ hg log -Gq -T '{rev} {tags}: {subsetparents(rev, revset("a+c+f+g+j+l"))}\n' -r 'a+c+f+g+j+l'
+ o 11 l tip: 4 8 7
+ :\
+ : \
+ : :\
+ : : \
+ : : :\
+ : : : \
+ : : : :\
+ : o---+ : 8 g: 0 2
+ : :/ / /
+ : +---o 7 j: 0 2
+ : : :/
+ o---+ 4 f: 2
+ / /
+ : o 2 c:
+ : |
+ : ~
+ o 0 a:
+
+
+ $ hg log -Gq -T '{rev} {tags}: {subsetparents(rev, revset("b+i+l"))}\n' -r 'b+i+l'
+ o 11 l tip: 10
+ |\
+ o : 10 i: 1
+ :/
+ o 1 b:
+
+
+ $ hg log -Gq -T '{rev} {tags}: {subsetparents(rev, revset("b+i+j+l"))}\n' -r 'b+i+j+l'
+ o 11 l tip: 10 7
+ |\
+ | \
+ | :\
+ o : : 10 i: 1
+ :/ /
+ : o 7 j: 1
+ :/
+ o 1 b:
+
+
+null in subset:
+
+ $ hg log -Gq -T '{rev} {tags}: {subsetparents(rev, revset("null+a+c+f"))}\n' -r 'null+a+c+f'
+ o 4 f: 2
+ |
+ o 2 c: -1
+ :
+ : o 0 a: -1
+ :/
+ @ -1 : -1
+
+
+ $ hg log -Gq -T '{rev} {tags}: {subsetparents(rev, revset("null+a+b+c+f"))}\n' -r 'null+a+b+c+f'
+ o 4 f: 2
+ |
+ o 2 c: 1
+ |
+ o 1 b: -1
+ |
+ | o 0 a: -1
+ |/
+ @ -1 : -1
+
+
+wdir in subset:
+
+ $ hg update -qC i
+
+ $ hg log -Gq -T '{rev} {tags}: {subsetparents(rev, revset("f+k+wdir()"))}\n' -r 'f+k+wdir()'
+ o 2147483647 : 4
+ :
+ : o 9 k:
+ : |\
+ : ~ ~
+ o 4 f:
+ |
+ ~
+
+ $ hg update -qC null
+
+Revisions not in subset:
+
+ $ hg log -T '{rev} {tags}: {subsetparents(rev, revset("a+c+f+g+j+l"))}\n'
+ 11 l tip: 4 8 7
+ 10 i:
+ 9 k:
+ 8 g: 0 2
+ 7 j: 0 2
+ 6 h:
+ 5 e:
+ 4 f: 2
+ 3 d:
+ 2 c:
+ 1 b:
+ 0 a:
+
+ $ hg log -T '{rev} {tags}: {subsetparents(rev, revset("b+c"))}\n'
+ 11 l tip:
+ 10 i:
+ 9 k:
+ 8 g:
+ 7 j:
+ 6 h:
+ 5 e:
+ 4 f:
+ 3 d:
+ 2 c: 1
+ 1 b:
+ 0 a:
+
+ $ hg log -T '{rev} {tags}: {subsetparents(rev, revset("b+c"))}\n' -r'reverse(null:2)'
+ 2 c: 1
+ 1 b:
+ 0 a:
+ -1 :
+
+Nothing excluded:
+
+ $ hg log -T '{rev} {tags}: {subsetparents(rev, revset("null:wdir()"))}\n' -r'reverse(null:wdir())'
+ 2147483647 : -1
+ 11 l tip: 10 9
+ 10 i: 6 8
+ 9 k: 5 7
+ 8 g: 5
+ 7 j: 3
+ 6 h: 4
+ 5 e: 3
+ 4 f: 2
+ 3 d: 0 2
+ 2 c: 1
+ 1 b: -1
+ 0 a: -1
+ -1 : -1
+
+Uncachable query:
+
+ $ hg log -Gq -T '{rev} {tags}: {subsetparents(rev, revset("%d:%d", rev, rev - 1))}\n'
+ o 11 l tip: 10
+ |\
+ | o 10 i:
+ | |\
+ o \ \ 9 k:
+ |\ \ \
+ +-----o 8 g:
+ | | |
+ | o | 7 j:
+ | | |
+ | | o 6 h:
+ | | |
+ o | | 5 e:
+ |/ /
+ | o 4 f:
+ | |
+ o | 3 d: 2
+ |\|
+ | o 2 c: 1
+ | |
+ | o 1 b:
+ |
+ o 0 a: -1
+
+
+Invalid arguments:
+
+ $ hg log -T '{subsetparents()}\n'
+ hg: parse error: subsetparents expects two arguments
+ [255]
+ $ hg log -T '{subsetparents("a")}\n'
+ hg: parse error: subsetparents expects two arguments
+ [255]
+ $ hg log -T '{subsetparents(rev, extras)}\n'
+ hg: parse error: subsetparents expects a queried revset
+ [255]
+
+ $ cd ..
+
+subsetparents: p1/p2 order
+-------------------------
+
+ $ cd named-branch-order
+
+Parents should be sorted in p1/p2 order since p1 is likely to belong to
+the same named branch:
+
+ $ hg log -Gq -T '{rev} {tags}: {subsetparents(rev, revset("0+1+2+6"))}\n' -r '0+1+2+6'
+ o 6 : 2 1 0
+ :\
+ : \
+ : :\
+ : : o 2 :
+ : :
+ : o 1 :
+ :
+ o 0 :
+
+
+ $ hg log -Gq -T '{rev} {tags}: {subsetparents(rev, revset("0+1+2+6+10"))}\n' -r '0+1+2+6+10'
+ @ 10 tip: 6
+ :\
+ : o 6 : 2 1 0
+ :/:\
+ : : o 2 :
+ : :
+ o : 1 :
+ /
+ o 0 :
+
+
+And p1 path should be selected if both p1/p2 paths exist:
+
+ $ hg log -Gq -T '{rev} {tags}: {subsetparents(rev, revset("0+1+2+10"))}\n' -r '0+1+2+10'
+ @ 10 tip: 1 2 0
+ :\
+ : \
+ : :\
+ : : o 2 :
+ : :
+ o : 1 :
+ /
+ o 0 :
+
+
+ $ cd ..
--- a/tests/test-uncommit.t Mon Apr 13 16:30:13 2020 +0300
+++ b/tests/test-uncommit.t Thu Apr 16 22:51:09 2020 +0530
@@ -60,7 +60,8 @@
$ touch files
$ hg add files
$ for i in a ab abc abcd abcde; do echo $i > files; echo $i > file-$i; hg add file-$i; hg commit -m "added file-$i"; done
- $ ls
+ $ ls -A
+ .hg
file-a
file-ab
file-abc
@@ -489,7 +490,7 @@
$ hg add b
$ hg status
A b
- $ hg unc a
+ $ hg uncommit a
note: keeping empty commit
$ cat a
super critical info!
@@ -503,11 +504,11 @@
$ hg ci -Am 'add b'
$ echo 'foo bar' > b
- $ hg unc b
+ $ hg uncommit b
abort: uncommitted changes
(requires --allow-dirty-working-copy to uncommit)
[255]
- $ hg unc --allow-dirty-working-copy b
+ $ hg uncommit --allow-dirty-working-copy b
$ hg log
changeset: 3:30fa958635b2
tag: tip
--- a/tests/test-up-local-change.t Mon Apr 13 16:30:13 2020 +0300
+++ b/tests/test-up-local-change.t Thu Apr 16 22:51:09 2020 +0530
@@ -40,8 +40,6 @@
summary: 1
$ hg --debug up
- unmatched files in other:
- b
resolving manifests
branchmerge: False, force: False, partial: False
ancestor: c19d34741b0a, local: c19d34741b0a+, remote: 1e71731e6fbb
@@ -91,8 +89,6 @@
summary: 1
$ hg --debug up
- unmatched files in other:
- b
resolving manifests
branchmerge: False, force: False, partial: False
ancestor: c19d34741b0a, local: c19d34741b0a+, remote: 1e71731e6fbb
--- a/tests/test-update-atomic.t Mon Apr 13 16:30:13 2020 +0300
+++ b/tests/test-update-atomic.t Thu Apr 16 22:51:09 2020 +0530
@@ -1,4 +1,4 @@
-#require execbit unix-permissions
+#require execbit unix-permissions no-chg
Checking that experimental.atomic-file works.
--- a/tests/test-update-branches.t Mon Apr 13 16:30:13 2020 +0300
+++ b/tests/test-update-branches.t Thu Apr 16 22:51:09 2020 +0530
@@ -189,17 +189,17 @@
parent=2
$ revtest '-cC dirty linear' dirty 1 2 -cC
- abort: can only specify one of -C/--clean, -c/--check, or -m/--merge
+ abort: cannot specify both --clean and --check
parent=1
M foo
$ revtest '-mc dirty linear' dirty 1 2 -mc
- abort: can only specify one of -C/--clean, -c/--check, or -m/--merge
+ abort: cannot specify both --check and --merge
parent=1
M foo
$ revtest '-mC dirty linear' dirty 1 2 -mC
- abort: can only specify one of -C/--clean, -c/--check, or -m/--merge
+ abort: cannot specify both --clean and --merge
parent=1
M foo
@@ -249,6 +249,19 @@
0 files updated, 0 files merged, 0 files removed, 1 files unresolved
use 'hg resolve' to retry unresolved file merges
[1]
+ $ hg log -G --template '{rev}:{node|short} {parents} {branches}\n'
+ o 5:ff252e8273df b1
+ |
+ @ 4:d047485b3896 0:60829823a42a b1
+ |
+ | % 3:6efa171f091b 1:0786582aa4b1
+ | |
+ | | o 2:bd10386d478c
+ | |/
+ | o 1:0786582aa4b1
+ |/
+ o 0:60829823a42a
+
$ hg st
M a
? a.orig
@@ -330,6 +343,21 @@
$ hg resolve -l
U a
+Try to make empty commit while there are conflicts
+ $ hg revert -r . a
+ $ rm a.orig
+ $ hg ci -m empty
+ abort: unresolved merge conflicts (see 'hg help resolve')
+ [255]
+ $ hg resolve -m a
+ (no more unresolved files)
+ $ hg resolve -l
+ R a
+ $ hg ci -m empty
+ nothing changed
+ [1]
+ $ hg resolve -l
+
Change/delete conflict is not allowed
$ hg up -qC 3
$ hg rm foo
--- a/tests/test-update-reverse.t Mon Apr 13 16:30:13 2020 +0300
+++ b/tests/test-update-reverse.t Thu Apr 16 22:51:09 2020 +0530
@@ -12,7 +12,8 @@
'main' should be gone:
- $ ls
+ $ ls -A
+ .hg
a
$ touch side1
@@ -59,7 +60,8 @@
date: Thu Jan 01 00:00:00 1970 +0000
summary: Added main
- $ ls
+ $ ls -A
+ .hg
a
side1
side2
@@ -76,7 +78,8 @@
getting main
1 files updated, 0 files merged, 2 files removed, 0 files unresolved
- $ ls
+ $ ls -A
+ .hg
a
main
--- a/tests/test-wireproto-command-capabilities.t Mon Apr 13 16:30:13 2020 +0300
+++ b/tests/test-wireproto-command-capabilities.t Thu Apr 16 22:51:09 2020 +0530
@@ -150,7 +150,7 @@
s> Content-Type: application/mercurial-cbor\r\n
s> Content-Length: *\r\n (glob)
s> \r\n
- s> \xa3GapibaseDapi/Dapis\xa0Nv1capabilitiesY\x01\xe0batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset compression=$BUNDLE2_COMPRESSIONS$ getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash
+ s> \xa3GapibaseDapi/Dapis\xa0Nv1capabilitiesY\x01\xf7batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset compression=$BUNDLE2_COMPRESSIONS$ getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash
cbor> [
{
b'apibase': b'api/',
@@ -190,7 +190,7 @@
s> Content-Type: application/mercurial-cbor\r\n
s> Content-Length: *\r\n (glob)
s> \r\n
- s> \xa3GapibaseDapi/Dapis\xa0Nv1capabilitiesY\x01\xe0batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset compression=$BUNDLE2_COMPRESSIONS$ getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash
+ s> \xa3GapibaseDapi/Dapis\xa0Nv1capabilitiesY\x01\xf7batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset compression=$BUNDLE2_COMPRESSIONS$ getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash
cbor> [
{
b'apibase': b'api/',
@@ -223,7 +223,7 @@
s> Content-Type: application/mercurial-cbor\r\n
s> Content-Length: *\r\n (glob)
s> \r\n
- s> \xa3GapibaseDapi/Dapis\xa1Pexp-http-v2-0003\xa4Hcommands\xacIbranchmap\xa2Dargs\xa0Kpermissions\x81DpullLcapabilities\xa2Dargs\xa0Kpermissions\x81DpullMchangesetdata\xa2Dargs\xa2Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x84IbookmarksGparentsEphaseHrevisionIrevisions\xa2Hrequired\xf5DtypeDlistKpermissions\x81DpullHfiledata\xa2Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x83HlinknodeGparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolEnodes\xa2Hrequired\xf5DtypeDlistDpath\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullIfilesdata\xa3Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x84NfirstchangesetHlinknodeGparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolJpathfilter\xa3Gdefault\xf6Hrequired\xf4DtypeDdictIrevisions\xa2Hrequired\xf5DtypeDlistKpermissions\x81DpullTrecommendedbatchsize\x19\xc3PEheads\xa2Dargs\xa1Jpubliconly\xa3Gdefault\xf4Hrequired\xf4DtypeDboolKpermissions\x81DpullEknown\xa2Dargs\xa1Enodes\xa3Gdefault\x80Hrequired\xf4DtypeDlistKpermissions\x81DpullHlistkeys\xa2Dargs\xa1Inamespace\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullFlookup\xa2Dargs\xa1Ckey\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullLmanifestdata\xa3Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x82GparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolEnodes\xa2Hrequired\xf5DtypeDlistDtree\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullTrecommendedbatchsize\x1a\x00\x01\x86\xa0Gpushkey\xa2Dargs\xa4Ckey\xa2Hrequired\xf5DtypeEbytesInamespace\xa2Hrequired\xf5DtypeEbytesCnew\xa2Hrequired\xf5DtypeEbytesCold\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpushPrawstorefiledata\xa2Dargs\xa2Efiles\xa2Hrequired\xf5DtypeDlistJpathfilter\xa3Gdefault\xf6Hrequired\xf4DtypeDlistKpermissions\x81DpullQframingmediatypes\x81X&application/mercurial-exp-framing-0006Rpathfilterprefixes\xd9\
x01\x02\x82Epath:Lrootfilesin:Nrawrepoformats\x83LgeneraldeltaHrevlogv1LsparserevlogNv1capabilitiesY\x01\xe0batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset compression=$BUNDLE2_COMPRESSIONS$ getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash
+ s> \xa3GapibaseDapi/Dapis\xa1Pexp-http-v2-0003\xa4Hcommands\xacIbranchmap\xa2Dargs\xa0Kpermissions\x81DpullLcapabilities\xa2Dargs\xa0Kpermissions\x81DpullMchangesetdata\xa2Dargs\xa2Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x84IbookmarksGparentsEphaseHrevisionIrevisions\xa2Hrequired\xf5DtypeDlistKpermissions\x81DpullHfiledata\xa2Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x83HlinknodeGparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolEnodes\xa2Hrequired\xf5DtypeDlistDpath\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullIfilesdata\xa3Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x84NfirstchangesetHlinknodeGparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolJpathfilter\xa3Gdefault\xf6Hrequired\xf4DtypeDdictIrevisions\xa2Hrequired\xf5DtypeDlistKpermissions\x81DpullTrecommendedbatchsize\x19\xc3PEheads\xa2Dargs\xa1Jpubliconly\xa3Gdefault\xf4Hrequired\xf4DtypeDboolKpermissions\x81DpullEknown\xa2Dargs\xa1Enodes\xa3Gdefault\x80Hrequired\xf4DtypeDlistKpermissions\x81DpullHlistkeys\xa2Dargs\xa1Inamespace\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullFlookup\xa2Dargs\xa1Ckey\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullLmanifestdata\xa3Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x82GparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolEnodes\xa2Hrequired\xf5DtypeDlistDtree\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullTrecommendedbatchsize\x1a\x00\x01\x86\xa0Gpushkey\xa2Dargs\xa4Ckey\xa2Hrequired\xf5DtypeEbytesInamespace\xa2Hrequired\xf5DtypeEbytesCnew\xa2Hrequired\xf5DtypeEbytesCold\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpushPrawstorefiledata\xa2Dargs\xa2Efiles\xa2Hrequired\xf5DtypeDlistJpathfilter\xa3Gdefault\xf6Hrequired\xf4DtypeDlistKpermissions\x81DpullQframingmediatypes\x81X&application/mercurial-exp-framing-0006Rpathfilterprefixes\xd9\
x01\x02\x82Epath:Lrootfilesin:Nrawrepoformats\x83LgeneraldeltaHrevlogv1LsparserevlogNv1capabilitiesY\x01\xf7batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset compression=$BUNDLE2_COMPRESSIONS$ getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash
cbor> [
{
b'apibase': b'api/',
@@ -484,7 +484,7 @@
s> Content-Type: application/mercurial-cbor\r\n
s> Content-Length: *\r\n (glob)
s> \r\n
- s> \xa3GapibaseDapi/Dapis\xa1Pexp-http-v2-0003\xa4Hcommands\xacIbranchmap\xa2Dargs\xa0Kpermissions\x81DpullLcapabilities\xa2Dargs\xa0Kpermissions\x81DpullMchangesetdata\xa2Dargs\xa2Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x84IbookmarksGparentsEphaseHrevisionIrevisions\xa2Hrequired\xf5DtypeDlistKpermissions\x81DpullHfiledata\xa2Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x83HlinknodeGparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolEnodes\xa2Hrequired\xf5DtypeDlistDpath\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullIfilesdata\xa3Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x84NfirstchangesetHlinknodeGparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolJpathfilter\xa3Gdefault\xf6Hrequired\xf4DtypeDdictIrevisions\xa2Hrequired\xf5DtypeDlistKpermissions\x81DpullTrecommendedbatchsize\x19\xc3PEheads\xa2Dargs\xa1Jpubliconly\xa3Gdefault\xf4Hrequired\xf4DtypeDboolKpermissions\x81DpullEknown\xa2Dargs\xa1Enodes\xa3Gdefault\x80Hrequired\xf4DtypeDlistKpermissions\x81DpullHlistkeys\xa2Dargs\xa1Inamespace\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullFlookup\xa2Dargs\xa1Ckey\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullLmanifestdata\xa3Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x82GparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolEnodes\xa2Hrequired\xf5DtypeDlistDtree\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullTrecommendedbatchsize\x1a\x00\x01\x86\xa0Gpushkey\xa2Dargs\xa4Ckey\xa2Hrequired\xf5DtypeEbytesInamespace\xa2Hrequired\xf5DtypeEbytesCnew\xa2Hrequired\xf5DtypeEbytesCold\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpushPrawstorefiledata\xa2Dargs\xa2Efiles\xa2Hrequired\xf5DtypeDlistJpathfilter\xa3Gdefault\xf6Hrequired\xf4DtypeDlistKpermissions\x81DpullQframingmediatypes\x81X&application/mercurial-exp-framing-0006Rpathfilterprefixes\xd9\
x01\x02\x82Epath:Lrootfilesin:Nrawrepoformats\x83LgeneraldeltaHrevlogv1LsparserevlogNv1capabilitiesY\x01\xe0batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset compression=$BUNDLE2_COMPRESSIONS$ getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash
+ s> \xa3GapibaseDapi/Dapis\xa1Pexp-http-v2-0003\xa4Hcommands\xacIbranchmap\xa2Dargs\xa0Kpermissions\x81DpullLcapabilities\xa2Dargs\xa0Kpermissions\x81DpullMchangesetdata\xa2Dargs\xa2Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x84IbookmarksGparentsEphaseHrevisionIrevisions\xa2Hrequired\xf5DtypeDlistKpermissions\x81DpullHfiledata\xa2Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x83HlinknodeGparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolEnodes\xa2Hrequired\xf5DtypeDlistDpath\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullIfilesdata\xa3Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x84NfirstchangesetHlinknodeGparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolJpathfilter\xa3Gdefault\xf6Hrequired\xf4DtypeDdictIrevisions\xa2Hrequired\xf5DtypeDlistKpermissions\x81DpullTrecommendedbatchsize\x19\xc3PEheads\xa2Dargs\xa1Jpubliconly\xa3Gdefault\xf4Hrequired\xf4DtypeDboolKpermissions\x81DpullEknown\xa2Dargs\xa1Enodes\xa3Gdefault\x80Hrequired\xf4DtypeDlistKpermissions\x81DpullHlistkeys\xa2Dargs\xa1Inamespace\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullFlookup\xa2Dargs\xa1Ckey\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullLmanifestdata\xa3Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x82GparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolEnodes\xa2Hrequired\xf5DtypeDlistDtree\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullTrecommendedbatchsize\x1a\x00\x01\x86\xa0Gpushkey\xa2Dargs\xa4Ckey\xa2Hrequired\xf5DtypeEbytesInamespace\xa2Hrequired\xf5DtypeEbytesCnew\xa2Hrequired\xf5DtypeEbytesCold\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpushPrawstorefiledata\xa2Dargs\xa2Efiles\xa2Hrequired\xf5DtypeDlistJpathfilter\xa3Gdefault\xf6Hrequired\xf4DtypeDlistKpermissions\x81DpullQframingmediatypes\x81X&application/mercurial-exp-framing-0006Rpathfilterprefixes\xd9\
x01\x02\x82Epath:Lrootfilesin:Nrawrepoformats\x83LgeneraldeltaHrevlogv1LsparserevlogNv1capabilitiesY\x01\xf7batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset compression=$BUNDLE2_COMPRESSIONS$ getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash
sending capabilities command
s> setsockopt(6, 1, 1) -> None (?)
s> POST /api/exp-http-v2-0003/ro/capabilities HTTP/1.1\r\n
--- a/tests/test-wireproto-content-redirects.t Mon Apr 13 16:30:13 2020 +0300
+++ b/tests/test-wireproto-content-redirects.t Thu Apr 16 22:51:09 2020 +0530
@@ -66,9 +66,9 @@
s> Server: testing stub value\r\n
s> Date: $HTTP_DATE$\r\n
s> Content-Type: application/mercurial-cbor\r\n
- s> Content-Length: 2285\r\n
+ s> Content-Length: 2308\r\n
s> \r\n
- s> \xa3GapibaseDapi/Dapis\xa1Pexp-http-v2-0003\xa5Hcommands\xacIbranchmap\xa2Dargs\xa0Kpermissions\x81DpullLcapabilities\xa2Dargs\xa0Kpermissions\x81DpullMchangesetdata\xa2Dargs\xa2Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x84IbookmarksGparentsEphaseHrevisionIrevisions\xa2Hrequired\xf5DtypeDlistKpermissions\x81DpullHfiledata\xa2Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x83HlinknodeGparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolEnodes\xa2Hrequired\xf5DtypeDlistDpath\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullIfilesdata\xa3Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x84NfirstchangesetHlinknodeGparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolJpathfilter\xa3Gdefault\xf6Hrequired\xf4DtypeDdictIrevisions\xa2Hrequired\xf5DtypeDlistKpermissions\x81DpullTrecommendedbatchsize\x19\xc3PEheads\xa2Dargs\xa1Jpubliconly\xa3Gdefault\xf4Hrequired\xf4DtypeDboolKpermissions\x81DpullEknown\xa2Dargs\xa1Enodes\xa3Gdefault\x80Hrequired\xf4DtypeDlistKpermissions\x81DpullHlistkeys\xa2Dargs\xa1Inamespace\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullFlookup\xa2Dargs\xa1Ckey\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullLmanifestdata\xa3Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x82GparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolEnodes\xa2Hrequired\xf5DtypeDlistDtree\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullTrecommendedbatchsize\x1a\x00\x01\x86\xa0Gpushkey\xa2Dargs\xa4Ckey\xa2Hrequired\xf5DtypeEbytesInamespace\xa2Hrequired\xf5DtypeEbytesCnew\xa2Hrequired\xf5DtypeEbytesCold\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpushPrawstorefiledata\xa2Dargs\xa2Efiles\xa2Hrequired\xf5DtypeDlistJpathfilter\xa3Gdefault\xf6Hrequired\xf4DtypeDlistKpermissions\x81DpullQframingmediatypes\x81X&application/mercurial-exp-framing-0006Rpathfilterprefixes\xd9\
x01\x02\x82Epath:Lrootfilesin:Nrawrepoformats\x83LgeneraldeltaHrevlogv1LsparserevlogHredirect\xa2Fhashes\x82Fsha256Dsha1Gtargets\x81\xa5DnameHtarget-aHprotocolDhttpKsnirequired\xf4Ktlsversions\x82C1.2C1.3Duris\x81Shttp://example.com/Nv1capabilitiesY\x01\xe0batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset compression=$BUNDLE2_COMPRESSIONS$ getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash
+ s> \xa3GapibaseDapi/Dapis\xa1Pexp-http-v2-0003\xa5Hcommands\xacIbranchmap\xa2Dargs\xa0Kpermissions\x81DpullLcapabilities\xa2Dargs\xa0Kpermissions\x81DpullMchangesetdata\xa2Dargs\xa2Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x84IbookmarksGparentsEphaseHrevisionIrevisions\xa2Hrequired\xf5DtypeDlistKpermissions\x81DpullHfiledata\xa2Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x83HlinknodeGparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolEnodes\xa2Hrequired\xf5DtypeDlistDpath\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullIfilesdata\xa3Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x84NfirstchangesetHlinknodeGparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolJpathfilter\xa3Gdefault\xf6Hrequired\xf4DtypeDdictIrevisions\xa2Hrequired\xf5DtypeDlistKpermissions\x81DpullTrecommendedbatchsize\x19\xc3PEheads\xa2Dargs\xa1Jpubliconly\xa3Gdefault\xf4Hrequired\xf4DtypeDboolKpermissions\x81DpullEknown\xa2Dargs\xa1Enodes\xa3Gdefault\x80Hrequired\xf4DtypeDlistKpermissions\x81DpullHlistkeys\xa2Dargs\xa1Inamespace\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullFlookup\xa2Dargs\xa1Ckey\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullLmanifestdata\xa3Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x82GparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolEnodes\xa2Hrequired\xf5DtypeDlistDtree\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullTrecommendedbatchsize\x1a\x00\x01\x86\xa0Gpushkey\xa2Dargs\xa4Ckey\xa2Hrequired\xf5DtypeEbytesInamespace\xa2Hrequired\xf5DtypeEbytesCnew\xa2Hrequired\xf5DtypeEbytesCold\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpushPrawstorefiledata\xa2Dargs\xa2Efiles\xa2Hrequired\xf5DtypeDlistJpathfilter\xa3Gdefault\xf6Hrequired\xf4DtypeDlistKpermissions\x81DpullQframingmediatypes\x81X&application/mercurial-exp-framing-0006Rpathfilterprefixes\xd9\
x01\x02\x82Epath:Lrootfilesin:Nrawrepoformats\x83LgeneraldeltaHrevlogv1LsparserevlogHredirect\xa2Fhashes\x82Fsha256Dsha1Gtargets\x81\xa5DnameHtarget-aHprotocolDhttpKsnirequired\xf4Ktlsversions\x82C1.2C1.3Duris\x81Shttp://example.com/Nv1capabilitiesY\x01\xf7batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset compression=$BUNDLE2_COMPRESSIONS$ getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash
(remote redirect target target-a is compatible) (tls1.2 !)
(remote redirect target target-a requires unsupported TLS versions: 1.2, 1.3) (no-tls1.2 !)
sending capabilities command
@@ -396,9 +396,9 @@
s> Server: testing stub value\r\n
s> Date: $HTTP_DATE$\r\n
s> Content-Type: application/mercurial-cbor\r\n
- s> Content-Length: 2312\r\n
+ s> Content-Length: 2335\r\n
s> \r\n
- s> \xa3GapibaseDapi/Dapis\xa1Pexp-http-v2-0003\xa5Hcommands\xacIbranchmap\xa2Dargs\xa0Kpermissions\x81DpullLcapabilities\xa2Dargs\xa0Kpermissions\x81DpullMchangesetdata\xa2Dargs\xa2Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x84IbookmarksGparentsEphaseHrevisionIrevisions\xa2Hrequired\xf5DtypeDlistKpermissions\x81DpullHfiledata\xa2Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x83HlinknodeGparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolEnodes\xa2Hrequired\xf5DtypeDlistDpath\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullIfilesdata\xa3Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x84NfirstchangesetHlinknodeGparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolJpathfilter\xa3Gdefault\xf6Hrequired\xf4DtypeDdictIrevisions\xa2Hrequired\xf5DtypeDlistKpermissions\x81DpullTrecommendedbatchsize\x19\xc3PEheads\xa2Dargs\xa1Jpubliconly\xa3Gdefault\xf4Hrequired\xf4DtypeDboolKpermissions\x81DpullEknown\xa2Dargs\xa1Enodes\xa3Gdefault\x80Hrequired\xf4DtypeDlistKpermissions\x81DpullHlistkeys\xa2Dargs\xa1Inamespace\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullFlookup\xa2Dargs\xa1Ckey\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullLmanifestdata\xa3Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x82GparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolEnodes\xa2Hrequired\xf5DtypeDlistDtree\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullTrecommendedbatchsize\x1a\x00\x01\x86\xa0Gpushkey\xa2Dargs\xa4Ckey\xa2Hrequired\xf5DtypeEbytesInamespace\xa2Hrequired\xf5DtypeEbytesCnew\xa2Hrequired\xf5DtypeEbytesCold\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpushPrawstorefiledata\xa2Dargs\xa2Efiles\xa2Hrequired\xf5DtypeDlistJpathfilter\xa3Gdefault\xf6Hrequired\xf4DtypeDlistKpermissions\x81DpullQframingmediatypes\x81X&application/mercurial-exp-framing-0006Rpathfilterprefixes\xd9\
x01\x02\x82Epath:Lrootfilesin:Nrawrepoformats\x83LgeneraldeltaHrevlogv1LsparserevlogHredirect\xa2Fhashes\x82Fsha256Dsha1Gtargets\x82\xa3DnameHtarget-aHprotocolDhttpDuris\x81Shttp://example.com/\xa3DnameHtarget-bHprotocolGunknownDuris\x81Vunknown://example.com/Nv1capabilitiesY\x01\xe0batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset compression=$BUNDLE2_COMPRESSIONS$ getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash
+ s> \xa3GapibaseDapi/Dapis\xa1Pexp-http-v2-0003\xa5Hcommands\xacIbranchmap\xa2Dargs\xa0Kpermissions\x81DpullLcapabilities\xa2Dargs\xa0Kpermissions\x81DpullMchangesetdata\xa2Dargs\xa2Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x84IbookmarksGparentsEphaseHrevisionIrevisions\xa2Hrequired\xf5DtypeDlistKpermissions\x81DpullHfiledata\xa2Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x83HlinknodeGparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolEnodes\xa2Hrequired\xf5DtypeDlistDpath\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullIfilesdata\xa3Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x84NfirstchangesetHlinknodeGparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolJpathfilter\xa3Gdefault\xf6Hrequired\xf4DtypeDdictIrevisions\xa2Hrequired\xf5DtypeDlistKpermissions\x81DpullTrecommendedbatchsize\x19\xc3PEheads\xa2Dargs\xa1Jpubliconly\xa3Gdefault\xf4Hrequired\xf4DtypeDboolKpermissions\x81DpullEknown\xa2Dargs\xa1Enodes\xa3Gdefault\x80Hrequired\xf4DtypeDlistKpermissions\x81DpullHlistkeys\xa2Dargs\xa1Inamespace\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullFlookup\xa2Dargs\xa1Ckey\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullLmanifestdata\xa3Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x82GparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolEnodes\xa2Hrequired\xf5DtypeDlistDtree\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullTrecommendedbatchsize\x1a\x00\x01\x86\xa0Gpushkey\xa2Dargs\xa4Ckey\xa2Hrequired\xf5DtypeEbytesInamespace\xa2Hrequired\xf5DtypeEbytesCnew\xa2Hrequired\xf5DtypeEbytesCold\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpushPrawstorefiledata\xa2Dargs\xa2Efiles\xa2Hrequired\xf5DtypeDlistJpathfilter\xa3Gdefault\xf6Hrequired\xf4DtypeDlistKpermissions\x81DpullQframingmediatypes\x81X&application/mercurial-exp-framing-0006Rpathfilterprefixes\xd9\
x01\x02\x82Epath:Lrootfilesin:Nrawrepoformats\x83LgeneraldeltaHrevlogv1LsparserevlogHredirect\xa2Fhashes\x82Fsha256Dsha1Gtargets\x82\xa3DnameHtarget-aHprotocolDhttpDuris\x81Shttp://example.com/\xa3DnameHtarget-bHprotocolGunknownDuris\x81Vunknown://example.com/Nv1capabilitiesY\x01\xf7batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset compression=$BUNDLE2_COMPRESSIONS$ getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash
(remote redirect target target-a is compatible)
(remote redirect target target-b uses unsupported protocol: unknown)
sending capabilities command
@@ -731,9 +731,9 @@
s> Server: testing stub value\r\n
s> Date: $HTTP_DATE$\r\n
s> Content-Type: application/mercurial-cbor\r\n
- s> Content-Length: 2272\r\n
+ s> Content-Length: 2295\r\n
s> \r\n
- s> \xa3GapibaseDapi/Dapis\xa1Pexp-http-v2-0003\xa5Hcommands\xacIbranchmap\xa2Dargs\xa0Kpermissions\x81DpullLcapabilities\xa2Dargs\xa0Kpermissions\x81DpullMchangesetdata\xa2Dargs\xa2Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x84IbookmarksGparentsEphaseHrevisionIrevisions\xa2Hrequired\xf5DtypeDlistKpermissions\x81DpullHfiledata\xa2Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x83HlinknodeGparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolEnodes\xa2Hrequired\xf5DtypeDlistDpath\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullIfilesdata\xa3Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x84NfirstchangesetHlinknodeGparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolJpathfilter\xa3Gdefault\xf6Hrequired\xf4DtypeDdictIrevisions\xa2Hrequired\xf5DtypeDlistKpermissions\x81DpullTrecommendedbatchsize\x19\xc3PEheads\xa2Dargs\xa1Jpubliconly\xa3Gdefault\xf4Hrequired\xf4DtypeDboolKpermissions\x81DpullEknown\xa2Dargs\xa1Enodes\xa3Gdefault\x80Hrequired\xf4DtypeDlistKpermissions\x81DpullHlistkeys\xa2Dargs\xa1Inamespace\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullFlookup\xa2Dargs\xa1Ckey\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullLmanifestdata\xa3Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x82GparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolEnodes\xa2Hrequired\xf5DtypeDlistDtree\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullTrecommendedbatchsize\x1a\x00\x01\x86\xa0Gpushkey\xa2Dargs\xa4Ckey\xa2Hrequired\xf5DtypeEbytesInamespace\xa2Hrequired\xf5DtypeEbytesCnew\xa2Hrequired\xf5DtypeEbytesCold\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpushPrawstorefiledata\xa2Dargs\xa2Efiles\xa2Hrequired\xf5DtypeDlistJpathfilter\xa3Gdefault\xf6Hrequired\xf4DtypeDlistKpermissions\x81DpullQframingmediatypes\x81X&application/mercurial-exp-framing-0006Rpathfilterprefixes\xd9\
x01\x02\x82Epath:Lrootfilesin:Nrawrepoformats\x83LgeneraldeltaHrevlogv1LsparserevlogHredirect\xa2Fhashes\x82Fsha256Dsha1Gtargets\x81\xa4DnameNtarget-bad-tlsHprotocolEhttpsKsnirequired\xf5Duris\x81Thttps://example.com/Nv1capabilitiesY\x01\xe0batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset compression=$BUNDLE2_COMPRESSIONS$ getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash
+ s> \xa3GapibaseDapi/Dapis\xa1Pexp-http-v2-0003\xa5Hcommands\xacIbranchmap\xa2Dargs\xa0Kpermissions\x81DpullLcapabilities\xa2Dargs\xa0Kpermissions\x81DpullMchangesetdata\xa2Dargs\xa2Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x84IbookmarksGparentsEphaseHrevisionIrevisions\xa2Hrequired\xf5DtypeDlistKpermissions\x81DpullHfiledata\xa2Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x83HlinknodeGparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolEnodes\xa2Hrequired\xf5DtypeDlistDpath\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullIfilesdata\xa3Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x84NfirstchangesetHlinknodeGparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolJpathfilter\xa3Gdefault\xf6Hrequired\xf4DtypeDdictIrevisions\xa2Hrequired\xf5DtypeDlistKpermissions\x81DpullTrecommendedbatchsize\x19\xc3PEheads\xa2Dargs\xa1Jpubliconly\xa3Gdefault\xf4Hrequired\xf4DtypeDboolKpermissions\x81DpullEknown\xa2Dargs\xa1Enodes\xa3Gdefault\x80Hrequired\xf4DtypeDlistKpermissions\x81DpullHlistkeys\xa2Dargs\xa1Inamespace\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullFlookup\xa2Dargs\xa1Ckey\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullLmanifestdata\xa3Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x82GparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolEnodes\xa2Hrequired\xf5DtypeDlistDtree\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullTrecommendedbatchsize\x1a\x00\x01\x86\xa0Gpushkey\xa2Dargs\xa4Ckey\xa2Hrequired\xf5DtypeEbytesInamespace\xa2Hrequired\xf5DtypeEbytesCnew\xa2Hrequired\xf5DtypeEbytesCold\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpushPrawstorefiledata\xa2Dargs\xa2Efiles\xa2Hrequired\xf5DtypeDlistJpathfilter\xa3Gdefault\xf6Hrequired\xf4DtypeDlistKpermissions\x81DpullQframingmediatypes\x81X&application/mercurial-exp-framing-0006Rpathfilterprefixes\xd9\
x01\x02\x82Epath:Lrootfilesin:Nrawrepoformats\x83LgeneraldeltaHrevlogv1LsparserevlogHredirect\xa2Fhashes\x82Fsha256Dsha1Gtargets\x81\xa4DnameNtarget-bad-tlsHprotocolEhttpsKsnirequired\xf5Duris\x81Thttps://example.com/Nv1capabilitiesY\x01\xf7batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset compression=$BUNDLE2_COMPRESSIONS$ getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash
(redirect target target-bad-tls requires SNI, which is unsupported)
sending capabilities command
s> setsockopt(6, 1, 1) -> None (?)
@@ -1055,9 +1055,9 @@
s> Server: testing stub value\r\n
s> Date: $HTTP_DATE$\r\n
s> Content-Type: application/mercurial-cbor\r\n
- s> Content-Length: 2278\r\n
+ s> Content-Length: 2301\r\n
s> \r\n
- s> \xa3GapibaseDapi/Dapis\xa1Pexp-http-v2-0003\xa5Hcommands\xacIbranchmap\xa2Dargs\xa0Kpermissions\x81DpullLcapabilities\xa2Dargs\xa0Kpermissions\x81DpullMchangesetdata\xa2Dargs\xa2Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x84IbookmarksGparentsEphaseHrevisionIrevisions\xa2Hrequired\xf5DtypeDlistKpermissions\x81DpullHfiledata\xa2Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x83HlinknodeGparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolEnodes\xa2Hrequired\xf5DtypeDlistDpath\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullIfilesdata\xa3Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x84NfirstchangesetHlinknodeGparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolJpathfilter\xa3Gdefault\xf6Hrequired\xf4DtypeDdictIrevisions\xa2Hrequired\xf5DtypeDlistKpermissions\x81DpullTrecommendedbatchsize\x19\xc3PEheads\xa2Dargs\xa1Jpubliconly\xa3Gdefault\xf4Hrequired\xf4DtypeDboolKpermissions\x81DpullEknown\xa2Dargs\xa1Enodes\xa3Gdefault\x80Hrequired\xf4DtypeDlistKpermissions\x81DpullHlistkeys\xa2Dargs\xa1Inamespace\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullFlookup\xa2Dargs\xa1Ckey\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullLmanifestdata\xa3Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x82GparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolEnodes\xa2Hrequired\xf5DtypeDlistDtree\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullTrecommendedbatchsize\x1a\x00\x01\x86\xa0Gpushkey\xa2Dargs\xa4Ckey\xa2Hrequired\xf5DtypeEbytesInamespace\xa2Hrequired\xf5DtypeEbytesCnew\xa2Hrequired\xf5DtypeEbytesCold\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpushPrawstorefiledata\xa2Dargs\xa2Efiles\xa2Hrequired\xf5DtypeDlistJpathfilter\xa3Gdefault\xf6Hrequired\xf4DtypeDlistKpermissions\x81DpullQframingmediatypes\x81X&application/mercurial-exp-framing-0006Rpathfilterprefixes\xd9\
x01\x02\x82Epath:Lrootfilesin:Nrawrepoformats\x83LgeneraldeltaHrevlogv1LsparserevlogHredirect\xa2Fhashes\x82Fsha256Dsha1Gtargets\x81\xa4DnameNtarget-bad-tlsHprotocolEhttpsKtlsversions\x82B42B39Duris\x81Thttps://example.com/Nv1capabilitiesY\x01\xe0batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset compression=$BUNDLE2_COMPRESSIONS$ getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash
+ s> \xa3GapibaseDapi/Dapis\xa1Pexp-http-v2-0003\xa5Hcommands\xacIbranchmap\xa2Dargs\xa0Kpermissions\x81DpullLcapabilities\xa2Dargs\xa0Kpermissions\x81DpullMchangesetdata\xa2Dargs\xa2Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x84IbookmarksGparentsEphaseHrevisionIrevisions\xa2Hrequired\xf5DtypeDlistKpermissions\x81DpullHfiledata\xa2Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x83HlinknodeGparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolEnodes\xa2Hrequired\xf5DtypeDlistDpath\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullIfilesdata\xa3Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x84NfirstchangesetHlinknodeGparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolJpathfilter\xa3Gdefault\xf6Hrequired\xf4DtypeDdictIrevisions\xa2Hrequired\xf5DtypeDlistKpermissions\x81DpullTrecommendedbatchsize\x19\xc3PEheads\xa2Dargs\xa1Jpubliconly\xa3Gdefault\xf4Hrequired\xf4DtypeDboolKpermissions\x81DpullEknown\xa2Dargs\xa1Enodes\xa3Gdefault\x80Hrequired\xf4DtypeDlistKpermissions\x81DpullHlistkeys\xa2Dargs\xa1Inamespace\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullFlookup\xa2Dargs\xa1Ckey\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullLmanifestdata\xa3Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x82GparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolEnodes\xa2Hrequired\xf5DtypeDlistDtree\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullTrecommendedbatchsize\x1a\x00\x01\x86\xa0Gpushkey\xa2Dargs\xa4Ckey\xa2Hrequired\xf5DtypeEbytesInamespace\xa2Hrequired\xf5DtypeEbytesCnew\xa2Hrequired\xf5DtypeEbytesCold\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpushPrawstorefiledata\xa2Dargs\xa2Efiles\xa2Hrequired\xf5DtypeDlistJpathfilter\xa3Gdefault\xf6Hrequired\xf4DtypeDlistKpermissions\x81DpullQframingmediatypes\x81X&application/mercurial-exp-framing-0006Rpathfilterprefixes\xd9\
x01\x02\x82Epath:Lrootfilesin:Nrawrepoformats\x83LgeneraldeltaHrevlogv1LsparserevlogHredirect\xa2Fhashes\x82Fsha256Dsha1Gtargets\x81\xa4DnameNtarget-bad-tlsHprotocolEhttpsKtlsversions\x82B42B39Duris\x81Thttps://example.com/Nv1capabilitiesY\x01\xf7batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset compression=$BUNDLE2_COMPRESSIONS$ getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash
(remote redirect target target-bad-tls requires unsupported TLS versions: 39, 42)
sending capabilities command
s> setsockopt(6, 1, 1) -> None (?)
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/testlib/crash_transaction_late.py Thu Apr 16 22:51:09 2020 +0530
@@ -0,0 +1,32 @@
+# tiny extension to abort a transaction very late during test
+#
+# Copyright 2020 Pierre-Yves David <pierre-yves.david@octobus.net>
+#
+# This software may be used and distributed according to the terms of the
+# GNU General Public License version 2 or any later version.
+
+from __future__ import absolute_import
+
+from mercurial import (
+ error,
+ transaction,
+)
+
+
+def abort(fp):
+ raise error.Abort(b"This is a late abort")
+
+
+def reposetup(ui, repo):
+
+ transaction.postfinalizegenerators.add(b'late-abort')
+
+ class LateAbortRepo(repo.__class__):
+ def transaction(self, *args, **kwargs):
+ tr = super(LateAbortRepo, self).transaction(*args, **kwargs)
+ tr.addfilegenerator(
+ b'late-abort', [b'late-abort'], abort, order=9999999
+ )
+ return tr
+
+ repo.__class__ = LateAbortRepo
--- a/tests/testlib/ext-phase-report.py Mon Apr 13 16:30:13 2020 +0300
+++ b/tests/testlib/ext-phase-report.py Thu Apr 16 22:51:09 2020 +0530
@@ -5,21 +5,22 @@
def reposetup(ui, repo):
def reportphasemove(tr):
- for rev, move in sorted(tr.changes[b'phases'].items()):
- if move[0] is None:
- ui.write(
- (
- b'test-debug-phase: new rev %d: x -> %d\n'
- % (rev, move[1])
+ for revs, move in sorted(tr.changes[b"phases"], key=lambda r: r[0][0]):
+ for rev in revs:
+ if move[0] is None:
+ ui.write(
+ (
+ b'test-debug-phase: new rev %d: x -> %d\n'
+ % (rev, move[1])
+ )
)
- )
- else:
- ui.write(
- (
- b'test-debug-phase: move rev %d: %d -> %d\n'
- % (rev, move[0], move[1])
+ else:
+ ui.write(
+ (
+ b'test-debug-phase: move rev %d: %d -> %d\n'
+ % (rev, move[0], move[1])
+ )
)
- )
class reportphaserepo(repo.__class__):
def transaction(self, *args, **kwargs):
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/testlib/wait-on-file Thu Apr 16 22:51:09 2020 +0530
@@ -0,0 +1,38 @@
+#!/bin/bash
+#
+# wait up to TIMEOUT seconds until the file WAIT_ON_FILE is created.
+#
+# In addition, this script can create CREATE_FILE once it is ready to wait.
+
+if [ $# -lt 2 ] || [ $# -gt 3 ]; then
+    echo "USAGE: $0 TIMEOUT WAIT_ON_FILE [CREATE_FILE]" >&2
+    exit 64
+fi
+
+timer=$(( $1 * 100 ))  # timer counts in 0.01 s steps, matching the sleep below
+
+# if the test timeout has been extended, explicitly extend the provided timer
+if [ "$HGTEST_TIMEOUT_DEFAULT" -lt "$HGTEST_TIMEOUT" ]; then
+ timer=$(( ($timer * $HGTEST_TIMEOUT) / $HGTEST_TIMEOUT_DEFAULT ))
+fi
+
+wait_on="$2"
+create=""
+if [ $# -eq 3 ]; then
+ create="$3"
+fi
+
+if [ -n "$create" ];
+then
+ touch "$create"
+ create=""
+fi
+while [ "$timer" -gt 0 ] && [ ! -f "$wait_on" ];
+do
+ timer=$(( $timer - 1))
+ sleep 0.01
+done
+if [ "$timer" -le 0 ]; then
+ echo "file not created after $1 seconds: $wait_on" >&2
+ exit 1
+fi
--- a/tests/unwrap-message-id.py Mon Apr 13 16:30:13 2020 +0300
+++ b/tests/unwrap-message-id.py Thu Apr 16 22:51:09 2020 +0530
@@ -1,6 +1,8 @@
from __future__ import absolute_import, print_function
-import re
import sys
-print(re.sub(r"(?<=Message-Id:) \n ", " ", sys.stdin.read()), end="")
+for line in sys.stdin:
+ if line.lower() in ("message-id: \n", "in-reply-to: \n"):
+ line = line[:-2]
+ print(line, end="")
--- a/tests/wireprotosimplecache.py Mon Apr 13 16:30:13 2020 +0300
+++ b/tests/wireprotosimplecache.py Thu Apr 16 22:51:09 2020 +0530
@@ -116,7 +116,7 @@
redirectable = False
else:
clienttargets = set(self.redirecttargets)
- ourtargets = set(t[b'name'] for t in loadredirecttargets(self.ui))
+ ourtargets = {t[b'name'] for t in loadredirecttargets(self.ui)}
# We only ever redirect to a single target (for now). So we don't
# need to store which target matched.