changeset 49527:a3356ab610fc stable 6.3rc0

branching: merge default into stable. This marks the feature freeze for the 6.3 release
author Raphaël Gomès <rgomes@octobus.net>
date Mon, 24 Oct 2022 15:32:14 +0200
parents 192949b68159 (current diff) 52dd7a43ad5c (diff)
children f68d285158b2 c1cb90d72196
files contrib/perf-utils/compare-discovery-case mercurial/configitems.py mercurial/shelve.py tests/test-shelve.t tests/test-shelve2.t
diffstat 82 files changed, 2535 insertions(+), 380 deletions(-)
--- a/Makefile	Thu Oct 20 12:05:17 2022 -0400
+++ b/Makefile	Mon Oct 24 15:32:14 2022 +0200
@@ -203,9 +203,11 @@
 packaging_targets := \
   rhel7 \
   rhel8 \
+  rhel9 \
   deb \
   docker-rhel7 \
   docker-rhel8 \
+  docker-rhel9 \
   docker-debian-bullseye \
   docker-debian-buster \
   docker-debian-stretch \
--- a/contrib/heptapod-ci.yml	Thu Oct 20 12:05:17 2022 -0400
+++ b/contrib/heptapod-ci.yml	Mon Oct 24 15:32:14 2022 +0200
@@ -89,7 +89,7 @@
       - hg -R /tmp/mercurial-ci/ update `hg log --rev '.' --template '{node}'`
       - cd /tmp/mercurial-ci/
       - make local PYTHON=$PYTHON
-      - $PYTHON -m pip install --user -U pytype==2021.04.15
+      - $PYTHON -m pip install --user -U libcst==0.3.20 pytype==2022.03.29
     script:
       - echo "Entering script section"
       - sh contrib/check-pytype.sh
--- a/contrib/packaging/Makefile	Thu Oct 20 12:05:17 2022 -0400
+++ b/contrib/packaging/Makefile	Mon Oct 24 15:32:14 2022 +0200
@@ -15,7 +15,8 @@
 
 RHEL_RELEASES := \
   7 \
-  8
+  8 \
+  9
 
 # Build a Python for these RHEL (and derivatives) releases.
 RHEL_WITH_PYTHON_RELEASES :=
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/contrib/packaging/docker/rhel9	Mon Oct 24 15:32:14 2022 +0200
@@ -0,0 +1,25 @@
+FROM rockylinux/rockylinux:9
+
+RUN groupadd -g %GID% build && \
+    useradd -u %UID% -g %GID% -s /bin/bash -d /build -m build
+
+RUN dnf install 'dnf-command(config-manager)' -y
+# crb repository is necessary for docutils
+RUN dnf config-manager --set-enabled crb
+
+RUN yum install -y \
+	gcc \
+	gettext \
+	make \
+	python3-devel \
+	python3-docutils \
+	rpm-build
+
+# For creating repo meta data
+RUN yum install -y createrepo
+
+# For rust extensions
+RUN yum install -y cargo
+
+# avoid incorrect docker image permissions on /tmp preventing writes by non-root users
+RUN chmod 1777 /tmp
--- a/contrib/packaging/packagelib.sh	Thu Oct 20 12:05:17 2022 -0400
+++ b/contrib/packaging/packagelib.sh	Mon Oct 24 15:32:14 2022 +0200
@@ -9,7 +9,7 @@
 # node: the node|short hg was built from, or empty if built from a tag
 gethgversion() {
     if [ -z "${1+x}" ]; then
-        python="python"
+        python="python3"
     else
         python="$1"
     fi
--- a/contrib/perf-utils/compare-discovery-case	Thu Oct 20 12:05:17 2022 -0400
+++ b/contrib/perf-utils/compare-discovery-case	Mon Oct 24 15:32:14 2022 +0200
@@ -97,6 +97,16 @@
 assert set(VARIANTS.keys()) == set(VARIANTS_KEYS)
 
 
+def parse_case(case):
+    case_type, case_args = case.split('-', 1)
+    if case_type == 'file':
+        case_args = (case_args,)
+    else:
+        case_args = tuple(int(x) for x in case_args.split('-'))
+    case = (case_type,) + case_args
+    return case
+
+
 def format_case(case):
     return '-'.join(str(s) for s in case)
 
@@ -109,12 +119,41 @@
         return '::randomantichain(all(), "%d")' % case[1]
     elif t == 'rev':
         return '::%d' % case[1]
+    elif t == 'file':
+        return '::nodefromfile("%s")' % case[1]
     else:
         assert False
 
 
-def compare(repo, local_case, remote_case):
+def compare(
+    repo,
+    local_case,
+    remote_case,
+    display_header=True,
+    display_case=True,
+):
     case = (repo, local_case, remote_case)
+    if display_header:
+        pieces = ['#']
+        if display_case:
+            pieces += [
+                "repo",
+                "local-subset",
+                "remote-subset",
+            ]
+
+        pieces += [
+            "discovery-variant",
+            "roundtrips",
+            "queries",
+            "revs",
+            "local-heads",
+            "common-heads",
+            "undecided-initial",
+            "undecided-common",
+            "undecided-missing",
+        ]
+        print(*pieces)
     for variant in VARIANTS_KEYS:
         res = process(case, VARIANTS[variant])
         revs = res["nb-revs"]
@@ -122,36 +161,31 @@
         common_heads = res["nb-common-heads"]
         roundtrips = res["total-roundtrips"]
         queries = res["total-queries"]
-        if 'tree-discovery' in variant:
-            print(
+        pieces = []
+        if display_case:
+            pieces += [
                 repo,
                 format_case(local_case),
                 format_case(remote_case),
-                variant,
-                roundtrips,
-                queries,
-                revs,
-                local_heads,
-                common_heads,
-            )
-        else:
+            ]
+        pieces += [
+            variant,
+            roundtrips,
+            queries,
+            revs,
+            local_heads,
+            common_heads,
+        ]
+        if 'tree-discovery' not in variant:
             undecided_common = res["nb-ini_und-common"]
             undecided_missing = res["nb-ini_und-missing"]
             undecided = undecided_common + undecided_missing
-            print(
-                repo,
-                format_case(local_case),
-                format_case(remote_case),
-                variant,
-                roundtrips,
-                queries,
-                revs,
-                local_heads,
-                common_heads,
+            pieces += [
                 undecided,
                 undecided_common,
                 undecided_missing,
-            )
+            ]
+        print(*pieces)
     return 0
 
 
@@ -171,13 +205,23 @@
 
 
 if __name__ == '__main__':
-    if len(sys.argv) != 4:
+
+    argv = sys.argv[:]
+
+    kwargs = {}
+    # primitive arg parsing
+    if '--no-header' in argv:
+        kwargs['display_header'] = False
+        argv = [a for a in argv if a != '--no-header']
+    if '--no-case' in argv:
+        kwargs['display_case'] = False
+        argv = [a for a in argv if a != '--no-case']
+
+    if len(argv) != 4:
         usage = f'USAGE: {script_name} REPO LOCAL_CASE REMOTE_CASE'
         print(usage, file=sys.stderr)
         sys.exit(128)
-    repo = sys.argv[1]
-    local_case = sys.argv[2].split('-')
-    local_case = (local_case[0],) + tuple(int(x) for x in local_case[1:])
-    remote_case = sys.argv[3].split('-')
-    remote_case = (remote_case[0],) + tuple(int(x) for x in remote_case[1:])
-    sys.exit(compare(repo, local_case, remote_case))
+    repo = argv[1]
+    local_case = parse_case(argv[2])
+    remote_case = parse_case(argv[3])
+    sys.exit(compare(repo, local_case, remote_case, **kwargs))
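
To make the new case syntax concrete, an illustrative interactive session (assuming the parse_case helper above is importable; the input values are arbitrary)::

    >>> parse_case('rev-512')
    ('rev', 512)
    >>> parse_case('randomantichain-42')
    ('randomantichain', 42)
    >>> parse_case('file-heads.txt')
    ('file', 'heads.txt')
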
--- a/contrib/perf.py	Thu Oct 20 12:05:17 2022 -0400
+++ b/contrib/perf.py	Mon Oct 24 15:32:14 2022 +0200
@@ -925,6 +925,71 @@
     fm.end()
 
 
+@command(
+    b'perf::delta-find',
+    revlogopts + formatteropts,
+    b'-c|-m|FILE REV',
+)
+def perf_delta_find(ui, repo, arg_1, arg_2=None, **opts):
+    """benchmark the process of finding a valid delta for a revlog revision
+
+    When a revlog receives a new revision (e.g. from a commit, or from an
+    incoming bundle), it searches for a suitable delta-base to produce a delta.
+    This perf command measures how much time we spend in this process. It
+    operates on an already stored revision.
+
+    See `hg help debug-delta-find` for another related command.
+    """
+    from mercurial import revlogutils
+    import mercurial.revlogutils.deltas as deltautil
+
+    opts = _byteskwargs(opts)
+    if arg_2 is None:
+        file_ = None
+        rev = arg_1
+    else:
+        file_ = arg_1
+        rev = arg_2
+
+    repo = repo.unfiltered()
+
+    timer, fm = gettimer(ui, opts)
+
+    rev = int(rev)
+
+    revlog = cmdutil.openrevlog(repo, b'perf::delta-find', file_, opts)
+
+    deltacomputer = deltautil.deltacomputer(revlog)
+
+    node = revlog.node(rev)
+    p1r, p2r = revlog.parentrevs(rev)
+    p1 = revlog.node(p1r)
+    p2 = revlog.node(p2r)
+    full_text = revlog.revision(rev)
+    textlen = len(full_text)
+    cachedelta = None
+    flags = revlog.flags(rev)
+
+    revinfo = revlogutils.revisioninfo(
+        node,
+        p1,
+        p2,
+        [full_text],  # btext
+        textlen,
+        cachedelta,
+        flags,
+    )
+
+    # Note: we should probably purge the potential caches (like the full
+    # manifest cache) between runs.
+    def find_one():
+        with revlog._datafp() as fh:
+            deltacomputer.finddeltainfo(revinfo, fh, target_rev=rev)
+
+    timer(find_one)
+    fm.end()
+
+
 @command(b'perf::discovery|perfdiscovery', formatteropts, b'PATH')
 def perfdiscovery(ui, repo, path, **opts):
     """benchmark discovery between local repo and the peer at given path"""
@@ -974,6 +1039,111 @@
     fm.end()
 
 
+@command(
+    b'perf::bundle',
+    [
+        (
+            b'r',
+            b'rev',
+            [],
+            b'changesets to bundle',
+            b'REV',
+        ),
+        (
+            b't',
+            b'type',
+            b'none',
+            b'bundlespec to use (see `hg help bundlespec`)',
+            b'TYPE',
+        ),
+    ]
+    + formatteropts,
+    b'REVS',
+)
+def perfbundle(ui, repo, *revs, **opts):
+    """benchmark the creation of a bundle from a repository
+
+    For now, this only supports "none" compression.
+    """
+    try:
+        from mercurial import bundlecaches
+
+        parsebundlespec = bundlecaches.parsebundlespec
+    except ImportError:
+        from mercurial import exchange
+
+        parsebundlespec = exchange.parsebundlespec
+
+    from mercurial import discovery
+    from mercurial import bundle2
+
+    opts = _byteskwargs(opts)
+    timer, fm = gettimer(ui, opts)
+
+    cl = repo.changelog
+    revs = list(revs)
+    revs.extend(opts.get(b'rev', ()))
+    revs = scmutil.revrange(repo, revs)
+    if not revs:
+        raise error.Abort(b"not revision specified")
+    # make it a consistent set (ie: without topological gaps)
+    old_len = len(revs)
+    revs = list(repo.revs(b"%ld::%ld", revs, revs))
+    if old_len != len(revs):
+        new_count = len(revs) - old_len
+        msg = b"add %d new revisions to make it a consistent set\n"
+        ui.write_err(msg % new_count)
+
+    targets = [cl.node(r) for r in repo.revs(b"heads(::%ld)", revs)]
+    bases = [cl.node(r) for r in repo.revs(b"heads(::%ld - %ld)", revs, revs)]
+    outgoing = discovery.outgoing(repo, bases, targets)
+
+    bundle_spec = opts.get(b'type')
+
+    bundle_spec = parsebundlespec(repo, bundle_spec, strict=False)
+
+    cgversion = bundle_spec.params.get(b"cg.version")
+    if cgversion is None:
+        if bundle_spec.version == b'v1':
+            cgversion = b'01'
+        if bundle_spec.version == b'v2':
+            cgversion = b'02'
+    if cgversion not in changegroup.supportedoutgoingversions(repo):
+        err = b"repository does not support bundle version %s"
+        raise error.Abort(err % cgversion)
+
+    if cgversion == b'01':  # bundle1
+        bversion = b'HG10' + bundle_spec.wirecompression
+        bcompression = None
+    elif cgversion in (b'02', b'03'):
+        bversion = b'HG20'
+        bcompression = bundle_spec.wirecompression
+    else:
+        err = b'perf::bundle: unexpected changegroup version %s'
+        raise error.ProgrammingError(err % cgversion)
+
+    if bcompression is None:
+        bcompression = b'UN'
+
+    if bcompression != b'UN':
+        err = b'perf::bundle: compression currently unsupported: %s'
+        raise error.ProgrammingError(err % bcompression)
+
+    def do_bundle():
+        bundle2.writenewbundle(
+            ui,
+            repo,
+            b'perf::bundle',
+            os.devnull,
+            bversion,
+            outgoing,
+            bundle_spec.params,
+        )
+
+    timer(do_bundle)
+    fm.end()
+
+
 @command(b'perf::bundleread|perfbundleread', formatteropts, b'BUNDLE')
 def perfbundleread(ui, repo, bundlepath, **opts):
     """Benchmark reading of bundle files.
@@ -2498,6 +2668,60 @@
 
 
 @command(
+    b'perf::unbundle',
+    formatteropts,
+    b'BUNDLE_FILE',
+)
+def perf_unbundle(ui, repo, fname, **opts):
+    """benchmark application of a bundle in a repository.
+
+    This does not include the final transaction processing"""
+    from mercurial import exchange
+    from mercurial import bundle2
+
+    opts = _byteskwargs(opts)
+
+    with repo.lock():
+        bundle = [None, None]
+        orig_quiet = repo.ui.quiet
+        try:
+            repo.ui.quiet = True
+            with open(fname, mode="rb") as f:
+
+                def noop_report(*args, **kwargs):
+                    pass
+
+                def setup():
+                    gen, tr = bundle
+                    if tr is not None:
+                        tr.abort()
+                    bundle[:] = [None, None]
+                    f.seek(0)
+                    bundle[0] = exchange.readbundle(ui, f, fname)
+                    bundle[1] = repo.transaction(b'perf::unbundle')
+                    bundle[1]._report = noop_report  # silence the transaction
+
+                def apply():
+                    gen, tr = bundle
+                    bundle2.applybundle(
+                        repo,
+                        gen,
+                        tr,
+                        source=b'perf::unbundle',
+                        url=fname,
+                    )
+
+                timer, fm = gettimer(ui, opts)
+                timer(apply, setup=setup)
+                fm.end()
+        finally:
+            repo.ui.quiet = orig_quiet
+            gen, tr = bundle
+            if tr is not None:
+                tr.abort()
+
+
+@command(
     b'perf::unidiff|perfunidiff',
     revlogopts
     + formatteropts
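
For perf::unbundle above, only bundle application is timed: setup() re-reads the bundle and opens a fresh transaction for every run, and the transaction is aborted afterwards, so the repository is left unchanged between measurements. A hedged invocation (the bundle path is arbitrary)::

    $ hg perf::unbundle ../changes.hg
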
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/contrib/pull_logger.py	Mon Oct 24 15:32:14 2022 +0200
@@ -0,0 +1,141 @@
+# pull_logger.py - Logs pulls to a JSON-line file in the repo's VFS.
+#
+# Copyright 2022  Pacien TRAN-GIRARD <pacien.trangirard@pacien.net>
+#
+# This software may be used and distributed according to the terms of the
+# GNU General Public License version 2 or any later version.
+
+
+'''logs pull parameters to a file
+
+This extension logs the pull parameters, i.e. the remote and common heads,
+when pulling from the local repository.
+
+The collected data should give an idea of the state of a pair of repositories
+and allow replaying past synchronisations between them. This is particularly
+useful for working on data exchange, bundling and caching-related
+optimisations.
+
+The record is a JSON-line file located in the repository's VFS at
+.hg/pull_log.jsonl.
+
+Log write failures are not considered fatal: log writes may be skipped for any
+reason such as insufficient storage or a timeout.
+
+Some basic log file rotation can be enabled by setting 'rotate-size' to a value
+greater than 0. This causes the current log file to be moved to
+.hg/pull_log.jsonl.rotated when this threshold is met, discarding any
+previously rotated log file.
+
+The timeouts of the exclusive lock used when writing to the log file can be
+configured through the 'timeout.lock' and 'timeout.warn' options of this
+plugin. Those are not expected to be held for a significant time in practice.::
+
+  [pull-logger]
+  timeout.lock = 300
+  timeout.warn = 100
+  rotate-size = 1kb
+'''
+
+
+import json
+import time
+
+from mercurial.i18n import _
+from mercurial.utils import stringutil
+from mercurial import (
+    error,
+    extensions,
+    lock,
+    registrar,
+    wireprotov1server,
+)
+
+EXT_NAME = b'pull-logger'
+EXT_VERSION_CODE = 0
+
+LOG_FILE = b'pull_log.jsonl'
+OLD_LOG_FILE = LOG_FILE + b'.rotated'
+LOCK_NAME = LOG_FILE + b'.lock'
+
+configtable = {}
+configitem = registrar.configitem(configtable)
+configitem(EXT_NAME, b'timeout.lock', default=600)
+configitem(EXT_NAME, b'timeout.warn', default=120)
+configitem(EXT_NAME, b'rotate-size', default=b'100MB')
+
+
+def wrap_getbundle(orig, repo, proto, others, *args, **kwargs):
+    heads, common = extract_pull_heads(others)
+    log_entry = {
+        'timestamp': time.time(),
+        'logger_version': EXT_VERSION_CODE,
+        'heads': sorted(heads),
+        'common': sorted(common),
+    }
+
+    try:
+        write_to_log(repo, log_entry)
+    except (IOError, error.LockError) as err:
+        msg = stringutil.forcebytestr(err)
+        repo.ui.warn(_(b'unable to append to pull log: %s\n') % msg)
+
+    return orig(repo, proto, others, *args, **kwargs)
+
+
+def extract_pull_heads(bundle_args):
+    opts = wireprotov1server.options(
+        b'getbundle',
+        wireprotov1server.wireprototypes.GETBUNDLE_ARGUMENTS.keys(),
+        bundle_args.copy(),  # this call consumes the args destructively
+    )
+
+    heads = opts.get(b'heads', b'').decode('utf-8').split(' ')
+    common = opts.get(b'common', b'').decode('utf-8').split(' ')
+    return (heads, common)
+
+
+def write_to_log(repo, entry):
+    locktimeout = repo.ui.configint(EXT_NAME, b'timeout.lock')
+    lockwarntimeout = repo.ui.configint(EXT_NAME, b'timeout.warn')
+    rotatesize = repo.ui.configbytes(EXT_NAME, b'rotate-size')
+
+    with lock.trylock(
+        ui=repo.ui,
+        vfs=repo.vfs,
+        lockname=LOCK_NAME,
+        timeout=locktimeout,
+        warntimeout=lockwarntimeout,
+    ):
+        if rotatesize > 0 and repo.vfs.exists(LOG_FILE):
+            if repo.vfs.stat(LOG_FILE).st_size >= rotatesize:
+                repo.vfs.rename(LOG_FILE, OLD_LOG_FILE)
+
+        with repo.vfs.open(LOG_FILE, b'a+') as logfile:
+            serialised = json.dumps(entry, sort_keys=True)
+            logfile.write(serialised.encode('utf-8'))
+            logfile.write(b'\n')
+            logfile.flush()
+
+
+def reposetup(ui, repo):
+    if repo.local():
+        repo._wlockfreeprefix.add(LOG_FILE)
+        repo._wlockfreeprefix.add(OLD_LOG_FILE)
+
+
+def uisetup(ui):
+    del wireprotov1server.commands[b'getbundle']
+    decorator = wireprotov1server.wireprotocommand(
+        name=b'getbundle',
+        args=b'*',
+        permission=b'pull',
+    )
+
+    extensions.wrapfunction(
+        container=wireprotov1server,
+        funcname='getbundle',
+        wrapper=wrap_getbundle,
+    )
+
+    decorator(wireprotov1server.getbundle)
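
For illustration only, a record appended by write_to_log() would look roughly as follows; json.dumps(..., sort_keys=True) orders the keys, and 'heads'/'common' carry the hex nodes passed to the getbundle call (the values below are placeholders)::

    {"common": ["<40-hex-node>"], "heads": ["<40-hex-node>"], "logger_version": 0, "timestamp": 1666618334.5}
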
--- a/hgext/fsmonitor/pywatchman/pybser.py	Thu Oct 20 12:05:17 2022 -0400
+++ b/hgext/fsmonitor/pywatchman/pybser.py	Mon Oct 24 15:32:14 2022 +0200
@@ -36,6 +36,7 @@
 
 from . import compat
 
+abc = collections.abc
 
 BSER_ARRAY = b"\x00"
 BSER_OBJECT = b"\x01"
@@ -207,9 +208,7 @@
             self.ensure_size(needed)
             struct.pack_into(b"=cd", self.buf, self.wpos, BSER_REAL, val)
             self.wpos += needed
-        elif isinstance(val, collections.Mapping) and isinstance(
-            val, collections.Sized
-        ):
+        elif isinstance(val, abc.Mapping) and isinstance(val, abc.Sized):
             val_len = len(val)
             size = _int_size(val_len)
             needed = 2 + size
@@ -260,9 +259,7 @@
             for k, v in iteritems:
                 self.append_string(k)
                 self.append_recursive(v)
-        elif isinstance(val, collections.Iterable) and isinstance(
-            val, collections.Sized
-        ):
+        elif isinstance(val, abc.Iterable) and isinstance(val, abc.Sized):
             val_len = len(val)
             size = _int_size(val_len)
             needed = 2 + size
--- a/hgext/rebase.py	Thu Oct 20 12:05:17 2022 -0400
+++ b/hgext/rebase.py	Mon Oct 24 15:32:14 2022 +0200
@@ -546,7 +546,9 @@
         date = self.date
         if date is None:
             date = ctx.date()
-        extra = {b'rebase_source': ctx.hex()}
+        extra = {}
+        if repo.ui.configbool(b'rebase', b'store-source'):
+            extra = {b'rebase_source': ctx.hex()}
         for c in self.extrafns:
             c(ctx, extra)
         destphase = max(ctx.phase(), phases.draft)
--- a/hgext/releasenotes.py	Thu Oct 20 12:05:17 2022 -0400
+++ b/hgext/releasenotes.py	Mon Oct 24 15:32:14 2022 +0200
@@ -70,7 +70,7 @@
     (b'api', _(b'API Changes')),
 ]
 
-RE_DIRECTIVE = re.compile(br'^\.\. ([a-zA-Z0-9_]+)::\s*([^$]+)?$')
+RE_DIRECTIVE = re.compile(br'^\.\. ([a-zA-Z0-9_]+)::\s*([^$]+)?$', re.MULTILINE)
 RE_ISSUE = br'\bissue ?[0-9]{4,6}(?![0-9])\b'
 
 BULLET_SECTION = _(b'Other Changes')
--- a/mercurial/cext/revlog.c	Thu Oct 20 12:05:17 2022 -0400
+++ b/mercurial/cext/revlog.c	Mon Oct 24 15:32:14 2022 +0200
@@ -1382,6 +1382,7 @@
 static int index_issnapshotrev(indexObject *self, Py_ssize_t rev)
 {
 	int ps[2];
+	int b;
 	Py_ssize_t base;
 	while (rev >= 0) {
 		base = (Py_ssize_t)index_baserev(self, rev);
@@ -1399,6 +1400,20 @@
 			assert(PyErr_Occurred());
 			return -1;
 		};
+		while ((index_get_length(self, ps[0]) == 0) && ps[0] >= 0) {
+			b = index_baserev(self, ps[0]);
+			if (b == ps[0]) {
+				break;
+			}
+			ps[0] = b;
+		}
+		while ((index_get_length(self, ps[1]) == 0) && ps[1] >= 0) {
+			b = index_baserev(self, ps[1]);
+			if (b == ps[1]) {
+				break;
+			}
+			ps[1] = b;
+		}
 		if (base == ps[0] || base == ps[1]) {
 			return 0;
 		}
--- a/mercurial/cmdutil.py	Thu Oct 20 12:05:17 2022 -0400
+++ b/mercurial/cmdutil.py	Mon Oct 24 15:32:14 2022 +0200
@@ -832,7 +832,7 @@
 
 @attr.s(frozen=True)
 class morestatus:
-    reporoot = attr.ib()
+    repo = attr.ib()
     unfinishedop = attr.ib()
     unfinishedmsg = attr.ib()
     activemerge = attr.ib()
@@ -876,7 +876,7 @@
             mergeliststr = b'\n'.join(
                 [
                     b'    %s'
-                    % util.pathto(self.reporoot, encoding.getcwd(), path)
+                    % util.pathto(self.repo.root, encoding.getcwd(), path)
                     for path in self.unresolvedpaths
                 ]
             )
@@ -898,6 +898,7 @@
                     # Already output.
                     continue
                 fm.startitem()
+                fm.context(repo=self.repo)
                 # We can't claim to know the status of the file - it may just
                 # have been in one of the states that were not requested for
                 # display, so it could be anything.
@@ -923,7 +924,7 @@
     if activemerge:
         unresolved = sorted(mergestate.unresolved())
     return morestatus(
-        repo.root, unfinishedop, unfinishedmsg, activemerge, unresolved
+        repo, unfinishedop, unfinishedmsg, activemerge, unresolved
     )
 
 
--- a/mercurial/commands.py	Thu Oct 20 12:05:17 2022 -0400
+++ b/mercurial/commands.py	Mon Oct 24 15:32:14 2022 +0200
@@ -1035,7 +1035,14 @@
     state = hbisect.load_state(repo)
 
     if rev:
-        nodes = [repo[i].node() for i in logcmdutil.revrange(repo, rev)]
+        revs = logcmdutil.revrange(repo, rev)
+        goodnodes = state[b'good']
+        badnodes = state[b'bad']
+        if goodnodes and badnodes:
+            candidates = repo.revs(b'(%ln)::(%ln)', goodnodes, badnodes)
+            candidates += repo.revs(b'(%ln)::(%ln)', badnodes, goodnodes)
+            revs = candidates & revs
+        nodes = [repo.changelog.node(i) for i in revs]
     else:
         nodes = [repo.lookup(b'.')]
 
@@ -1485,6 +1492,12 @@
     b'bundle',
     [
         (
+            b'',
+            b'exact',
+            None,
+            _(b'compute the base from the revision specified'),
+        ),
+        (
             b'f',
             b'force',
             None,
@@ -1553,6 +1566,7 @@
     Returns 0 on success, 1 if no changes found.
     """
     opts = pycompat.byteskwargs(opts)
+
     revs = None
     if b'rev' in opts:
         revstrings = opts[b'rev']
@@ -1586,7 +1600,19 @@
             )
         if opts.get(b'base'):
             ui.warn(_(b"ignoring --base because --all was specified\n"))
+        if opts.get(b'exact'):
+            ui.warn(_(b"ignoring --exact because --all was specified\n"))
         base = [nullrev]
+    elif opts.get(b'exact'):
+        if dests:
+            raise error.InputError(
+                _(b"--exact is incompatible with specifying destinations")
+            )
+        if opts.get(b'base'):
+            ui.warn(_(b"ignoring --base because --exact was specified\n"))
+        base = repo.revs(b'parents(%ld) - %ld', revs, revs)
+        if not base:
+            base = [nullrev]
     else:
         base = logcmdutil.revrange(repo, opts.get(b'base'))
     if cgversion not in changegroup.supportedoutgoingversions(repo):
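
A hedged example of the new flag: with --exact the base is computed from the parents of the bundled revisions instead of being taken from --base or a destination (the revset and file name are arbitrary)::

    $ hg bundle --exact -r 'myfeature::' myfeature.hg
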
@@ -6954,11 +6980,13 @@
     )
 
     copy = {}
-    if (
-        opts.get(b'all')
-        or opts.get(b'copies')
-        or ui.configbool(b'ui', b'statuscopies')
-    ) and not opts.get(b'no_status'):
+    show_copies = ui.configbool(b'ui', b'statuscopies')
+    if opts.get(b'copies') is not None:
+        show_copies = opts.get(b'copies')
+    show_copies = (show_copies or opts.get(b'all')) and not opts.get(
+        b'no_status'
+    )
+    if show_copies:
         copy = copies.pathcopies(ctx1, ctx2, m)
 
     morestatus = None
--- a/mercurial/configitems.py	Thu Oct 20 12:05:17 2022 -0400
+++ b/mercurial/configitems.py	Mon Oct 24 15:32:14 2022 +0200
@@ -1425,12 +1425,38 @@
     default=False,
     experimental=True,
 )
+
+# Turning this on by default means we are confident about the scaling of phases.
+# This is not guaranteed to be the case at the time this message is written.
 coreconfigitem(
     b'format',
-    b'internal-phase',
+    b'use-internal-phase',
     default=False,
     experimental=True,
 )
+# The interaction between the archived phase and obsolescence markers needs to
+# be sorted out before wider usage of this can be considered.
+#
+# At the time this message is written, the behavior when archiving obsolete
+# changesets differs significantly from stripping. As part of stripping, we
+# also remove the obsolescence markers associated with the stripped
+# changesets, revealing the predecessor changesets when applicable. When
+# archiving, we don't touch the obsolescence markers, keeping everything
+# hidden. This can result in quite confusing situations for people combining
+# the exchange of drafts with the archived phase, as some markers needed by
+# others may be skipped during exchange.
+coreconfigitem(
+    b'format',
+    b'exp-archived-phase',
+    default=False,
+    experimental=True,
+)
+coreconfigitem(
+    b'shelve',
+    b'store',
+    default=b'internal',
+    experimental=True,
+)
 coreconfigitem(
     b'fsmonitor',
     b'warn_when_unused',
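
These are repository format options, so they only take effect when a repository is created (or upgraded); a minimal sketch for creating a repository with the internal phase enabled (the repository name is arbitrary)::

    $ hg --config format.use-internal-phase=yes init internal-phase-repo
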
@@ -2835,3 +2861,17 @@
     b'experimental.inmemory',
     default=False,
 )
+
+# This setting controls creation of a rebase_source extra field
+# during rebase. When False, no such field is created. This is
+# useful e.g. for incrementally converting changesets and then
+# rebasing them onto an existing repo.
+# WARNING: this is an advanced setting reserved for people who know
+# exactly what they are doing. Misuse of this setting can easily
+# result in obsmarker cycles and a vivid headache.
+coreconfigitem(
+    b'rebase',
+    b'store-source',
+    default=True,
+    experimental=True,
+)
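
A hedged hgrc sketch for the advanced use case described in the comment above, disabling the rebase_source extra field::

    [rebase]
    store-source = no
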
--- a/mercurial/debugcommands.py	Thu Oct 20 12:05:17 2022 -0400
+++ b/mercurial/debugcommands.py	Mon Oct 24 15:32:14 2022 +0200
@@ -1021,7 +1021,7 @@
     deltacomputer = deltautil.deltacomputer(
         revlog,
         write_debug=ui.write,
-        debug_search=True,
+        debug_search=not ui.quiet,
     )
 
     node = revlog.node(rev)
--- a/mercurial/defaultrc/mergetools.rc	Thu Oct 20 12:05:17 2022 -0400
+++ b/mercurial/defaultrc/mergetools.rc	Mon Oct 24 15:32:14 2022 +0200
@@ -107,7 +107,7 @@
 
 meld.args=--label=$labellocal $local --label='merged' $base --label=$labelother $other -o $output --auto-merge
 meld.check=changed
-meld.diffargs=-a --label=$plabel1 $parent --label=$clabel $child
+meld.diffargs=--label=$plabel1 $parent --label=$clabel $child
 meld.gui=True
 
 merge.check=conflicts
--- a/mercurial/dirstate.py	Thu Oct 20 12:05:17 2022 -0400
+++ b/mercurial/dirstate.py	Mon Oct 24 15:32:14 2022 +0200
@@ -1287,6 +1287,7 @@
 
         allowed_matchers = (
             matchmod.alwaysmatcher,
+            matchmod.differencematcher,
             matchmod.exactmatcher,
             matchmod.includematcher,
             matchmod.intersectionmatcher,
--- a/mercurial/dispatch.py	Thu Oct 20 12:05:17 2022 -0400
+++ b/mercurial/dispatch.py	Mon Oct 24 15:32:14 2022 +0200
@@ -952,14 +952,22 @@
 
     Takes paths in [cwd]/.hg/hgrc into account.
     """
+    try:
+        cwd = encoding.getcwd()
+    except OSError as e:
+        raise error.Abort(
+            _(b"error getting current working directory: %s")
+            % encoding.strtolocal(e.strerror)
+        )
+
+    # If using an alternate wd, temporarily switch to it so that relative
+    # paths are resolved correctly during config loading.
+    oldcwd = None
     if wd is None:
-        try:
-            wd = encoding.getcwd()
-        except OSError as e:
-            raise error.Abort(
-                _(b"error getting current working directory: %s")
-                % encoding.strtolocal(e.strerror)
-            )
+        wd = cwd
+    else:
+        oldcwd = cwd
+        os.chdir(wd)
 
     path = cmdutil.findrepo(wd) or b""
     if not path:
@@ -979,6 +987,9 @@
             lui.readconfig(os.path.join(path, b".hg", b"hgrc"), path)
             lui.readconfig(os.path.join(path, b".hg", b"hgrc-not-shared"), path)
 
+    if oldcwd:
+        os.chdir(oldcwd)
+
     return path, lui
 
 
--- a/mercurial/hbisect.py	Thu Oct 20 12:05:17 2022 -0400
+++ b/mercurial/hbisect.py	Mon Oct 24 15:32:14 2022 +0200
@@ -39,7 +39,7 @@
     def buildancestors(bad, good):
         badrev = min([changelog.rev(n) for n in bad])
         ancestors = collections.defaultdict(lambda: None)
-        for rev in repo.revs(b"descendants(%ln) - ancestors(%ln)", good, good):
+        for rev in repo.revs(b"(%ln::%d) - (::%ln)", good, badrev, good):
             ancestors[rev] = []
         if ancestors[badrev] is None:
             return badrev, None
@@ -115,11 +115,21 @@
             poison.update(children.get(rev, []))
             continue
 
+        unvisited = []
         for c in children.get(rev, []):
             if ancestors[c]:
                 ancestors[c] = list(set(ancestors[c] + a))
             else:
+                unvisited.append(c)
+
+        # Reuse existing ancestor list for the first unvisited child to avoid
+        # excessive copying for linear portions of history.
+        if unvisited:
+            first = unvisited.pop(0)
+            for c in unvisited:
                 ancestors[c] = a + [c]
+            a.append(first)
+            ancestors[first] = a
 
     assert best_rev is not None
     best_node = changelog.node(best_rev)
--- a/mercurial/helptext/bundlespec.txt	Thu Oct 20 12:05:17 2022 -0400
+++ b/mercurial/helptext/bundlespec.txt	Mon Oct 24 15:32:14 2022 +0200
@@ -67,6 +67,33 @@
 
 .. bundlecompressionmarker
 
+Available Options
+=================
+
+The following options exist:
+
+changegroup
+    Include the changegroup data in the bundle (defaults to True).
+
+cg.version
+    Select the version of the changegroup to use. Available options are: 01,
+    02, or 03. By default it is selected automatically according to the
+    current repository format.
+
+obsolescence
+    Include obsolescence-markers relevant to the bundled changesets.
+
+phases
+    Include phase information relevant to the bundled changesets.
+
+revbranchcache
+    Include the "tags-fnodes" cache inside the bundle.
+
+
+tagsfnodescache
+    Include the "tags-fnodes" cache inside the bundle.
+
+
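
As an illustrative aside (not part of the patch above), bundlespec parameters are appended to the compression-version pair with ';' separators, e.g.::

    $ hg bundle -t 'zstd-v2;cg.version=02' ../changes.hg
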
 Examples
 ========
 
--- a/mercurial/hgweb/__init__.py	Thu Oct 20 12:05:17 2022 -0400
+++ b/mercurial/hgweb/__init__.py	Mon Oct 24 15:32:14 2022 +0200
@@ -18,12 +18,15 @@
 
 from ..utils import procutil
 
+# pytype: disable=pyi-error
 from . import (
     hgweb_mod,
     hgwebdir_mod,
     server,
 )
 
+# pytype: enable=pyi-error
+
 
 def hgweb(config, name=None, baseui=None):
     """create an hgweb wsgi object
--- a/mercurial/localrepo.py	Thu Oct 20 12:05:17 2022 -0400
+++ b/mercurial/localrepo.py	Mon Oct 24 15:32:14 2022 +0200
@@ -522,12 +522,8 @@
     # the repository. This file was introduced in Mercurial 0.9.2,
     # which means very old repositories may not have one. We assume
     # a missing file translates to no requirements.
-    try:
-        return set(vfs.read(b'requires').splitlines())
-    except FileNotFoundError:
-        if not allowmissing:
-            raise
-        return set()
+    read = vfs.tryread if allowmissing else vfs.read
+    return set(read(b'requires').splitlines())
 
 
 def makelocalrepository(baseui, path, intents=None):
@@ -1281,6 +1277,7 @@
     """
 
     _basesupported = {
+        requirementsmod.ARCHIVED_PHASE_REQUIREMENT,
         requirementsmod.BOOKMARKS_IN_STORE_REQUIREMENT,
         requirementsmod.CHANGELOGV2_REQUIREMENT,
         requirementsmod.COPIESSDC_REQUIREMENT,
@@ -3668,9 +3665,13 @@
         requirements.discard(requirementsmod.REVLOGV1_REQUIREMENT)
         requirements.add(requirementsmod.REVLOGV2_REQUIREMENT)
     # experimental config: format.internal-phase
-    if ui.configbool(b'format', b'internal-phase'):
+    if ui.configbool(b'format', b'use-internal-phase'):
         requirements.add(requirementsmod.INTERNAL_PHASE_REQUIREMENT)
 
+    # experimental config: format.exp-archived-phase
+    if ui.configbool(b'format', b'exp-archived-phase'):
+        requirements.add(requirementsmod.ARCHIVED_PHASE_REQUIREMENT)
+
     if createopts.get(b'narrowfiles'):
         requirements.add(requirementsmod.NARROW_REQUIREMENT)
 
--- a/mercurial/obsolete.py	Thu Oct 20 12:05:17 2022 -0400
+++ b/mercurial/obsolete.py	Mon Oct 24 15:32:14 2022 +0200
@@ -70,6 +70,7 @@
 
 import binascii
 import struct
+import weakref
 
 from .i18n import _
 from .pycompat import getattr
@@ -561,10 +562,18 @@
         # caches for various obsolescence related cache
         self.caches = {}
         self.svfs = svfs
-        self.repo = repo
+        self._repo = weakref.ref(repo)
         self._defaultformat = defaultformat
         self._readonly = readonly
 
+    @property
+    def repo(self):
+        r = self._repo()
+        if r is None:
+            msg = "using the obsstore of a deallocated repo"
+            raise error.ProgrammingError(msg)
+        return r
+
     def __iter__(self):
         return iter(self._all)
 
--- a/mercurial/phases.py	Thu Oct 20 12:05:17 2022 -0400
+++ b/mercurial/phases.py	Mon Oct 24 15:32:14 2022 +0200
@@ -178,6 +178,12 @@
     return requirements.INTERNAL_PHASE_REQUIREMENT in repo.requirements
 
 
+def supportarchived(repo):
+    # type: (localrepo.localrepository) -> bool
+    """True if the archived phase can be used on a repository"""
+    return requirements.ARCHIVED_PHASE_REQUIREMENT in repo.requirements
+
+
 def _readroots(repo, phasedefaults=None):
     # type: (localrepo.localrepository, Optional[Phasedefaults]) -> Tuple[Phaseroots, bool]
     """Read phase roots from disk
@@ -642,7 +648,12 @@
         # phaseroots values, replace them.
         if revs is None:
             revs = []
-        if targetphase in (archived, internal) and not supportinternal(repo):
+        if (
+            targetphase == internal
+            and not supportinternal(repo)
+            or targetphase == archived
+            and not supportarchived(repo)
+        ):
             name = phasenames[targetphase]
             msg = b'this repository does not support the %s phase' % name
             raise error.ProgrammingError(msg)
--- a/mercurial/requirements.py	Thu Oct 20 12:05:17 2022 -0400
+++ b/mercurial/requirements.py	Mon Oct 24 15:32:14 2022 +0200
@@ -29,7 +29,11 @@
 
 # Enables the internal phase which is used to hide changesets instead
 # of stripping them
-INTERNAL_PHASE_REQUIREMENT = b'internal-phase'
+INTERNAL_PHASE_REQUIREMENT = b'internal-phase-2'
+
+# Enables the archived phase which is used to hide changesets instead
+# of stripping them
+ARCHIVED_PHASE_REQUIREMENT = b'exp-archived-phase'
 
 # Stores manifest in Tree structure
 TREEMANIFEST_REQUIREMENT = b'treemanifest'
@@ -107,6 +111,7 @@
 #
 # note: the list is currently inherited from previous code and misses some relevant requirements while containing some irrelevant ones.
 STREAM_FIXED_REQUIREMENTS = {
+    ARCHIVED_PHASE_REQUIREMENT,
     BOOKMARKS_IN_STORE_REQUIREMENT,
     CHANGELOGV2_REQUIREMENT,
     COPIESSDC_REQUIREMENT,
--- a/mercurial/revlog.py	Thu Oct 20 12:05:17 2022 -0400
+++ b/mercurial/revlog.py	Mon Oct 24 15:32:14 2022 +0200
@@ -235,6 +235,8 @@
     b'  expected %d bytes from offset %d, data size is %d'
 )
 
+hexdigits = b'0123456789abcdefABCDEF'
+
 
 class revlog:
     """
@@ -1509,7 +1511,7 @@
                 ambiguous = True
             # fall through to slow path that filters hidden revisions
         except (AttributeError, ValueError):
-            # we are pure python, or key was too short to search radix tree
+            # we are pure python, or key is not hex
             pass
         if ambiguous:
             raise error.AmbiguousPrefixLookupError(
@@ -1523,6 +1525,11 @@
             # hex(node)[:...]
             l = len(id) // 2 * 2  # grab an even number of digits
             try:
+                # we're dropping the last digit, so let's check that it's hex,
+                # to avoid the expensive computation below if it's not
+                if len(id) % 2 > 0:
+                    if not (id[-1] in hexdigits):
+                        return None
                 prefix = bin(id[:l])
             except binascii.Error:
                 pass
@@ -1768,7 +1775,17 @@
         if base == nullrev:
             return True
         p1 = entry[5]
+        while self.length(p1) == 0:
+            b = self.deltaparent(p1)
+            if b == p1:
+                break
+            p1 = b
         p2 = entry[6]
+        while self.length(p2) == 0:
+            b = self.deltaparent(p2)
+            if b == p2:
+                break
+            p2 = b
         if base == p1 or base == p2:
             return False
         return self.issnapshot(base)
--- a/mercurial/revset.py	Thu Oct 20 12:05:17 2022 -0400
+++ b/mercurial/revset.py	Mon Oct 24 15:32:14 2022 +0200
@@ -7,7 +7,10 @@
 
 
 import binascii
+import functools
+import random
 import re
+import sys
 
 from .i18n import _
 from .pycompat import getattr
@@ -2339,14 +2342,28 @@
     parents = repo.changelog.parentrevs
 
     def filter(r):
-        for p in parents(r):
-            if 0 <= p and p in s:
-                return False
+        try:
+            for p in parents(r):
+                if 0 <= p and p in s:
+                    return False
+        except error.WdirUnsupported:
+            for p in repo[None].parents():
+                if p.rev() in s:
+                    return False
         return True
 
     return subset & s.filter(filter, condrepr=b'<roots>')
 
 
+MAXINT = sys.maxsize
+MININT = -MAXINT - 1
+
+
+def pick_random(c, gen=random):
+    # exists as its own function to make it possible to overwrite the seed
+    return gen.randint(MININT, MAXINT)
+
+
 _sortkeyfuncs = {
     b'rev': scmutil.intrev,
     b'branch': lambda c: c.branch(),
@@ -2355,12 +2372,17 @@
     b'author': lambda c: c.user(),
     b'date': lambda c: c.date()[0],
     b'node': scmutil.binnode,
+    b'random': pick_random,
 }
 
 
 def _getsortargs(x):
     """Parse sort options into (set, [(key, reverse)], opts)"""
-    args = getargsdict(x, b'sort', b'set keys topo.firstbranch')
+    args = getargsdict(
+        x,
+        b'sort',
+        b'set keys topo.firstbranch random.seed',
+    )
     if b'set' not in args:
         # i18n: "sort" is a keyword
         raise error.ParseError(_(b'sort requires one or two arguments'))
@@ -2400,6 +2422,20 @@
                 )
             )
 
+    if b'random.seed' in args:
+        if any(k == b'random' for k, reverse in keyflags):
+            s = args[b'random.seed']
+            seed = getstring(s, _(b"random.seed must be a string"))
+            opts[b'random.seed'] = seed
+        else:
+            # i18n: "random" and "random.seed" are keywords
+            raise error.ParseError(
+                _(
+                    b'random.seed can only be used '
+                    b'when using the random sort key'
+                )
+            )
+
     return args[b'set'], keyflags, opts
 
 
@@ -2419,11 +2455,14 @@
     - ``date`` for the commit date
     - ``topo`` for a reverse topographical sort
     - ``node`` the nodeid of the revision
+    - ``random`` randomly shuffle revisions
 
     The ``topo`` sort order cannot be combined with other sort keys. This sort
     takes one optional argument, ``topo.firstbranch``, which takes a revset that
     specifies what topographical branches to prioritize in the sort.
 
+    The ``random`` sort takes one optional ``random.seed`` argument to control
+    the pseudo-randomness of the result.
     """
     s, keyflags, opts = _getsortargs(x)
     revs = getset(repo, subset, s, order)
@@ -2435,10 +2474,20 @@
         return revs
     elif keyflags[0][0] == b"topo":
         firstbranch = ()
+        parentrevs = repo.changelog.parentrevs
+        parentsfunc = parentrevs
+        if wdirrev in revs:
+
+            def parentsfunc(r):
+                try:
+                    return parentrevs(r)
+                except error.WdirUnsupported:
+                    return [p.rev() for p in repo[None].parents()]
+
         if b'topo.firstbranch' in opts:
             firstbranch = getset(repo, subset, opts[b'topo.firstbranch'])
         revs = baseset(
-            dagop.toposort(revs, repo.changelog.parentrevs, firstbranch),
+            dagop.toposort(revs, parentsfunc, firstbranch),
             istopo=True,
         )
         if keyflags[0][1]:
@@ -2448,7 +2497,12 @@
     # sort() is guaranteed to be stable
     ctxs = [repo[r] for r in revs]
     for k, reverse in reversed(keyflags):
-        ctxs.sort(key=_sortkeyfuncs[k], reverse=reverse)
+        func = _sortkeyfuncs[k]
+        if k == b'random' and b'random.seed' in opts:
+            seed = opts[b'random.seed']
+            r = random.Random(seed)
+            func = functools.partial(func, gen=r)
+        ctxs.sort(key=func, reverse=reverse)
     return baseset([c.rev() for c in ctxs])
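
A hedged usage sketch of the new sort key added above (the seed string is arbitrary)::

    $ hg log -r 'sort(all(), "random", random.seed="celeste")' -T '{rev}\n'
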
 
 
--- a/mercurial/scmutil.py	Thu Oct 20 12:05:17 2022 -0400
+++ b/mercurial/scmutil.py	Mon Oct 24 15:32:14 2022 +0200
@@ -1191,7 +1191,7 @@
                 obsolete.createmarkers(
                     repo, rels, operation=operation, metadata=metadata
                 )
-        elif phases.supportinternal(repo) and mayusearchived:
+        elif phases.supportarchived(repo) and mayusearchived:
             # this assume we do not have "unstable" nodes above the cleaned ones
             allreplaced = set()
             for ns in replacements.keys():
--- a/mercurial/shelve.py	Thu Oct 20 12:05:17 2022 -0400
+++ b/mercurial/shelve.py	Mon Oct 24 15:32:14 2022 +0200
@@ -22,6 +22,7 @@
 """
 
 import collections
+import io
 import itertools
 import stat
 
@@ -98,6 +99,17 @@
         return sorted(info, reverse=True)
 
 
+def _use_internal_phase(repo):
+    return (
+        phases.supportinternal(repo)
+        and repo.ui.config(b'shelve', b'store') == b'internal'
+    )
+
+
+def _target_phase(repo):
+    return phases.internal if _use_internal_phase(repo) else phases.secret
+
+
 class Shelf:
     """Represents a shelf, including possibly multiple files storing it.
 
@@ -111,12 +123,19 @@
         self.name = name
 
     def exists(self):
-        return self.vfs.exists(self.name + b'.patch') and self.vfs.exists(
-            self.name + b'.hg'
-        )
+        return self._exists(b'.shelve') or self._exists(b'.patch', b'.hg')
+
+    def _exists(self, *exts):
+        return all(self.vfs.exists(self.name + ext) for ext in exts)
 
     def mtime(self):
-        return self.vfs.stat(self.name + b'.patch')[stat.ST_MTIME]
+        try:
+            return self._stat(b'.shelve')[stat.ST_MTIME]
+        except FileNotFoundError:
+            return self._stat(b'.patch')[stat.ST_MTIME]
+
+    def _stat(self, ext):
+        return self.vfs.stat(self.name + ext)
 
     def writeinfo(self, info):
         scmutil.simplekeyvaluefile(self.vfs, self.name + b'.shelve').write(info)
@@ -159,9 +178,7 @@
         filename = self.name + b'.hg'
         fp = self.vfs(filename)
         try:
-            targetphase = phases.internal
-            if not phases.supportinternal(repo):
-                targetphase = phases.secret
+            targetphase = _target_phase(repo)
             gen = exchange.readbundle(repo.ui, fp, filename, self.vfs)
             pretip = repo[b'tip']
             bundle2.applybundle(
@@ -183,6 +200,27 @@
     def open_patch(self, mode=b'rb'):
         return self.vfs(self.name + b'.patch', mode)
 
+    def patch_from_node(self, repo, node):
+        repo = repo.unfiltered()
+        match = _optimized_match(repo, node)
+        fp = io.BytesIO()
+        cmdutil.exportfile(
+            repo,
+            [node],
+            fp,
+            opts=mdiff.diffopts(git=True),
+            match=match,
+        )
+        fp.seek(0)
+        return fp
+
+    def load_patch(self, repo):
+        try:
+            # prefer node-based shelf
+            return self.patch_from_node(repo, self.readinfo()[b'node'])
+        except (FileNotFoundError, error.RepoLookupError):
+            return self.open_patch()
+
     def _backupfilename(self, backupvfs, filename):
         def gennames(base):
             yield base
@@ -210,6 +248,15 @@
             self.vfs.tryunlink(self.name + b'.' + ext)
 
 
+def _optimized_match(repo, node):
+    """
+    Create a matcher so that prefetch doesn't attempt to fetch
+    the entire repository pointlessly, and as an optimisation
+    for movedirstate, if needed.
+    """
+    return scmutil.matchfiles(repo, repo[node].files())
+
+
 class shelvedstate:
     """Handle persistence during unshelving operations.
 
@@ -447,9 +494,7 @@
         if hasmq:
             saved, repo.mq.checkapplied = repo.mq.checkapplied, False
 
-        targetphase = phases.internal
-        if not phases.supportinternal(repo):
-            targetphase = phases.secret
+        targetphase = _target_phase(repo)
         overrides = {(b'phases', b'new-commit'): targetphase}
         try:
             editor_ = False
@@ -510,7 +555,7 @@
 
 
 def _finishshelve(repo, tr):
-    if phases.supportinternal(repo):
+    if _use_internal_phase(repo):
         tr.close()
     else:
         _aborttransaction(repo, tr)
@@ -579,10 +624,7 @@
             _nothingtoshelvemessaging(ui, repo, pats, opts)
             return 1
 
-        # Create a matcher so that prefetch doesn't attempt to fetch
-        # the entire repository pointlessly, and as an optimisation
-        # for movedirstate, if needed.
-        match = scmutil.matchfiles(repo, repo[node].files())
+        match = _optimized_match(repo, node)
         _shelvecreatedcommit(repo, node, name, match)
 
         ui.status(_(b'shelved as %s\n') % name)
@@ -668,7 +710,7 @@
         ui.write(age, label=b'shelve.age')
         ui.write(b' ' * (12 - len(age)))
         used += 12
-        with shelf_dir.get(name).open_patch() as fp:
+        with shelf_dir.get(name).load_patch(repo) as fp:
             while True:
                 line = fp.readline()
                 if not line:
@@ -754,7 +796,7 @@
             if state.activebookmark and state.activebookmark in repo._bookmarks:
                 bookmarks.activate(repo, state.activebookmark)
             mergefiles(ui, repo, state.wctx, state.pendingctx)
-            if not phases.supportinternal(repo):
+            if not _use_internal_phase(repo):
                 repair.strip(
                     ui, repo, state.nodestoremove, backup=False, topic=b'shelve'
                 )
@@ -816,9 +858,7 @@
             repo.setparents(state.pendingctx.node(), repo.nullid)
             repo.dirstate.write(repo.currenttransaction())
 
-        targetphase = phases.internal
-        if not phases.supportinternal(repo):
-            targetphase = phases.secret
+        targetphase = _target_phase(repo)
         overrides = {(b'phases', b'new-commit'): targetphase}
         with repo.ui.configoverride(overrides, b'unshelve'):
             with repo.dirstate.parentchange():
@@ -843,7 +883,7 @@
         mergefiles(ui, repo, state.wctx, shelvectx)
         restorebranch(ui, repo, state.branchtorestore)
 
-        if not phases.supportinternal(repo):
+        if not _use_internal_phase(repo):
             repair.strip(
                 ui, repo, state.nodestoremove, backup=False, topic=b'shelve'
             )
@@ -957,7 +997,7 @@
         user=shelvectx.user(),
     )
     if snode:
-        m = scmutil.matchfiles(repo, repo[snode].files())
+        m = _optimized_match(repo, snode)
         _shelvecreatedcommit(repo, snode, basename, m)
 
     return newnode, bool(snode)
@@ -1137,7 +1177,6 @@
         oldtiprev = len(repo)
 
         pctx = repo[b'.']
-        tmpwctx = pctx
         # The goal is to have a commit structure like so:
         # ...-> pctx -> tmpwctx -> shelvectx
         # where tmpwctx is an optional commit with the user's pending changes
@@ -1145,9 +1184,7 @@
         # to the original pctx.
 
         activebookmark = _backupactivebookmark(repo)
-        tmpwctx, addedbefore = _commitworkingcopychanges(
-            ui, repo, opts, tmpwctx
-        )
+        tmpwctx, addedbefore = _commitworkingcopychanges(ui, repo, opts, pctx)
         repo, shelvectx = _unshelverestorecommit(ui, repo, tr, basename)
         _checkunshelveuntrackedproblems(ui, repo, shelvectx)
         branchtorestore = b''
--- a/mercurial/subrepo.py	Thu Oct 20 12:05:17 2022 -0400
+++ b/mercurial/subrepo.py	Mon Oct 24 15:32:14 2022 +0200
@@ -1099,6 +1099,10 @@
             # --non-interactive.
             if commands[0] in (b'update', b'checkout', b'commit'):
                 cmd.append(b'--non-interactive')
+        if util.safehasattr(subprocess, 'CREATE_NO_WINDOW'):
+            # On Windows, prevent command prompts windows from popping up when
+            # running in pythonw.
+            extrakw['creationflags'] = getattr(subprocess, 'CREATE_NO_WINDOW')
         cmd.extend(commands)
         if filename is not None:
             path = self.wvfs.reljoin(
@@ -1150,7 +1154,7 @@
         # commit revision so we can compare the subrepo state with
         # both. We used to store the working directory one.
         output, err = self._svncommand([b'info', b'--xml'])
-        doc = xml.dom.minidom.parseString(output)
+        doc = xml.dom.minidom.parseString(output)  # pytype: disable=pyi-error
         entries = doc.getElementsByTagName('entry')
         lastrev, rev = b'0', b'0'
         if entries:
@@ -1174,7 +1178,7 @@
         """
         output, err = self._svncommand([b'status', b'--xml'])
         externals, changes, missing = [], [], []
-        doc = xml.dom.minidom.parseString(output)
+        doc = xml.dom.minidom.parseString(output)  # pytype: disable=pyi-error
         for e in doc.getElementsByTagName('entry'):
             s = e.getElementsByTagName('wc-status')
             if not s:
@@ -1319,7 +1323,7 @@
     @annotatesubrepoerror
     def files(self):
         output = self._svncommand([b'list', b'--recursive', b'--xml'])[0]
-        doc = xml.dom.minidom.parseString(output)
+        doc = xml.dom.minidom.parseString(output)  # pytype: disable=pyi-error
         paths = []
         for e in doc.getElementsByTagName('entry'):
             kind = pycompat.bytestr(e.getAttribute('kind'))
@@ -1469,6 +1473,11 @@
             # insert the argument in the front,
             # the end of git diff arguments is used for paths
             commands.insert(1, b'--color')
+        extrakw = {}
+        if util.safehasattr(subprocess, 'CREATE_NO_WINDOW'):
+            # On Windows, prevent command prompts windows from popping up when
+            # running in pythonw.
+            extrakw['creationflags'] = getattr(subprocess, 'CREATE_NO_WINDOW')
         p = subprocess.Popen(
             pycompat.rapply(
                 procutil.tonativestr, [self._gitexecutable] + commands
@@ -1479,6 +1488,7 @@
             close_fds=procutil.closefds,
             stdout=subprocess.PIPE,
             stderr=errpipe,
+            **extrakw
         )
         if stream:
             return p.stdout, None
--- a/mercurial/templatefilters.py	Thu Oct 20 12:05:17 2022 -0400
+++ b/mercurial/templatefilters.py	Mon Oct 24 15:32:14 2022 +0200
@@ -390,6 +390,14 @@
     return stringutil.person(author)
 
 
+@templatefilter(b'reverse')
+def reverse(list_):
+    """List. Reverses the order of list items."""
+    if isinstance(list_, list):
+        return templateutil.hybridlist(list_[::-1], name=b'item')
+    raise error.ParseError(_(b'not reversible'))
+
+
 @templatefilter(b'revescape', intype=bytes)
 def revescape(text):
     """Any text. Escapes all "special" characters, except @.
--- a/mercurial/url.py	Thu Oct 20 12:05:17 2022 -0400
+++ b/mercurial/url.py	Mon Oct 24 15:32:14 2022 +0200
@@ -222,13 +222,16 @@
     h.headers = None
 
 
-def _generic_proxytunnel(self):
+def _generic_proxytunnel(self: "httpsconnection"):
+    headers = self.headers  # pytype: disable=attribute-error
     proxyheaders = {
-        pycompat.bytestr(x): pycompat.bytestr(self.headers[x])
-        for x in self.headers
+        pycompat.bytestr(x): pycompat.bytestr(headers[x])
+        for x in headers
         if x.lower().startswith('proxy-')
     }
-    self.send(b'CONNECT %s HTTP/1.0\r\n' % self.realhostport)
+    realhostport = self.realhostport  # pytype: disable=attribute-error
+    self.send(b'CONNECT %s HTTP/1.0\r\n' % realhostport)
+
     for header in proxyheaders.items():
         self.send(b'%s: %s\r\n' % header)
     self.send(b'\r\n')
@@ -237,10 +240,14 @@
     # httplib.HTTPConnection as there are no adequate places to
     # override functions to provide the needed functionality.
 
+    # pytype: disable=attribute-error
     res = self.response_class(self.sock, method=self._method)
+    # pytype: enable=attribute-error
 
     while True:
+        # pytype: disable=attribute-error
         version, status, reason = res._read_status()
+        # pytype: enable=attribute-error
         if status != httplib.CONTINUE:
             break
         # skip lines that are all whitespace
@@ -323,14 +330,15 @@
             self.sock = socket.create_connection((self.host, self.port))
 
             host = self.host
-            if self.realhostport:  # use CONNECT proxy
+            realhostport = self.realhostport  # pytype: disable=attribute-error
+            if realhostport:  # use CONNECT proxy
                 _generic_proxytunnel(self)
-                host = self.realhostport.rsplit(b':', 1)[0]
+                host = realhostport.rsplit(b':', 1)[0]
             self.sock = sslutil.wrapsocket(
                 self.sock,
                 self.key_file,
                 self.cert_file,
-                ui=self.ui,
+                ui=self.ui,  # pytype: disable=attribute-error
                 serverhostname=host,
             )
             sslutil.validatesocket(self.sock)
--- a/relnotes/next	Thu Oct 20 12:05:17 2022 -0400
+++ b/relnotes/next	Mon Oct 24 15:32:14 2022 +0200
@@ -13,6 +13,12 @@
 
 == Backwards Compatibility Changes ==
 
+ * chg worker processes will now correctly load per-repository configuration
+   when given both a relative `--repository` path and an alternate working
+   directory via `--cwd`. A side-effect of this change is that these workers
+   will now return an error if hg cannot find the current working directory,
+   even when a different directory is specified via `--cwd`.
+
 == Internal API Changes ==
 
 == Miscellaneous ==
--- a/rust/Cargo.lock	Thu Oct 20 12:05:17 2022 -0400
+++ b/rust/Cargo.lock	Mon Oct 24 15:32:14 2022 +0200
@@ -468,6 +468,7 @@
  "log",
  "memmap2",
  "micro-timer",
+ "once_cell",
  "ouroboros",
  "pretty_assertions",
  "rand 0.8.5",
@@ -687,6 +688,12 @@
 ]
 
 [[package]]
+name = "once_cell"
+version = "1.14.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2f7254b99e31cad77da24b08ebf628882739a608578bb1bcdfc1f9c21260d7c0"
+
+[[package]]
 name = "opaque-debug"
 version = "0.3.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -981,6 +988,7 @@
  "lazy_static",
  "log",
  "micro-timer",
+ "rayon",
  "regex",
  "users",
  "which",
--- a/rust/hg-core/Cargo.toml	Thu Oct 20 12:05:17 2022 -0400
+++ b/rust/hg-core/Cargo.toml	Mon Oct 24 15:32:14 2022 +0200
@@ -35,6 +35,9 @@
 memmap2 = { version = "0.5.3", features = ["stable_deref_trait"] }
 zstd = "0.5.3"
 format-bytes = "0.3.0"
+# once_cell 1.15 uses edition 2021, while the heptapod CI
+# uses an old version of Cargo that doesn't support it.
+once_cell = "=1.14.0"
 
 # We don't use the `miniz-oxide` backend to not change rhg benchmarks and until
 # we have a clearer view of which backend is the fastest.
--- a/rust/hg-core/src/config.rs	Thu Oct 20 12:05:17 2022 -0400
+++ b/rust/hg-core/src/config.rs	Mon Oct 24 15:32:14 2022 +0200
@@ -11,6 +11,8 @@
 
 mod config;
 mod layer;
+mod plain_info;
 mod values;
 pub use config::{Config, ConfigSource, ConfigValueParseError};
 pub use layer::{ConfigError, ConfigOrigin, ConfigParseError};
+pub use plain_info::PlainInfo;
--- a/rust/hg-core/src/config/config.rs	Thu Oct 20 12:05:17 2022 -0400
+++ b/rust/hg-core/src/config/config.rs	Mon Oct 24 15:32:14 2022 +0200
@@ -12,6 +12,7 @@
 use crate::config::layer::{
     ConfigError, ConfigLayer, ConfigOrigin, ConfigValue,
 };
+use crate::config::plain_info::PlainInfo;
 use crate::utils::files::get_bytes_from_os_str;
 use format_bytes::{write_bytes, DisplayBytes};
 use std::collections::HashSet;
@@ -27,6 +28,7 @@
 #[derive(Clone)]
 pub struct Config {
     layers: Vec<layer::ConfigLayer>,
+    plain: PlainInfo,
 }
 
 impl DisplayBytes for Config {
@@ -83,17 +85,55 @@
     }
 }
 
+/// Returns true if the config item is disabled by PLAIN or PLAINEXCEPT
+fn should_ignore(plain: &PlainInfo, section: &[u8], item: &[u8]) -> bool {
+    // This duplicates the [_applyconfig] logic in [ui.py].
+    if !plain.is_plain() {
+        return false;
+    }
+    if section == b"alias" {
+        return plain.plainalias();
+    }
+    if section == b"revsetalias" {
+        return plain.plainrevsetalias();
+    }
+    if section == b"templatealias" {
+        return plain.plaintemplatealias();
+    }
+    if section == b"ui" {
+        let to_delete: &[&[u8]] = &[
+            b"debug",
+            b"fallbackencoding",
+            b"quiet",
+            b"slash",
+            b"logtemplate",
+            b"message-output",
+            b"statuscopies",
+            b"style",
+            b"traceback",
+            b"verbose",
+        ];
+        return to_delete.contains(&item);
+    }
+    let sections_to_delete: &[&[u8]] =
+        &[b"defaults", b"commands", b"command-templates"];
+    return sections_to_delete.contains(&section);
+}
+
 impl Config {
     /// The configuration to use when printing configuration-loading errors
     pub fn empty() -> Self {
-        Self { layers: Vec::new() }
+        Self {
+            layers: Vec::new(),
+            plain: PlainInfo::empty(),
+        }
     }
 
     /// Load system and user configuration from various files.
     ///
     /// This is also affected by some environment variables.
     pub fn load_non_repo() -> Result<Self, ConfigError> {
-        let mut config = Self { layers: Vec::new() };
+        let mut config = Self::empty();
         let opt_rc_path = env::var_os("HGRCPATH");
         // HGRCPATH replaces system config
         if opt_rc_path.is_none() {
@@ -266,7 +306,10 @@
             }
         }
 
-        Ok(Config { layers })
+        Ok(Config {
+            layers,
+            plain: PlainInfo::empty(),
+        })
     }
 
     /// Loads the per-repository config into a new `Config` which is combined
@@ -283,6 +326,7 @@
 
         let mut repo_config = Self {
             layers: other_layers,
+            plain: PlainInfo::empty(),
         };
         for path in repo_config_files {
             // TODO: check if this file should be trusted:
@@ -293,6 +337,10 @@
         Ok(repo_config)
     }
 
+    pub fn apply_plain(&mut self, plain: PlainInfo) {
+        self.plain = plain;
+    }
+
     fn get_parse<'config, T: 'config>(
         &'config self,
         section: &[u8],
@@ -413,10 +461,25 @@
         section: &[u8],
         item: &[u8],
     ) -> Option<(&ConfigLayer, &ConfigValue)> {
+        // Filter out the config items that are hidden by [PLAIN].
+        // This differs from python hg where we delete them from the config.
+        let should_ignore = should_ignore(&self.plain, &section, &item);
         for layer in self.layers.iter().rev() {
             if !layer.trusted {
                 continue;
             }
+            // The [PLAIN] config should not affect the defaults.
+            //
+            // However, PLAIN should also affect the "tweaked" defaults (unless
+            // "tweakdefaults" is part of "HGPLAINEXCEPT").
+            //
+            // In practice the tweakdefaults layer is only added when it is
+            // relevant, so we can safely always take it into
+            // account here.
+            if should_ignore && !(layer.origin == ConfigOrigin::Tweakdefaults)
+            {
+                continue;
+            }
             if let Some(v) = layer.get(&section, &item) {
                 return Some((&layer, v));
             }
@@ -504,6 +567,38 @@
         }
         res
     }
+
+    // a config layer that's introduced by ui.tweakdefaults
+    fn tweakdefaults_layer() -> ConfigLayer {
+        let mut layer = ConfigLayer::new(ConfigOrigin::Tweakdefaults);
+
+        let mut add = |section: &[u8], item: &[u8], value: &[u8]| {
+            layer.add(
+                section[..].into(),
+                item[..].into(),
+                value[..].into(),
+                None,
+            );
+        };
+        // duplication of [tweakrc] from [ui.py]
+        add(b"ui", b"rollback", b"False");
+        add(b"ui", b"statuscopies", b"yes");
+        add(b"ui", b"interface", b"curses");
+        add(b"ui", b"relative-paths", b"yes");
+        add(b"commands", b"grep.all-files", b"True");
+        add(b"commands", b"update.check", b"noconflict");
+        add(b"commands", b"status.verbose", b"True");
+        add(b"commands", b"resolve.explicit-re-merge", b"True");
+        add(b"git", b"git", b"1");
+        add(b"git", b"showfunc", b"1");
+        add(b"git", b"word-diff", b"1");
+        return layer;
+    }
+
+    // Introduce the tweaked defaults as implied by ui.tweakdefaults.
+    pub fn tweakdefaults(&mut self) {
+        self.layers.insert(0, Config::tweakdefaults_layer());
+    }
 }
 
 #[cfg(test)]
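
The lookup rule above can be pictured with a small standalone model: values hidden by PLAIN are skipped layer by layer, except when they come from the tweakdefaults layer. This is only an illustrative sketch using plain std types, not hg-core's Config/ConfigLayer API:

#[derive(PartialEq)]
enum Origin {
    File,
    Tweakdefaults,
}

struct Layer {
    origin: Origin,
    values: Vec<((String, String), String)>, // ((section, item), value)
}

fn get<'a>(
    layers: &'a [Layer],
    plain_hides_item: bool,
    section: &str,
    item: &str,
) -> Option<&'a str> {
    // Walk layers from the most recently added one, as the real lookup does.
    for layer in layers.iter().rev() {
        // PLAIN hides this item, except when the value comes from the
        // tweakdefaults layer.
        if plain_hides_item && layer.origin != Origin::Tweakdefaults {
            continue;
        }
        if let Some((_, value)) = layer
            .values
            .iter()
            .find(|((s, i), _)| s == section && i == item)
        {
            return Some(value.as_str());
        }
    }
    None
}

fn main() {
    let layers = vec![
        Layer {
            origin: Origin::Tweakdefaults,
            values: vec![(("ui".into(), "statuscopies".into()), "yes".into())],
        },
        Layer {
            origin: Origin::File,
            values: vec![(("ui".into(), "statuscopies".into()), "no".into())],
        },
    ];
    // With the item hidden by PLAIN, the user's hgrc value is skipped but the
    // tweaked default still applies; without PLAIN, the hgrc value wins.
    assert_eq!(get(&layers, true, "ui", "statuscopies"), Some("yes"));
    assert_eq!(get(&layers, false, "ui", "statuscopies"), Some("no"));
    println!("ok");
}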
--- a/rust/hg-core/src/config/layer.rs	Thu Oct 20 12:05:17 2022 -0400
+++ b/rust/hg-core/src/config/layer.rs	Mon Oct 24 15:32:14 2022 +0200
@@ -81,6 +81,7 @@
                         String::from_utf8_lossy(arg),
                     ),
                     CONFIG_PARSE_ERROR_ABORT,
+                    None,
                 ))?
             }
         }
@@ -299,6 +300,8 @@
 pub enum ConfigOrigin {
     /// From a configuration file
     File(PathBuf),
+    /// From [ui.tweakdefaults]
+    Tweakdefaults,
     /// From a `--config` CLI argument
     CommandLine,
     /// From a `--color` CLI argument
@@ -321,6 +324,9 @@
             ConfigOrigin::CommandLine => out.write_all(b"--config"),
             ConfigOrigin::CommandLineColor => out.write_all(b"--color"),
             ConfigOrigin::Environment(e) => write_bytes!(out, b"${}", e),
+            ConfigOrigin::Tweakdefaults => {
+                write_bytes!(out, b"ui.tweakdefaults")
+            }
         }
     }
 }
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/rust/hg-core/src/config/plain_info.rs	Mon Oct 24 15:32:14 2022 +0200
@@ -0,0 +1,79 @@
+use crate::utils::files::get_bytes_from_os_string;
+use std::env;
+
+/// Keeps information on whether plain mode is active.
+///
+/// Plain mode means that all configuration variables which affect
+/// the behavior and output of Mercurial should be
+/// ignored. Additionally, the output should be stable,
+/// reproducible and suitable for use in scripts or applications.
+///
+/// The only way to trigger plain mode is by setting either the
+/// `HGPLAIN` or `HGPLAINEXCEPT` environment variable.
+///
+/// For a given feature, the plain-mode check returns
+/// - false if HGPLAIN is not set, or the feature is in HGPLAINEXCEPT
+/// - false if the feature is disabled by default and not included in HGPLAIN
+/// - true otherwise
+#[derive(Clone)]
+pub struct PlainInfo {
+    is_plain: bool,
+    except: Vec<Vec<u8>>,
+}
+
+impl PlainInfo {
+    fn plain_except(except: Vec<Vec<u8>>) -> Self {
+        PlainInfo {
+            is_plain: true,
+            except,
+        }
+    }
+
+    pub fn empty() -> PlainInfo {
+        PlainInfo {
+            is_plain: false,
+            except: vec![],
+        }
+    }
+
+    pub fn from_env() -> PlainInfo {
+        if let Some(except) = env::var_os("HGPLAINEXCEPT") {
+            PlainInfo::plain_except(
+                get_bytes_from_os_string(except)
+                    .split(|&byte| byte == b',')
+                    .map(|x| x.to_vec())
+                    .collect(),
+            )
+        } else {
+            PlainInfo {
+                is_plain: env::var_os("HGPLAIN").is_some(),
+                except: vec![],
+            }
+        }
+    }
+
+    pub fn is_feature_plain(&self, feature: &str) -> bool {
+        return self.is_plain
+            && !self
+                .except
+                .iter()
+                .any(|exception| exception.as_slice() == feature.as_bytes());
+    }
+
+    pub fn is_plain(&self) -> bool {
+        self.is_plain
+    }
+
+    pub fn plainalias(&self) -> bool {
+        self.is_feature_plain("alias")
+    }
+    pub fn plainrevsetalias(&self) -> bool {
+        self.is_feature_plain("revsetalias")
+    }
+    pub fn plaintemplatealias(&self) -> bool {
+        self.is_feature_plain("templatealias")
+    }
+    pub fn plaintweakdefaults(&self) -> bool {
+        self.is_feature_plain("tweakdefaults")
+    }
+}
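
A minimal, self-contained sketch of the HGPLAIN/HGPLAINEXCEPT rule encoded by PlainInfo, written directly against std::env (the free function and its output are illustrative only):

use std::env;

// Plain mode is active when HGPLAIN or HGPLAINEXCEPT is set; a feature listed
// in HGPLAINEXCEPT keeps its normal (non-plain) behavior.
fn is_feature_plain(feature: &str) -> bool {
    let plain = env::var_os("HGPLAIN").is_some()
        || env::var_os("HGPLAINEXCEPT").is_some();
    if !plain {
        return false;
    }
    let except = env::var("HGPLAINEXCEPT").unwrap_or_default();
    !except.split(',').any(|e| e == feature)
}

fn main() {
    for feature in &["alias", "revsetalias", "templatealias", "tweakdefaults"] {
        println!("{}: plain={}", feature, is_feature_plain(feature));
    }
}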
--- a/rust/hg-core/src/dirstate.rs	Thu Oct 20 12:05:17 2022 -0400
+++ b/rust/hg-core/src/dirstate.rs	Mon Oct 24 15:32:14 2022 +0200
@@ -30,6 +30,10 @@
         p1: NULL_NODE,
         p2: NULL_NODE,
     };
+
+    pub fn is_merge(&self) -> bool {
+        self.p2 != NULL_NODE
+    }
 }
 
 pub type StateMapIter<'a> = Box<
--- a/rust/hg-core/src/dirstate_tree/status.rs	Thu Oct 20 12:05:17 2022 -0400
+++ b/rust/hg-core/src/dirstate_tree/status.rs	Mon Oct 24 15:32:14 2022 +0200
@@ -20,6 +20,7 @@
 use crate::StatusError;
 use crate::StatusOptions;
 use micro_timer::timed;
+use once_cell::sync::OnceCell;
 use rayon::prelude::*;
 use sha1::{Digest, Sha1};
 use std::borrow::Cow;
@@ -126,14 +127,14 @@
     };
     let is_at_repo_root = true;
     let hg_path = &BorrowedPath::OnDisk(HgPath::new(""));
-    let has_ignored_ancestor = false;
+    let has_ignored_ancestor = HasIgnoredAncestor::create(None, hg_path);
     let root_cached_mtime = None;
     let root_dir_metadata = None;
     // If the path we have for the repository root is a symlink, do follow it.
     // (As opposed to symlinks within the working directory which are not
     // followed, using `std::fs::symlink_metadata`.)
     common.traverse_fs_directory_and_dirstate(
-        has_ignored_ancestor,
+        &has_ignored_ancestor,
         dmap.root.as_ref(),
         hg_path,
         &root_dir,
@@ -196,6 +197,40 @@
     Unsure,
 }
 
+/// Lazy computation of whether a given path has an ignored
+/// ancestor.
+struct HasIgnoredAncestor<'a> {
+    /// `path` and `parent` constitute the inputs to the computation,
+    /// `cache` stores the outcome.
+    path: &'a HgPath,
+    parent: Option<&'a HasIgnoredAncestor<'a>>,
+    cache: OnceCell<bool>,
+}
+
+impl<'a> HasIgnoredAncestor<'a> {
+    fn create(
+        parent: Option<&'a HasIgnoredAncestor<'a>>,
+        path: &'a HgPath,
+    ) -> HasIgnoredAncestor<'a> {
+        Self {
+            path,
+            parent,
+            cache: OnceCell::new(),
+        }
+    }
+
+    fn force<'b>(&self, ignore_fn: &IgnoreFnType<'b>) -> bool {
+        match self.parent {
+            None => false,
+            Some(parent) => {
+                *(parent.cache.get_or_init(|| {
+                    parent.force(ignore_fn) || ignore_fn(&self.path)
+                }))
+            }
+        }
+    }
+}
+
 impl<'a, 'tree, 'on_disk> StatusCommon<'a, 'tree, 'on_disk> {
     fn push_outcome(
         &self,
@@ -318,9 +353,9 @@
 
     /// Returns whether all child entries of the filesystem directory have a
     /// corresponding dirstate node or are ignored.
-    fn traverse_fs_directory_and_dirstate(
+    fn traverse_fs_directory_and_dirstate<'ancestor>(
         &self,
-        has_ignored_ancestor: bool,
+        has_ignored_ancestor: &'ancestor HasIgnoredAncestor<'ancestor>,
         dirstate_nodes: ChildNodesRef<'tree, 'on_disk>,
         directory_hg_path: &BorrowedPath<'tree, 'on_disk>,
         directory_fs_path: &Path,
@@ -418,7 +453,7 @@
                 }
                 Right(fs_entry) => {
                     has_dirstate_node_or_is_ignored = self.traverse_fs_only(
-                        has_ignored_ancestor,
+                        has_ignored_ancestor.force(&self.ignore_fn),
                         directory_hg_path,
                         fs_entry,
                     )
@@ -429,12 +464,12 @@
         .try_reduce(|| true, |a, b| Ok(a && b))
     }
 
-    fn traverse_fs_and_dirstate(
+    fn traverse_fs_and_dirstate<'ancestor>(
         &self,
         fs_path: &Path,
         fs_metadata: &std::fs::Metadata,
         dirstate_node: NodeRef<'tree, 'on_disk>,
-        has_ignored_ancestor: bool,
+        has_ignored_ancestor: &'ancestor HasIgnoredAncestor<'ancestor>,
     ) -> Result<(), DirstateV2ParseError> {
         self.check_for_outdated_directory_cache(&dirstate_node)?;
         let hg_path = &dirstate_node.full_path_borrowed(self.dmap.on_disk)?;
@@ -454,11 +489,14 @@
                     .traversed
                     .push(hg_path.detach_from_tree())
             }
-            let is_ignored = has_ignored_ancestor || (self.ignore_fn)(hg_path);
+            let is_ignored = HasIgnoredAncestor::create(
+                Some(&has_ignored_ancestor),
+                hg_path,
+            );
             let is_at_repo_root = false;
             let children_all_have_dirstate_node_or_are_ignored = self
                 .traverse_fs_directory_and_dirstate(
-                    is_ignored,
+                    &is_ignored,
                     dirstate_node.children(self.dmap.on_disk)?,
                     hg_path,
                     fs_path,
@@ -472,14 +510,14 @@
                 dirstate_node,
             )?
         } else {
-            if file_or_symlink && self.matcher.matches(hg_path) {
+            if file_or_symlink && self.matcher.matches(&hg_path) {
                 if let Some(entry) = dirstate_node.entry()? {
                     if !entry.any_tracked() {
                         // Forward-compat if we start tracking unknown/ignored
                         // files for caching reasons
                         self.mark_unknown_or_ignored(
-                            has_ignored_ancestor,
-                            hg_path,
+                            has_ignored_ancestor.force(&self.ignore_fn),
+                            &hg_path,
                         );
                     }
                     if entry.added() {
@@ -495,7 +533,7 @@
                     // `node.entry.is_none()` indicates a "directory"
                     // node, but the filesystem has a file
                     self.mark_unknown_or_ignored(
-                        has_ignored_ancestor,
+                        has_ignored_ancestor.force(&self.ignore_fn),
                         hg_path,
                     );
                 }
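
The HasIgnoredAncestor type above memoizes the "is any ancestor ignored?" question with once_cell::sync::OnceCell, so the ignore function only runs when a caller actually needs the answer. A simplified standalone sketch of that memoization pattern, using plain strings instead of HgPath and a closure instead of IgnoreFnType (illustrative only, not hg-core's types):

use once_cell::sync::OnceCell;

struct Node<'a> {
    path: &'a str,
    parent: Option<&'a Node<'a>>,
    cache: OnceCell<bool>,
}

impl<'a> Node<'a> {
    fn new(parent: Option<&'a Node<'a>>, path: &'a str) -> Self {
        Node { path, parent, cache: OnceCell::new() }
    }

    // Returns whether any ancestor directory is ignored, computing and
    // caching the answer only when someone actually asks for it.
    fn force(&self, is_ignored: &dyn Fn(&str) -> bool) -> bool {
        match self.parent {
            None => false,
            Some(parent) => *parent.cache.get_or_init(|| {
                parent.force(is_ignored) || is_ignored(parent.path)
            }),
        }
    }
}

fn main() {
    let root = Node::new(None, "");
    let dir = Node::new(Some(&root), "target");
    let sub = Node::new(Some(&dir), "target/debug");
    let is_ignored = |p: &str| p == "target";
    println!("{}", sub.force(&is_ignored)); // true: "target" is ignored
    println!("{}", dir.force(&is_ignored)); // false: only the root is above it
}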
--- a/rust/hg-core/src/errors.rs	Thu Oct 20 12:05:17 2022 -0400
+++ b/rust/hg-core/src/errors.rs	Mon Oct 24 15:32:14 2022 +0200
@@ -33,6 +33,7 @@
     Abort {
         message: String,
         detailed_exit_code: exit_codes::ExitCode,
+        hint: Option<String>,
     },
 
     /// A configuration value is not in the expected syntax.
@@ -82,10 +83,12 @@
     pub fn abort(
         explanation: impl Into<String>,
         exit_code: exit_codes::ExitCode,
+        hint: Option<String>,
     ) -> Self {
         HgError::Abort {
             message: explanation.into(),
             detailed_exit_code: exit_code,
+            hint,
         }
     }
 }
--- a/rust/hg-core/src/exit_codes.rs	Thu Oct 20 12:05:17 2022 -0400
+++ b/rust/hg-core/src/exit_codes.rs	Mon Oct 24 15:32:14 2022 +0200
@@ -9,6 +9,10 @@
 // Abort when there is a config related error
 pub const CONFIG_ERROR_ABORT: ExitCode = 30;
 
+/// Indicates that the operation might work if retried in a different state.
+/// Examples: Unresolved merge conflicts, unfinished operations
+pub const STATE_ERROR: ExitCode = 20;
+
 // Abort when there is an error while parsing config
 pub const CONFIG_PARSE_ERROR_ABORT: ExitCode = 10;
 
--- a/rust/hg-core/src/filepatterns.rs	Thu Oct 20 12:05:17 2022 -0400
+++ b/rust/hg-core/src/filepatterns.rs	Mon Oct 24 15:32:14 2022 +0200
@@ -314,6 +314,8 @@
         m.insert(b"rootglob".as_ref(), b"rootglob:".as_ref());
         m.insert(b"include".as_ref(), b"include:".as_ref());
         m.insert(b"subinclude".as_ref(), b"subinclude:".as_ref());
+        m.insert(b"path".as_ref(), b"path:".as_ref());
+        m.insert(b"rootfilesin".as_ref(), b"rootfilesin:".as_ref());
         m
     };
 }
@@ -329,6 +331,7 @@
 pub fn parse_pattern_file_contents(
     lines: &[u8],
     file_path: &Path,
+    default_syntax_override: Option<&[u8]>,
     warn: bool,
 ) -> Result<(Vec<IgnorePattern>, Vec<PatternFileWarning>), PatternError> {
     let comment_regex = Regex::new(r"((?:^|[^\\])(?:\\\\)*)#.*").unwrap();
@@ -338,7 +341,8 @@
     let mut inputs: Vec<IgnorePattern> = vec![];
     let mut warnings: Vec<PatternFileWarning> = vec![];
 
-    let mut current_syntax = b"relre:".as_ref();
+    let mut current_syntax =
+        default_syntax_override.unwrap_or(b"relre:".as_ref());
 
     for (line_number, mut line) in lines.split(|c| *c == b'\n').enumerate() {
         let line_number = line_number + 1;
@@ -413,7 +417,7 @@
     match std::fs::read(file_path) {
         Ok(contents) => {
             inspect_pattern_bytes(&contents);
-            parse_pattern_file_contents(&contents, file_path, warn)
+            parse_pattern_file_contents(&contents, file_path, None, warn)
         }
         Err(e) if e.kind() == std::io::ErrorKind::NotFound => Ok((
             vec![],
@@ -601,9 +605,14 @@
         let lines = b"syntax: glob\n*.elc";
 
         assert_eq!(
-            parse_pattern_file_contents(lines, Path::new("file_path"), false)
-                .unwrap()
-                .0,
+            parse_pattern_file_contents(
+                lines,
+                Path::new("file_path"),
+                None,
+                false
+            )
+            .unwrap()
+            .0,
             vec![IgnorePattern::new(
                 PatternSyntax::RelGlob,
                 b"*.elc",
@@ -614,16 +623,26 @@
         let lines = b"syntax: include\nsyntax: glob";
 
         assert_eq!(
-            parse_pattern_file_contents(lines, Path::new("file_path"), false)
-                .unwrap()
-                .0,
+            parse_pattern_file_contents(
+                lines,
+                Path::new("file_path"),
+                None,
+                false
+            )
+            .unwrap()
+            .0,
             vec![]
         );
         let lines = b"glob:**.o";
         assert_eq!(
-            parse_pattern_file_contents(lines, Path::new("file_path"), false)
-                .unwrap()
-                .0,
+            parse_pattern_file_contents(
+                lines,
+                Path::new("file_path"),
+                None,
+                false
+            )
+            .unwrap()
+            .0,
             vec![IgnorePattern::new(
                 PatternSyntax::RelGlob,
                 b"**.o",
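
The new default_syntax_override argument only changes the starting syntax; an explicit "syntax:" line in the file still takes precedence, and the fallback remains "relre:". A tiny illustrative sketch of that precedence (the helper name is made up for the example):

fn effective_syntax<'a>(
    syntax_line: Option<&'a str>,
    default_override: Option<&'a str>,
) -> &'a str {
    // An explicit "syntax:" line wins, then the caller's override, then the
    // historical default.
    syntax_line.or(default_override).unwrap_or("relre:")
}

fn main() {
    assert_eq!(effective_syntax(None, None), "relre:");
    assert_eq!(effective_syntax(None, Some("relglob:")), "relglob:");
    assert_eq!(effective_syntax(Some("glob:"), Some("relglob:")), "glob:");
    println!("ok");
}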
--- a/rust/hg-core/src/lib.rs	Thu Oct 20 12:05:17 2022 -0400
+++ b/rust/hg-core/src/lib.rs	Mon Oct 24 15:32:14 2022 +0200
@@ -7,6 +7,8 @@
 mod ancestors;
 pub mod dagops;
 pub mod errors;
+pub mod narrow;
+pub mod sparse;
 pub use ancestors::{AncestorsIterator, MissingAncestors};
 pub mod dirstate;
 pub mod dirstate_tree;
--- a/rust/hg-core/src/matchers.rs	Thu Oct 20 12:05:17 2022 -0400
+++ b/rust/hg-core/src/matchers.rs	Mon Oct 24 15:32:14 2022 +0200
@@ -46,7 +46,7 @@
     Recursive,
 }
 
-pub trait Matcher {
+pub trait Matcher: core::fmt::Debug {
     /// Explicitly listed files
     fn file_set(&self) -> Option<&HashSet<HgPathBuf>>;
     /// Returns whether `filename` is in `file_set`
@@ -283,6 +283,18 @@
     parents: HashSet<HgPathBuf>,
 }
 
+impl core::fmt::Debug for IncludeMatcher<'_> {
+    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
+        f.debug_struct("IncludeMatcher")
+            .field("patterns", &String::from_utf8_lossy(&self.patterns))
+            .field("prefix", &self.prefix)
+            .field("roots", &self.roots)
+            .field("dirs", &self.dirs)
+            .field("parents", &self.parents)
+            .finish()
+    }
+}
+
 impl<'a> Matcher for IncludeMatcher<'a> {
     fn file_set(&self) -> Option<&HashSet<HgPathBuf>> {
         None
@@ -330,6 +342,7 @@
 }
 
 /// The union of multiple matchers. Will match if any of the matchers match.
+#[derive(Debug)]
 pub struct UnionMatcher {
     matchers: Vec<Box<dyn Matcher + Sync>>,
 }
@@ -393,6 +406,7 @@
     }
 }
 
+#[derive(Debug)]
 pub struct IntersectionMatcher {
     m1: Box<dyn Matcher + Sync>,
     m2: Box<dyn Matcher + Sync>,
@@ -474,6 +488,91 @@
     }
 }
 
+#[derive(Debug)]
+pub struct DifferenceMatcher {
+    base: Box<dyn Matcher + Sync>,
+    excluded: Box<dyn Matcher + Sync>,
+    files: Option<HashSet<HgPathBuf>>,
+}
+
+impl Matcher for DifferenceMatcher {
+    fn file_set(&self) -> Option<&HashSet<HgPathBuf>> {
+        self.files.as_ref()
+    }
+
+    fn exact_match(&self, filename: &HgPath) -> bool {
+        self.files.as_ref().map_or(false, |f| f.contains(filename))
+    }
+
+    fn matches(&self, filename: &HgPath) -> bool {
+        self.base.matches(filename) && !self.excluded.matches(filename)
+    }
+
+    fn visit_children_set(&self, directory: &HgPath) -> VisitChildrenSet {
+        let excluded_set = self.excluded.visit_children_set(directory);
+        if excluded_set == VisitChildrenSet::Recursive {
+            return VisitChildrenSet::Empty;
+        }
+        let base_set = self.base.visit_children_set(directory);
+        // Possible values for base: 'recursive', 'this', set(...), set()
+        // Possible values for excluded:          'this', set(...), set()
+        // If excluded has nothing under here that we care about, return base,
+        // even if it's 'recursive'.
+        if excluded_set == VisitChildrenSet::Empty {
+            return base_set;
+        }
+        match base_set {
+            VisitChildrenSet::This | VisitChildrenSet::Recursive => {
+                // Never return 'recursive' here if excluded_set is any kind of
+                // non-empty (either 'this' or set(foo)), since excluded might
+                // return set() for a subdirectory.
+                VisitChildrenSet::This
+            }
+            set => {
+                // Possible values for base:         set(...), set()
+                // Possible values for excluded: 'this', set(...)
+                // We ignore excluded set results. They're possibly incorrect:
+                //  base = path:dir/subdir
+                //  excluded=rootfilesin:dir,
+                //  visit_children_set(''):
+                //   base returns {'dir'}, excluded returns {'dir'}, if we
+                //   subtracted we'd return set(), which is *not* correct, we
+                //   still need to visit 'dir'!
+                set
+            }
+        }
+    }
+
+    fn matches_everything(&self) -> bool {
+        false
+    }
+
+    fn is_exact(&self) -> bool {
+        self.base.is_exact()
+    }
+}
+
+impl DifferenceMatcher {
+    pub fn new(
+        base: Box<dyn Matcher + Sync>,
+        excluded: Box<dyn Matcher + Sync>,
+    ) -> Self {
+        let base_is_exact = base.is_exact();
+        let base_files = base.file_set().map(ToOwned::to_owned);
+        let mut new = Self {
+            base,
+            excluded,
+            files: None,
+        };
+        if base_is_exact {
+            new.files = base_files.map(|files| {
+                files.iter().cloned().filter(|f| new.matches(f)).collect()
+            });
+        }
+        new
+    }
+}
+
 /// Returns a function that matches an `HgPath` against the given regex
 /// pattern.
 ///
@@ -1489,4 +1588,101 @@
             VisitChildrenSet::Empty
         );
     }
+
+    #[test]
+    fn test_differencematcher() {
+        // Two alwaysmatchers should function like a nevermatcher
+        let m1 = AlwaysMatcher;
+        let m2 = AlwaysMatcher;
+        let matcher = DifferenceMatcher::new(Box::new(m1), Box::new(m2));
+
+        for case in &[
+            &b""[..],
+            b"dir",
+            b"dir/subdir",
+            b"dir/subdir/z",
+            b"dir/foo",
+            b"dir/subdir/x",
+            b"folder",
+        ] {
+            assert_eq!(
+                matcher.visit_children_set(HgPath::new(case)),
+                VisitChildrenSet::Empty
+            );
+        }
+
+        // One always and one never should behave the same as an always
+        let m1 = AlwaysMatcher;
+        let m2 = NeverMatcher;
+        let matcher = DifferenceMatcher::new(Box::new(m1), Box::new(m2));
+
+        for case in &[
+            &b""[..],
+            b"dir",
+            b"dir/subdir",
+            b"dir/subdir/z",
+            b"dir/foo",
+            b"dir/subdir/x",
+            b"folder",
+        ] {
+            assert_eq!(
+                matcher.visit_children_set(HgPath::new(case)),
+                VisitChildrenSet::Recursive
+            );
+        }
+
+        // Two include matchers
+        let m1 = Box::new(
+            IncludeMatcher::new(vec![IgnorePattern::new(
+                PatternSyntax::RelPath,
+                b"dir/subdir",
+                Path::new("/repo"),
+            )])
+            .unwrap(),
+        );
+        let m2 = Box::new(
+            IncludeMatcher::new(vec![IgnorePattern::new(
+                PatternSyntax::RootFiles,
+                b"dir",
+                Path::new("/repo"),
+            )])
+            .unwrap(),
+        );
+
+        let matcher = DifferenceMatcher::new(m1, m2);
+
+        let mut set = HashSet::new();
+        set.insert(HgPathBuf::from_bytes(b"dir"));
+        assert_eq!(
+            matcher.visit_children_set(HgPath::new(b"")),
+            VisitChildrenSet::Set(set)
+        );
+
+        let mut set = HashSet::new();
+        set.insert(HgPathBuf::from_bytes(b"subdir"));
+        assert_eq!(
+            matcher.visit_children_set(HgPath::new(b"dir")),
+            VisitChildrenSet::Set(set)
+        );
+        assert_eq!(
+            matcher.visit_children_set(HgPath::new(b"dir/subdir")),
+            VisitChildrenSet::Recursive
+        );
+        assert_eq!(
+            matcher.visit_children_set(HgPath::new(b"dir/foo")),
+            VisitChildrenSet::Empty
+        );
+        assert_eq!(
+            matcher.visit_children_set(HgPath::new(b"folder")),
+            VisitChildrenSet::Empty
+        );
+        assert_eq!(
+            matcher.visit_children_set(HgPath::new(b"dir/subdir/z")),
+            VisitChildrenSet::This
+        );
+        assert_eq!(
+            matcher.visit_children_set(HgPath::new(b"dir/subdir/x")),
+            VisitChildrenSet::This
+        );
+    }
 }
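
DifferenceMatcher::matches is simply "in base and not in excluded"; the subtlety lives in visit_children_set, which must stay conservative so directory pruning never skips paths the base matcher still needs. A reduced sketch of the matches() composition over plain closures (illustrative only):

// "base minus excluded", reduced to predicates over path strings.
fn difference<'a>(
    base: impl Fn(&str) -> bool + 'a,
    excluded: impl Fn(&str) -> bool + 'a,
) -> impl Fn(&str) -> bool + 'a {
    move |path| base(path) && !excluded(path)
}

fn main() {
    // base: everything under dir/subdir; excluded: files directly under dir/
    let base = |p: &str| p.starts_with("dir/subdir");
    let excluded =
        |p: &str| p.starts_with("dir/") && !p["dir/".len()..].contains('/');
    let m = difference(base, excluded);
    assert!(m("dir/subdir/x"));
    assert!(!m("dir/foo"));
    println!("ok");
}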
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/rust/hg-core/src/narrow.rs	Mon Oct 24 15:32:14 2022 +0200
@@ -0,0 +1,111 @@
+use std::path::Path;
+
+use crate::{
+    errors::HgError,
+    exit_codes,
+    filepatterns::parse_pattern_file_contents,
+    matchers::{
+        AlwaysMatcher, DifferenceMatcher, IncludeMatcher, Matcher,
+        NeverMatcher,
+    },
+    repo::Repo,
+    requirements::NARROW_REQUIREMENT,
+    sparse::{self, SparseConfigError, SparseWarning},
+};
+
+/// The file in .hg/store/ that indicates which paths exist in the store
+const FILENAME: &str = "narrowspec";
+/// The file in .hg/ that indicates which paths exist in the dirstate
+const DIRSTATE_FILENAME: &str = "narrowspec.dirstate";
+
+/// Pattern prefixes that are allowed in narrow patterns. This list MUST
+/// only contain patterns that are fast and safe to evaluate. Keep in mind
+/// that patterns are supplied by clients and executed on remote servers
+/// as part of wire protocol commands. That means that changes to this
+/// data structure influence the wire protocol and should not be taken
+/// lightly - especially removals.
+const VALID_PREFIXES: [&str; 2] = ["path:", "rootfilesin:"];
+
+/// Return the matcher for the current narrow spec, and all configuration
+/// warnings to display.
+pub fn matcher(
+    repo: &Repo,
+) -> Result<(Box<dyn Matcher + Sync>, Vec<SparseWarning>), SparseConfigError> {
+    let mut warnings = vec![];
+    if !repo.requirements().contains(NARROW_REQUIREMENT) {
+        return Ok((Box::new(AlwaysMatcher), warnings));
+    }
+    // Treat "narrowspec does not exist" the same as "narrowspec file exists
+    // and is empty".
+    let store_spec = repo.store_vfs().try_read(FILENAME)?.unwrap_or(vec![]);
+    let working_copy_spec =
+        repo.hg_vfs().try_read(DIRSTATE_FILENAME)?.unwrap_or(vec![]);
+    if store_spec != working_copy_spec {
+        return Err(HgError::abort(
+            "working copy's narrowspec is stale",
+            exit_codes::STATE_ERROR,
+            Some("run 'hg tracked --update-working-copy'".into()),
+        )
+        .into());
+    }
+
+    let config = sparse::parse_config(
+        &store_spec,
+        sparse::SparseConfigContext::Narrow,
+    )?;
+
+    warnings.extend(config.warnings);
+
+    if !config.profiles.is_empty() {
+        // TODO (from Python impl) maybe do something with profiles?
+        return Err(SparseConfigError::IncludesInNarrow);
+    }
+    validate_patterns(&config.includes)?;
+    validate_patterns(&config.excludes)?;
+
+    if config.includes.is_empty() {
+        return Ok((Box::new(NeverMatcher), warnings));
+    }
+
+    let (patterns, subwarnings) = parse_pattern_file_contents(
+        &config.includes,
+        Path::new(""),
+        None,
+        false,
+    )?;
+    warnings.extend(subwarnings.into_iter().map(From::from));
+
+    let mut m: Box<dyn Matcher + Sync> =
+        Box::new(IncludeMatcher::new(patterns)?);
+
+    let (patterns, subwarnings) = parse_pattern_file_contents(
+        &config.excludes,
+        Path::new(""),
+        None,
+        false,
+    )?;
+    if !patterns.is_empty() {
+        warnings.extend(subwarnings.into_iter().map(From::from));
+        let exclude_matcher = Box::new(IncludeMatcher::new(patterns)?);
+        m = Box::new(DifferenceMatcher::new(m, exclude_matcher));
+    }
+
+    Ok((m, warnings))
+}
+
+fn validate_patterns(patterns: &[u8]) -> Result<(), SparseConfigError> {
+    for pattern in patterns.split(|c| *c == b'\n') {
+        if pattern.is_empty() {
+            continue;
+        }
+        // A narrow pattern must use one of the allowed (cheap) prefixes.
+        if !VALID_PREFIXES
+            .iter()
+            .any(|prefix| pattern.starts_with(prefix.as_bytes()))
+        {
+            return Err(SparseConfigError::InvalidNarrowPrefix(
+                pattern.to_owned(),
+            ));
+        }
+    }
+    Ok(())
+}
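
For reference, the same prefix validation in a self-contained form, with a couple of example inputs (error type simplified to String for the sketch):

const VALID_PREFIXES: [&str; 2] = ["path:", "rootfilesin:"];

// Every non-empty line must use one of the allowed, cheap-to-evaluate
// prefixes; anything else is rejected.
fn validate(patterns: &[u8]) -> Result<(), String> {
    for pattern in patterns.split(|c| *c == b'\n') {
        if pattern.is_empty() {
            continue;
        }
        if !VALID_PREFIXES
            .iter()
            .any(|prefix| pattern.starts_with(prefix.as_bytes()))
        {
            return Err(String::from_utf8_lossy(pattern).into_owned());
        }
    }
    Ok(())
}

fn main() {
    assert!(validate(b"path:foo\nrootfilesin:bar").is_ok());
    assert!(validate(b"glob:*.rs").is_err());
    println!("ok");
}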
--- a/rust/hg-core/src/revlog/filelog.rs	Thu Oct 20 12:05:17 2022 -0400
+++ b/rust/hg-core/src/revlog/filelog.rs	Mon Oct 24 15:32:14 2022 +0200
@@ -17,18 +17,21 @@
 }
 
 impl Filelog {
-    pub fn open(repo: &Repo, file_path: &HgPath) -> Result<Self, HgError> {
+    pub fn open_vfs(
+        store_vfs: &crate::vfs::Vfs<'_>,
+        file_path: &HgPath,
+    ) -> Result<Self, HgError> {
         let index_path = store_path(file_path, b".i");
         let data_path = store_path(file_path, b".d");
-        let revlog = Revlog::open(
-            &repo.store_vfs(),
-            index_path,
-            Some(&data_path),
-            false,
-        )?;
+        let revlog =
+            Revlog::open(store_vfs, index_path, Some(&data_path), false)?;
         Ok(Self { revlog })
     }
 
+    pub fn open(repo: &Repo, file_path: &HgPath) -> Result<Self, HgError> {
+        Self::open_vfs(&repo.store_vfs(), file_path)
+    }
+
     /// The given node ID is that of the file as found in a filelog, not of a
     /// changeset.
     pub fn data_for_node(
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/rust/hg-core/src/sparse.rs	Mon Oct 24 15:32:14 2022 +0200
@@ -0,0 +1,338 @@
+use std::{collections::HashSet, path::Path};
+
+use format_bytes::{write_bytes, DisplayBytes};
+
+use crate::{
+    errors::HgError,
+    filepatterns::parse_pattern_file_contents,
+    matchers::{
+        AlwaysMatcher, DifferenceMatcher, IncludeMatcher, Matcher,
+        UnionMatcher,
+    },
+    operations::cat,
+    repo::Repo,
+    requirements::SPARSE_REQUIREMENT,
+    utils::{hg_path::HgPath, SliceExt},
+    IgnorePattern, PatternError, PatternFileWarning, PatternSyntax, Revision,
+    NULL_REVISION,
+};
+
+/// Command which is triggering the config read
+#[derive(Copy, Clone, Debug)]
+pub enum SparseConfigContext {
+    Sparse,
+    Narrow,
+}
+
+impl DisplayBytes for SparseConfigContext {
+    fn display_bytes(
+        &self,
+        output: &mut dyn std::io::Write,
+    ) -> std::io::Result<()> {
+        match self {
+            SparseConfigContext::Sparse => write_bytes!(output, b"sparse"),
+            SparseConfigContext::Narrow => write_bytes!(output, b"narrow"),
+        }
+    }
+}
+
+/// Possible warnings when reading sparse configuration
+#[derive(Debug, derive_more::From)]
+pub enum SparseWarning {
+    /// Warns about improper paths that start with "/"
+    RootWarning {
+        context: SparseConfigContext,
+        line: Vec<u8>,
+    },
+    /// Warns about a profile missing from the given changelog revision
+    ProfileNotFound { profile: Vec<u8>, rev: Revision },
+    #[from]
+    Pattern(PatternFileWarning),
+}
+
+/// Parsed sparse config
+#[derive(Debug, Default)]
+pub struct SparseConfig {
+    // Line-separated
+    pub(crate) includes: Vec<u8>,
+    // Line-separated
+    pub(crate) excludes: Vec<u8>,
+    pub(crate) profiles: HashSet<Vec<u8>>,
+    pub(crate) warnings: Vec<SparseWarning>,
+}
+
+/// All possible errors when reading sparse/narrow config
+#[derive(Debug, derive_more::From)]
+pub enum SparseConfigError {
+    IncludesAfterExcludes {
+        context: SparseConfigContext,
+    },
+    EntryOutsideSection {
+        context: SparseConfigContext,
+        line: Vec<u8>,
+    },
+    /// Narrow config does not support '%include' directives
+    IncludesInNarrow,
+    /// An invalid pattern prefix was given to the narrow spec. Includes the
+    /// entire pattern for context.
+    InvalidNarrowPrefix(Vec<u8>),
+    #[from]
+    HgError(HgError),
+    #[from]
+    PatternError(PatternError),
+}
+
+/// Parse sparse config file content.
+pub(crate) fn parse_config(
+    raw: &[u8],
+    context: SparseConfigContext,
+) -> Result<SparseConfig, SparseConfigError> {
+    let mut includes = vec![];
+    let mut excludes = vec![];
+    let mut profiles = HashSet::new();
+    let mut warnings = vec![];
+
+    #[derive(PartialEq, Eq)]
+    enum Current {
+        Includes,
+        Excludes,
+        None,
+    }
+
+    let mut current = Current::None;
+    let mut in_section = false;
+
+    for line in raw.split(|c| *c == b'\n') {
+        let line = line.trim();
+        if line.is_empty() || line[0] == b'#' {
+            // empty or comment line, skip
+            continue;
+        }
+        if line.starts_with(b"%include ") {
+            let profile = line[b"%include ".len()..].trim();
+            if !profile.is_empty() {
+                profiles.insert(profile.into());
+            }
+        } else if line == b"[include]" {
+            if in_section && current == Current::Excludes {
+                return Err(SparseConfigError::IncludesAfterExcludes {
+                    context,
+                });
+            }
+            in_section = true;
+            current = Current::Includes;
+            continue;
+        } else if line == b"[exclude]" {
+            in_section = true;
+            current = Current::Excludes;
+        } else {
+            if current == Current::None {
+                return Err(SparseConfigError::EntryOutsideSection {
+                    context,
+                    line: line.into(),
+                });
+            }
+            if line.trim().starts_with(b"/") {
+                warnings.push(SparseWarning::RootWarning {
+                    context,
+                    line: line.into(),
+                });
+                continue;
+            }
+            match current {
+                Current::Includes => {
+                    includes.push(b'\n');
+                    includes.extend(line.iter());
+                }
+                Current::Excludes => {
+                    excludes.push(b'\n');
+                    excludes.extend(line.iter());
+                }
+                Current::None => unreachable!(),
+            }
+        }
+    }
+
+    Ok(SparseConfig {
+        includes,
+        excludes,
+        profiles,
+        warnings,
+    })
+}
+
+fn read_temporary_includes(
+    repo: &Repo,
+) -> Result<Vec<Vec<u8>>, SparseConfigError> {
+    let raw = repo.hg_vfs().try_read("tempsparse")?.unwrap_or(vec![]);
+    if raw.is_empty() {
+        return Ok(vec![]);
+    }
+    Ok(raw.split(|c| *c == b'\n').map(ToOwned::to_owned).collect())
+}
+
+/// Obtain sparse checkout patterns for the given revision
+fn patterns_for_rev(
+    repo: &Repo,
+    rev: Revision,
+) -> Result<Option<SparseConfig>, SparseConfigError> {
+    if !repo.has_sparse() {
+        return Ok(None);
+    }
+    let raw = repo.hg_vfs().try_read("sparse")?.unwrap_or(vec![]);
+
+    if raw.is_empty() {
+        return Ok(None);
+    }
+
+    let mut config = parse_config(&raw, SparseConfigContext::Sparse)?;
+
+    if !config.profiles.is_empty() {
+        let mut profiles: Vec<Vec<u8>> = config.profiles.into_iter().collect();
+        let mut visited = HashSet::new();
+
+        while let Some(profile) = profiles.pop() {
+            if visited.contains(&profile) {
+                continue;
+            }
+            visited.insert(profile.to_owned());
+
+            let output =
+                cat(repo, &rev.to_string(), vec![HgPath::new(&profile)])
+                    .map_err(|_| {
+                        HgError::corrupted(format!(
+                            "dirstate points to non-existent parent node"
+                        ))
+                    })?;
+            if output.results.is_empty() {
+                config.warnings.push(SparseWarning::ProfileNotFound {
+                    profile: profile.to_owned(),
+                    rev,
+                })
+            }
+
+            let subconfig = parse_config(
+                &output.results[0].1,
+                SparseConfigContext::Sparse,
+            )?;
+            if !subconfig.includes.is_empty() {
+                config.includes.push(b'\n');
+                config.includes.extend(&subconfig.includes);
+            }
+            if !subconfig.excludes.is_empty() {
+                config.excludes.push(b'\n');
+                config.excludes.extend(&subconfig.excludes);
+            }
+            config.warnings.extend(subconfig.warnings.into_iter());
+            profiles.extend(subconfig.profiles.into_iter());
+        }
+
+        config.profiles = visited;
+    }
+
+    if !config.includes.is_empty() {
+        config.includes.extend(b"\n.hg*");
+    }
+
+    Ok(Some(config))
+}
+
+/// Obtain a matcher for sparse working directories.
+pub fn matcher(
+    repo: &Repo,
+) -> Result<(Box<dyn Matcher + Sync>, Vec<SparseWarning>), SparseConfigError> {
+    let mut warnings = vec![];
+    if !repo.requirements().contains(SPARSE_REQUIREMENT) {
+        return Ok((Box::new(AlwaysMatcher), warnings));
+    }
+
+    let parents = repo.dirstate_parents()?;
+    let mut revs = vec![];
+    let p1_rev =
+        repo.changelog()?
+            .rev_from_node(parents.p1.into())
+            .map_err(|_| {
+                HgError::corrupted(format!(
+                    "dirstate points to non-existent parent node"
+                ))
+            })?;
+    if p1_rev != NULL_REVISION {
+        revs.push(p1_rev)
+    }
+    let p2_rev =
+        repo.changelog()?
+            .rev_from_node(parents.p2.into())
+            .map_err(|_| {
+                HgError::corrupted(format!(
+                    "dirstate points to non-existent parent node"
+                ))
+            })?;
+    if p2_rev != NULL_REVISION {
+        revs.push(p2_rev)
+    }
+    let mut matchers = vec![];
+
+    for rev in revs.iter() {
+        let config = patterns_for_rev(repo, *rev);
+        if let Ok(Some(config)) = config {
+            warnings.extend(config.warnings);
+            let mut m: Box<dyn Matcher + Sync> = Box::new(AlwaysMatcher);
+            if !config.includes.is_empty() {
+                let (patterns, subwarnings) = parse_pattern_file_contents(
+                    &config.includes,
+                    Path::new(""),
+                    Some(b"relglob:".as_ref()),
+                    false,
+                )?;
+                warnings.extend(subwarnings.into_iter().map(From::from));
+                m = Box::new(IncludeMatcher::new(patterns)?);
+            }
+            if !config.excludes.is_empty() {
+                let (patterns, subwarnings) = parse_pattern_file_contents(
+                    &config.excludes,
+                    Path::new(""),
+                    Some(b"relglob:".as_ref()),
+                    false,
+                )?;
+                warnings.extend(subwarnings.into_iter().map(From::from));
+                m = Box::new(DifferenceMatcher::new(
+                    m,
+                    Box::new(IncludeMatcher::new(patterns)?),
+                ));
+            }
+            matchers.push(m);
+        }
+    }
+    let result: Box<dyn Matcher + Sync> = match matchers.len() {
+        0 => Box::new(AlwaysMatcher),
+        1 => matchers.pop().expect("just checked len() == 1"),
+        _ => Box::new(UnionMatcher::new(matchers)),
+    };
+
+    let matcher =
+        force_include_matcher(result, &read_temporary_includes(repo)?)?;
+    Ok((matcher, warnings))
+}
+
+/// Returns a matcher that returns true for any of the forced includes before
+/// testing against the actual matcher
+fn force_include_matcher(
+    result: Box<dyn Matcher + Sync>,
+    temp_includes: &[Vec<u8>],
+) -> Result<Box<dyn Matcher + Sync>, PatternError> {
+    if temp_includes.is_empty() {
+        return Ok(result);
+    }
+    let forced_include_matcher = IncludeMatcher::new(
+        temp_includes
+            .into_iter()
+            .map(|include| {
+                IgnorePattern::new(PatternSyntax::Path, include, Path::new(""))
+            })
+            .collect(),
+    )?;
+    Ok(Box::new(UnionMatcher::new(vec![
+        Box::new(forced_include_matcher),
+        result,
+    ])))
+}
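
A reduced, standalone sketch of the config format accepted by parse_config (comments, "%include <profile>" lines, "[include]"/"[exclude]" sections); the real parser additionally rejects includes after excludes and warns about absolute paths:

#[derive(Default, Debug)]
struct Parsed {
    includes: Vec<String>,
    excludes: Vec<String>,
    profiles: Vec<String>,
}

fn parse(raw: &str) -> Result<Parsed, String> {
    enum Section {
        None,
        Includes,
        Excludes,
    }
    let mut out = Parsed::default();
    let mut section = Section::None;
    for line in raw.lines().map(str::trim) {
        if line.is_empty() || line.starts_with('#') {
            continue; // blank or comment line
        } else if let Some(profile) = line.strip_prefix("%include ") {
            out.profiles.push(profile.trim().to_owned());
        } else if line == "[include]" {
            section = Section::Includes;
        } else if line == "[exclude]" {
            section = Section::Excludes;
        } else {
            match section {
                Section::Includes => out.includes.push(line.to_owned()),
                Section::Excludes => out.excludes.push(line.to_owned()),
                Section::None => {
                    return Err(format!("entry outside section: {}", line))
                }
            }
        }
    }
    Ok(out)
}

fn main() {
    let raw = "%include base.sparse\n[include]\nsrc\n[exclude]\nsrc/tests\n";
    println!("{:?}", parse(raw).unwrap());
}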
--- a/rust/hg-core/src/vfs.rs	Thu Oct 20 12:05:17 2022 -0400
+++ b/rust/hg-core/src/vfs.rs	Mon Oct 24 15:32:14 2022 +0200
@@ -40,6 +40,23 @@
         std::fs::read(&path).when_reading_file(&path)
     }
 
+    /// Returns `Ok(None)` if the file does not exist.
+    pub fn try_read(
+        &self,
+        relative_path: impl AsRef<Path>,
+    ) -> Result<Option<Vec<u8>>, HgError> {
+        match self.read(relative_path) {
+            Err(e) => match &e {
+                HgError::IoError { error, .. } => match error.kind() {
+                    ErrorKind::NotFound => return Ok(None),
+                    _ => Err(e),
+                },
+                _ => Err(e),
+            },
+            Ok(v) => Ok(Some(v)),
+        }
+    }
+
     fn mmap_open_gen(
         &self,
         relative_path: impl AsRef<Path>,
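
The same "a missing file is not an error" pattern as Vfs::try_read, expressed directly against std::fs for illustration:

use std::io::ErrorKind;
use std::path::Path;

fn try_read(path: &Path) -> std::io::Result<Option<Vec<u8>>> {
    match std::fs::read(path) {
        Ok(bytes) => Ok(Some(bytes)),
        // Treat "not found" as an empty result instead of an error.
        Err(e) if e.kind() == ErrorKind::NotFound => Ok(None),
        Err(e) => Err(e),
    }
}

fn main() -> std::io::Result<()> {
    let spec = try_read(Path::new("/nonexistent/narrowspec"))?.unwrap_or_default();
    println!("{} bytes", spec.len());
    Ok(())
}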
--- a/rust/hg-cpython/src/dirstate/status.rs	Thu Oct 20 12:05:17 2022 -0400
+++ b/rust/hg-cpython/src/dirstate/status.rs	Mon Oct 24 15:32:14 2022 +0200
@@ -15,7 +15,10 @@
     PyResult, PyTuple, Python, PythonObject, ToPyObject,
 };
 use hg::dirstate::status::StatusPath;
-use hg::matchers::{IntersectionMatcher, Matcher, NeverMatcher, UnionMatcher};
+use hg::matchers::{
+    DifferenceMatcher, IntersectionMatcher, Matcher, NeverMatcher,
+    UnionMatcher,
+};
 use hg::{
     matchers::{AlwaysMatcher, FileMatcher, IncludeMatcher},
     parse_pattern_syntax,
@@ -233,6 +236,12 @@
 
             Ok(Box::new(IntersectionMatcher::new(m1, m2)))
         }
+        "differencematcher" => {
+            let m1 = extract_matcher(py, matcher.getattr(py, "_m1")?)?;
+            let m2 = extract_matcher(py, matcher.getattr(py, "_m2")?)?;
+
+            Ok(Box::new(DifferenceMatcher::new(m1, m2)))
+        }
         e => Err(PyErr::new::<FallbackError, _>(
             py,
             format!("Unsupported matcher {}", e),
--- a/rust/rhg/Cargo.toml	Thu Oct 20 12:05:17 2022 -0400
+++ b/rust/rhg/Cargo.toml	Mon Oct 24 15:32:14 2022 +0200
@@ -22,3 +22,4 @@
 format-bytes = "0.3.0"
 users = "0.11.0"
 which = "4.2.5"
+rayon = "1.5.1"
--- a/rust/rhg/src/commands/debugdata.rs	Thu Oct 20 12:05:17 2022 -0400
+++ b/rust/rhg/src/commands/debugdata.rs	Mon Oct 24 15:32:14 2022 +0200
@@ -55,6 +55,11 @@
         };
 
     let repo = invocation.repo?;
+    if repo.has_narrow() {
+        return Err(CommandError::unsupported(
+            "support for ellipsis nodes is missing and repo has narrow enabled",
+        ));
+    }
     let data = debug_data(repo, rev, kind).map_err(|e| (e, rev))?;
 
     let mut stdout = invocation.ui.stdout_buffer();
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/rust/rhg/src/commands/debugrhgsparse.rs	Mon Oct 24 15:32:14 2022 +0200
@@ -0,0 +1,43 @@
+use std::os::unix::prelude::OsStrExt;
+
+use crate::error::CommandError;
+use clap::SubCommand;
+use hg::{self, utils::hg_path::HgPath};
+
+pub const HELP_TEXT: &str = "";
+
+pub fn args() -> clap::App<'static, 'static> {
+    SubCommand::with_name("debugrhgsparse")
+        .arg(
+            clap::Arg::with_name("files")
+                .required(true)
+                .multiple(true)
+                .empty_values(false)
+                .value_name("FILES")
+                .help("Files to check against sparse profile"),
+        )
+        .about(HELP_TEXT)
+}
+
+pub fn run(invocation: &crate::CliInvocation) -> Result<(), CommandError> {
+    let repo = invocation.repo?;
+
+    let (matcher, _warnings) = hg::sparse::matcher(&repo).unwrap();
+    let files = invocation.subcommand_args.values_of_os("files");
+    if let Some(files) = files {
+        for file in files {
+            invocation.ui.write_stdout(b"matches: ")?;
+            invocation.ui.write_stdout(
+                if matcher.matches(HgPath::new(file.as_bytes())) {
+                    b"yes"
+                } else {
+                    b"no"
+                },
+            )?;
+            invocation.ui.write_stdout(b" | file: ")?;
+            invocation.ui.write_stdout(file.as_bytes())?;
+            invocation.ui.write_stdout(b"\n")?;
+        }
+    }
+    Ok(())
+}
--- a/rust/rhg/src/commands/status.rs	Thu Oct 20 12:05:17 2022 -0400
+++ b/rust/rhg/src/commands/status.rs	Mon Oct 24 15:32:14 2022 +0200
@@ -10,7 +10,6 @@
 use crate::utils::path_utils::RelativizePaths;
 use clap::{Arg, SubCommand};
 use format_bytes::format_bytes;
-use hg;
 use hg::config::Config;
 use hg::dirstate::has_exec_bit;
 use hg::dirstate::status::StatusPath;
@@ -18,7 +17,7 @@
 use hg::errors::{HgError, IoResultExt};
 use hg::lock::LockError;
 use hg::manifest::Manifest;
-use hg::matchers::AlwaysMatcher;
+use hg::matchers::{AlwaysMatcher, IntersectionMatcher};
 use hg::repo::Repo;
 use hg::utils::files::get_bytes_from_os_string;
 use hg::utils::files::get_bytes_from_path;
@@ -28,7 +27,9 @@
 use hg::PatternFileWarning;
 use hg::StatusError;
 use hg::StatusOptions;
+use hg::{self, narrow, sparse};
 use log::info;
+use rayon::prelude::*;
 use std::io;
 use std::path::PathBuf;
 
@@ -104,6 +105,12 @@
                 .short("-n")
                 .long("--no-status"),
         )
+        .arg(
+            Arg::with_name("verbose")
+                .help("enable additional output")
+                .short("-v")
+                .long("--verbose"),
+        )
 }
 
 /// Pure data type allowing the caller to specify file states to display
@@ -150,18 +157,35 @@
     }
 }
 
+fn has_unfinished_merge(repo: &Repo) -> Result<bool, CommandError> {
+    return Ok(repo.dirstate_parents()?.is_merge());
+}
+
+fn has_unfinished_state(repo: &Repo) -> Result<bool, CommandError> {
+    // These are all the known values for the [fname] argument of
+    // the [addunfinished] function in [state.py].
+    let known_state_files: &[&str] = &[
+        "bisect.state",
+        "graftstate",
+        "histedit-state",
+        "rebasestate",
+        "shelvedstate",
+        "transplant/journal",
+        "updatestate",
+    ];
+    if has_unfinished_merge(repo)? {
+        return Ok(true);
+    };
+    for f in known_state_files {
+        if repo.hg_vfs().join(f).exists() {
+            return Ok(true);
+        }
+    }
+    return Ok(false);
+}
+
 pub fn run(invocation: &crate::CliInvocation) -> Result<(), CommandError> {
     // TODO: lift these limitations
-    if invocation.config.get_bool(b"ui", b"tweakdefaults")? {
-        return Err(CommandError::unsupported(
-            "ui.tweakdefaults is not yet supported with rhg status",
-        ));
-    }
-    if invocation.config.get_bool(b"ui", b"statuscopies")? {
-        return Err(CommandError::unsupported(
-            "ui.statuscopies is not yet supported with rhg status",
-        ));
-    }
     if invocation
         .config
         .get(b"commands", b"status.terse")
@@ -176,15 +200,10 @@
     let config = invocation.config;
     let args = invocation.subcommand_args;
 
-    let verbose = !ui.plain(None)
-        && !args.is_present("print0")
-        && (config.get_bool(b"ui", b"verbose")?
+    let verbose = !args.is_present("print0")
+        && (args.is_present("verbose")
+            || config.get_bool(b"ui", b"verbose")?
             || config.get_bool(b"commands", b"status.verbose")?);
-    if verbose {
-        return Err(CommandError::unsupported(
-            "verbose status is not supported yet",
-        ));
-    }
 
     let all = args.is_present("all");
     let display_states = if all {
@@ -214,10 +233,12 @@
 
     let repo = invocation.repo?;
 
-    if repo.has_sparse() || repo.has_narrow() {
-        return Err(CommandError::unsupported(
-            "rhg status is not supported for sparse checkouts or narrow clones yet"
-        ));
+    if verbose {
+        if has_unfinished_state(repo)? {
+            return Err(CommandError::unsupported(
+                "verbose status output is not supported by rhg (and is needed because we're in an unfinished operation)",
+            ));
+        };
     }
 
     let mut dmap = repo.dirstate_map_mut()?;
@@ -239,28 +260,7 @@
     let after_status = |res: StatusResult| -> Result<_, CommandError> {
         let (mut ds_status, pattern_warnings) = res?;
         for warning in pattern_warnings {
-            match warning {
-                hg::PatternFileWarning::InvalidSyntax(path, syntax) => ui
-                    .write_stderr(&format_bytes!(
-                        b"{}: ignoring invalid syntax '{}'\n",
-                        get_bytes_from_path(path),
-                        &*syntax
-                    ))?,
-                hg::PatternFileWarning::NoSuchFile(path) => {
-                    let path = if let Ok(relative) =
-                        path.strip_prefix(repo.working_directory_path())
-                    {
-                        relative
-                    } else {
-                        &*path
-                    };
-                    ui.write_stderr(&format_bytes!(
-                        b"skipping unreadable pattern file '{}': \
-                          No such file or directory\n",
-                        get_bytes_from_path(path),
-                    ))?
-                }
-            }
+            ui.write_stderr(&print_pattern_file_warning(&warning, &repo))?;
         }
 
         for (path, error) in ds_status.bad {
@@ -292,23 +292,37 @@
             let manifest = repo.manifest_for_node(p1).map_err(|e| {
                 CommandError::from((e, &*format!("{:x}", p1.short())))
             })?;
-            for to_check in ds_status.unsure {
-                if unsure_is_modified(repo, &manifest, &to_check.path)? {
+            let working_directory_vfs = repo.working_directory_vfs();
+            let store_vfs = repo.store_vfs();
+            let res: Vec<_> = ds_status
+                .unsure
+                .into_par_iter()
+                .map(|to_check| {
+                    unsure_is_modified(
+                        working_directory_vfs,
+                        store_vfs,
+                        &manifest,
+                        &to_check.path,
+                    )
+                    .map(|modified| (to_check, modified))
+                })
+                .collect::<Result<_, _>>()?;
+            for (status_path, is_modified) in res.into_iter() {
+                if is_modified {
                     if display_states.modified {
-                        ds_status.modified.push(to_check);
+                        ds_status.modified.push(status_path);
                     }
                 } else {
                     if display_states.clean {
-                        ds_status.clean.push(to_check.clone());
+                        ds_status.clean.push(status_path.clone());
                     }
-                    fixup.push(to_check.path.into_owned())
+                    fixup.push(status_path.path.into_owned())
                 }
             }
         }
-        let relative_paths = (!ui.plain(None))
-            && config
-                .get_option(b"commands", b"status.relative")?
-                .unwrap_or(config.get_bool(b"ui", b"relative-paths")?);
+        let relative_paths = config
+            .get_option(b"commands", b"status.relative")?
+            .unwrap_or(config.get_bool(b"ui", b"relative-paths")?);
         let output = DisplayStatusPaths {
             ui,
             no_status,
@@ -350,9 +364,45 @@
             filesystem_time_at_status_start,
         ))
     };
+    let (narrow_matcher, narrow_warnings) = narrow::matcher(repo)?;
+    let (sparse_matcher, sparse_warnings) = sparse::matcher(repo)?;
+    let matcher = match (repo.has_narrow(), repo.has_sparse()) {
+        (true, true) => {
+            Box::new(IntersectionMatcher::new(narrow_matcher, sparse_matcher))
+        }
+        (true, false) => narrow_matcher,
+        (false, true) => sparse_matcher,
+        (false, false) => Box::new(AlwaysMatcher),
+    };
+
+    for warning in narrow_warnings.into_iter().chain(sparse_warnings) {
+        match &warning {
+            sparse::SparseWarning::RootWarning { context, line } => {
+                let msg = format_bytes!(
+                    b"warning: {} profile cannot use paths \
+                    starting with /, ignoring {}\n",
+                    context,
+                    line
+                );
+                ui.write_stderr(&msg)?;
+            }
+            sparse::SparseWarning::ProfileNotFound { profile, rev } => {
+                let msg = format_bytes!(
+                    b"warning: sparse profile '{}' not found \
+                    in rev {} - ignoring it\n",
+                    profile,
+                    rev
+                );
+                ui.write_stderr(&msg)?;
+            }
+            sparse::SparseWarning::Pattern(e) => {
+                ui.write_stderr(&print_pattern_file_warning(e, &repo))?;
+            }
+        }
+    }
     let (fixup, mut dirstate_write_needed, filesystem_time_at_status_start) =
         dmap.with_status(
-            &AlwaysMatcher,
+            matcher.as_ref(),
             repo.working_directory_path().to_owned(),
             ignore_files(repo, config),
             options,
@@ -491,11 +541,12 @@
 /// This meant to be used for those that the dirstate cannot resolve, due
 /// to time resolution limits.
 fn unsure_is_modified(
-    repo: &Repo,
+    working_directory_vfs: hg::vfs::Vfs,
+    store_vfs: hg::vfs::Vfs,
     manifest: &Manifest,
     hg_path: &HgPath,
 ) -> Result<bool, HgError> {
-    let vfs = repo.working_directory_vfs();
+    let vfs = working_directory_vfs;
     let fs_path = hg_path_to_path_buf(hg_path).expect("HgPath conversion");
     let fs_metadata = vfs.symlink_metadata(&fs_path)?;
     let is_symlink = fs_metadata.file_type().is_symlink();
@@ -515,7 +566,7 @@
     if entry.flags != fs_flags {
         return Ok(true);
     }
-    let filelog = repo.filelog(hg_path)?;
+    let filelog = hg::filelog::Filelog::open_vfs(&store_vfs, hg_path)?;
     let fs_len = fs_metadata.len();
     let file_node = entry.node_id()?;
     let filelog_entry = filelog.entry_for_node(file_node).map_err(|_| {
@@ -545,3 +596,30 @@
     };
     Ok(p1_contents != &*fs_contents)
 }
+
+fn print_pattern_file_warning(
+    warning: &PatternFileWarning,
+    repo: &Repo,
+) -> Vec<u8> {
+    match warning {
+        PatternFileWarning::InvalidSyntax(path, syntax) => format_bytes!(
+            b"{}: ignoring invalid syntax '{}'\n",
+            get_bytes_from_path(path),
+            &*syntax
+        ),
+        PatternFileWarning::NoSuchFile(path) => {
+            let path = if let Ok(relative) =
+                path.strip_prefix(repo.working_directory_path())
+            {
+                relative
+            } else {
+                &*path
+            };
+            format_bytes!(
+                b"skipping unreadable pattern file '{}': \
+                    No such file or directory\n",
+                get_bytes_from_path(path),
+            )
+        }
+    }
+}
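
The hunk above composes the narrow and sparse matchers when both are configured and falls back to AlwaysMatcher otherwise. As a rough, self-contained sketch of that composition pattern (the trait and types below are illustrative stand-ins, not the hg-core matcher API):

    // Illustrative stand-ins for hg-core's matcher types; not the real API.
    trait Matcher {
        fn matches(&self, path: &str) -> bool;
    }

    struct Always;
    impl Matcher for Always {
        fn matches(&self, _path: &str) -> bool {
            true
        }
    }

    struct Intersection(Box<dyn Matcher>, Box<dyn Matcher>);
    impl Matcher for Intersection {
        fn matches(&self, path: &str) -> bool {
            self.0.matches(path) && self.1.matches(path)
        }
    }

    fn select(
        narrow: Option<Box<dyn Matcher>>,
        sparse: Option<Box<dyn Matcher>>,
    ) -> Box<dyn Matcher> {
        match (narrow, sparse) {
            (Some(n), Some(s)) => Box::new(Intersection(n, s)),
            (Some(n), None) => n,
            (None, Some(s)) => s,
            (None, None) => Box::new(Always),
        }
    }

    fn main() {
        let matcher = select(None, None);
        assert!(matcher.matches("dir1/x"));
    }

Boxing the result lets the four match arms share a single return type, which is why the real code boxes IntersectionMatcher and AlwaysMatcher as well.
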
--- a/rust/rhg/src/error.rs	Thu Oct 20 12:05:17 2022 -0400
+++ b/rust/rhg/src/error.rs	Mon Oct 24 15:32:14 2022 +0200
@@ -8,6 +8,7 @@
 use hg::exit_codes;
 use hg::repo::RepoError;
 use hg::revlog::revlog::RevlogError;
+use hg::sparse::SparseConfigError;
 use hg::utils::files::get_bytes_from_path;
 use hg::{DirstateError, DirstateMapError, StatusError};
 use std::convert::From;
@@ -19,6 +20,7 @@
     Abort {
         message: Vec<u8>,
         detailed_exit_code: exit_codes::ExitCode,
+        hint: Option<Vec<u8>>,
     },
 
     /// Exit with a failure exit code but no message.
@@ -49,6 +51,32 @@
             // https://www.mercurial-scm.org/wiki/EncodingStrategy#Mixing_output
             message: utf8_to_local(message.as_ref()).into(),
             detailed_exit_code: detailed_exit_code,
+            hint: None,
+        }
+    }
+
+    pub fn abort_with_exit_code_and_hint(
+        message: impl AsRef<str>,
+        detailed_exit_code: exit_codes::ExitCode,
+        hint: Option<impl AsRef<str>>,
+    ) -> Self {
+        CommandError::Abort {
+            message: utf8_to_local(message.as_ref()).into(),
+            detailed_exit_code,
+            hint: hint.map(|h| utf8_to_local(h.as_ref()).into()),
+        }
+    }
+
+    pub fn abort_with_exit_code_bytes(
+        message: impl AsRef<[u8]>,
+        detailed_exit_code: exit_codes::ExitCode,
+    ) -> Self {
+        // TODO: use this everywhere it makes sense instead of the string
+        // version.
+        CommandError::Abort {
+            message: message.as_ref().into(),
+            detailed_exit_code,
+            hint: None,
         }
     }
 
@@ -79,9 +107,12 @@
             HgError::Abort {
                 message,
                 detailed_exit_code,
-            } => {
-                CommandError::abort_with_exit_code(message, detailed_exit_code)
-            }
+                hint,
+            } => CommandError::abort_with_exit_code_and_hint(
+                message,
+                detailed_exit_code,
+                hint,
+            ),
             _ => CommandError::abort(error.to_string()),
         }
     }
@@ -108,13 +139,15 @@
 impl From<RepoError> for CommandError {
     fn from(error: RepoError) -> Self {
         match error {
-            RepoError::NotFound { at } => CommandError::Abort {
-                message: format_bytes!(
-                    b"abort: repository {} not found",
-                    get_bytes_from_path(at)
-                ),
-                detailed_exit_code: exit_codes::ABORT,
-            },
+            RepoError::NotFound { at } => {
+                CommandError::abort_with_exit_code_bytes(
+                    format_bytes!(
+                        b"abort: repository {} not found",
+                        get_bytes_from_path(at)
+                    ),
+                    exit_codes::ABORT,
+                )
+            }
             RepoError::ConfigParseError(error) => error.into(),
             RepoError::Other(error) => error.into(),
         }
@@ -124,13 +157,13 @@
 impl<'a> From<&'a NoRepoInCwdError> for CommandError {
     fn from(error: &'a NoRepoInCwdError) -> Self {
         let NoRepoInCwdError { cwd } = error;
-        CommandError::Abort {
-            message: format_bytes!(
+        CommandError::abort_with_exit_code_bytes(
+            format_bytes!(
                 b"abort: no repository found in '{}' (.hg not found)!",
                 get_bytes_from_path(cwd)
             ),
-            detailed_exit_code: exit_codes::ABORT,
-        }
+            exit_codes::ABORT,
+        )
     }
 }
 
@@ -155,15 +188,15 @@
         } else {
             Vec::new()
         };
-        CommandError::Abort {
-            message: format_bytes!(
+        CommandError::abort_with_exit_code_bytes(
+            format_bytes!(
                 b"config error at {}{}: {}",
                 origin,
                 line_message,
                 message
             ),
-            detailed_exit_code: exit_codes::CONFIG_ERROR_ABORT,
-        }
+            exit_codes::CONFIG_ERROR_ABORT,
+        )
     }
 }
 
@@ -212,3 +245,46 @@
         HgError::from(error).into()
     }
 }
+
+impl From<SparseConfigError> for CommandError {
+    fn from(e: SparseConfigError) -> Self {
+        match e {
+            SparseConfigError::IncludesAfterExcludes { context } => {
+                Self::abort_with_exit_code_bytes(
+                    format_bytes!(
+                        b"{} config cannot have includes after excludes",
+                        context
+                    ),
+                    exit_codes::CONFIG_PARSE_ERROR_ABORT,
+                )
+            }
+            SparseConfigError::EntryOutsideSection { context, line } => {
+                Self::abort_with_exit_code_bytes(
+                    format_bytes!(
+                        b"{} config entry outside of section: {}",
+                        context,
+                        &line,
+                    ),
+                    exit_codes::CONFIG_PARSE_ERROR_ABORT,
+                )
+            }
+            SparseConfigError::InvalidNarrowPrefix(prefix) => {
+                Self::abort_with_exit_code_bytes(
+                    format_bytes!(
+                        b"invalid prefix on narrow pattern: {}",
+                        &prefix
+                    ),
+                    exit_codes::ABORT,
+                )
+            }
+            SparseConfigError::IncludesInNarrow => Self::abort(
+                "including other spec files using '%include' \
+                    is not supported in narrowspec",
+            ),
+            SparseConfigError::HgError(e) => Self::from(e),
+            SparseConfigError::PatternError(e) => {
+                Self::unsupported(format!("{}", e))
+            }
+        }
+    }
+}
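
The hint field added to CommandError::Abort above carries an optional suggestion that main.rs (further down) prints on its own line, wrapped in parentheses, after the abort message. A minimal standalone sketch of that shape, using a simplified enum rather than rhg's actual type:

    // Simplified model of an abort error with an optional hint; the real
    // CommandError also carries a detailed exit code.
    enum CommandError {
        Abort {
            message: Vec<u8>,
            hint: Option<Vec<u8>>,
        },
    }

    fn report(err: &CommandError) {
        match err {
            CommandError::Abort { message, hint } => {
                if !message.is_empty() {
                    eprintln!("{}", String::from_utf8_lossy(message));
                }
                if let Some(hint) = hint {
                    eprintln!("({})", String::from_utf8_lossy(hint));
                }
            }
        }
    }

    fn main() {
        report(&CommandError::Abort {
            message: b"abort: example failure".to_vec(),
            hint: Some(b"try --help for more information".to_vec()),
        });
    }
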
--- a/rust/rhg/src/main.rs	Thu Oct 20 12:05:17 2022 -0400
+++ b/rust/rhg/src/main.rs	Mon Oct 24 15:32:14 2022 +0200
@@ -6,11 +6,12 @@
 use clap::Arg;
 use clap::ArgMatches;
 use format_bytes::{format_bytes, join};
-use hg::config::{Config, ConfigSource};
+use hg::config::{Config, ConfigSource, PlainInfo};
 use hg::repo::{Repo, RepoError};
 use hg::utils::files::{get_bytes_from_os_str, get_path_from_bytes};
 use hg::utils::SliceExt;
 use hg::{exit_codes, requirements};
+use std::borrow::Cow;
 use std::collections::HashSet;
 use std::ffi::OsString;
 use std::os::unix::prelude::CommandExt;
@@ -300,6 +301,24 @@
         }
     };
 
+    let exit =
+        |ui: &Ui, config: &Config, result: Result<(), CommandError>| -> ! {
+            exit(
+                &argv,
+                &initial_current_dir,
+                ui,
+                OnUnsupported::from_config(config),
+                result,
+                // TODO: show a warning or combine with original error if
+                // `get_bool` returns an error
+                non_repo_config
+                    .get_bool(b"ui", b"detailed-exit-code")
+                    .unwrap_or(false),
+            )
+        };
+    let early_exit = |config: &Config, error: CommandError| -> ! {
+        exit(&Ui::new_infallible(config), &config, Err(error))
+    };
     let repo_result = match Repo::find(&non_repo_config, repo_path.to_owned())
     {
         Ok(repo) => Ok(repo),
@@ -307,18 +326,7 @@
             // Not finding a repo is not fatal yet, if `-R` was not given
             Err(NoRepoInCwdError { cwd: at })
         }
-        Err(error) => exit(
-            &argv,
-            &initial_current_dir,
-            &Ui::new_infallible(&non_repo_config),
-            OnUnsupported::from_config(&non_repo_config),
-            Err(error.into()),
-            // TODO: show a warning or combine with original error if
-            // `get_bool` returns an error
-            non_repo_config
-                .get_bool(b"ui", b"detailed-exit-code")
-                .unwrap_or(false),
-        ),
+        Err(error) => early_exit(&non_repo_config, error.into()),
     };
 
     let config = if let Ok(repo) = &repo_result {
@@ -326,20 +334,20 @@
     } else {
         &non_repo_config
     };
-    let ui = Ui::new(&config).unwrap_or_else(|error| {
-        exit(
-            &argv,
-            &initial_current_dir,
-            &Ui::new_infallible(&config),
-            OnUnsupported::from_config(&config),
-            Err(error.into()),
-            config
-                .get_bool(b"ui", b"detailed-exit-code")
-                .unwrap_or(false),
-        )
-    });
-    let on_unsupported = OnUnsupported::from_config(config);
 
+    let mut config_cow = Cow::Borrowed(config);
+    config_cow.to_mut().apply_plain(PlainInfo::from_env());
+    if !ui::plain(Some("tweakdefaults"))
+        && config_cow
+            .as_ref()
+            .get_bool(b"ui", b"tweakdefaults")
+            .unwrap_or_else(|error| early_exit(&config, error.into()))
+    {
+        config_cow.to_mut().tweakdefaults()
+    };
+    let config = config_cow.as_ref();
+    let ui = Ui::new(&config)
+        .unwrap_or_else(|error| early_exit(&config, error.into()));
     let result = main_with_result(
         argv.iter().map(|s| s.to_owned()).collect(),
         &process_start_time,
@@ -347,18 +355,7 @@
         repo_result.as_ref(),
         config,
     );
-    exit(
-        &argv,
-        &initial_current_dir,
-        &ui,
-        on_unsupported,
-        result,
-        // TODO: show a warning or combine with original error if `get_bool`
-        // returns an error
-        config
-            .get_bool(b"ui", b"detailed-exit-code")
-            .unwrap_or(false),
-    )
+    exit(&ui, &config, result)
 }
 
 fn main() -> ! {
@@ -372,8 +369,7 @@
     match result {
         Ok(()) => exit_codes::OK,
         Err(CommandError::Abort {
-            message: _,
-            detailed_exit_code,
+            detailed_exit_code, ..
         }) => {
             if use_detailed_exit_code {
                 *detailed_exit_code
@@ -480,15 +476,15 @@
     match &result {
         Ok(_) => {}
         Err(CommandError::Unsuccessful) => {}
-        Err(CommandError::Abort {
-            message,
-            detailed_exit_code: _,
-        }) => {
+        Err(CommandError::Abort { message, hint, .. }) => {
+            // Ignore errors when writing to stderr, we’re already exiting
+            // with failure code so there’s not much more we can do.
             if !message.is_empty() {
-                // Ignore errors when writing to stderr, we’re already exiting
-                // with failure code so there’s not much more we can do.
                 let _ = ui.write_stderr(&format_bytes!(b"{}\n", message));
             }
+            if let Some(hint) = hint {
+                let _ = ui.write_stderr(&format_bytes!(b"({})\n", hint));
+            }
         }
         Err(CommandError::UnsupportedFeature { message }) => {
             match on_unsupported {
@@ -546,6 +542,7 @@
     debugdata
     debugrequirements
     debugignorerhg
+    debugrhgsparse
     files
     root
     config
@@ -677,8 +674,15 @@
 /// The `*` extension is an edge-case for config sub-options that apply to all
 /// extensions. For now, only `:required` exists, but that may change in the
 /// future.
-const SUPPORTED_EXTENSIONS: &[&[u8]] =
-    &[b"blackbox", b"share", b"sparse", b"narrow", b"*"];
+const SUPPORTED_EXTENSIONS: &[&[u8]] = &[
+    b"blackbox",
+    b"share",
+    b"sparse",
+    b"narrow",
+    b"*",
+    b"strip",
+    b"rebase",
+];
 
 fn check_extensions(config: &Config) -> Result<(), CommandError> {
     if let Some(b"*") = config.get(b"rhg", b"ignored-extensions") {
@@ -687,13 +691,18 @@
     }
 
     let enabled: HashSet<&[u8]> = config
-        .get_section_keys(b"extensions")
-        .into_iter()
-        .map(|extension| {
+        .iter_section(b"extensions")
+        .filter_map(|(extension, value)| {
+            if value == b"!" {
+                // Filter out disabled extensions
+                return None;
+            }
             // Ignore extension suboptions. Only `required` exists for now.
             // `rhg` either supports an extension or doesn't, so it doesn't
             // make sense to consider the loading of an extension.
-            extension.split_2(b':').unwrap_or((extension, b"")).0
+            let actual_extension =
+                extension.split_2(b':').unwrap_or((extension, b"")).0;
+            Some(actual_extension)
         })
         .collect();
 
--- a/rust/rhg/src/ui.rs	Thu Oct 20 12:05:17 2022 -0400
+++ b/rust/rhg/src/ui.rs	Mon Oct 24 15:32:14 2022 +0200
@@ -3,10 +3,9 @@
 use format_bytes::format_bytes;
 use format_bytes::write_bytes;
 use hg::config::Config;
+use hg::config::PlainInfo;
 use hg::errors::HgError;
-use hg::utils::files::get_bytes_from_os_string;
 use std::borrow::Cow;
-use std::env;
 use std::io;
 use std::io::{ErrorKind, Write};
 
@@ -127,35 +126,15 @@
         }
         stdout.flush()
     }
-
-    /// Return whether plain mode is active.
-    ///
-    /// Plain mode means that all configuration variables which affect
-    /// the behavior and output of Mercurial should be
-    /// ignored. Additionally, the output should be stable,
-    /// reproducible and suitable for use in scripts or applications.
-    ///
-    /// The only way to trigger plain mode is by setting either the
-    /// `HGPLAIN' or `HGPLAINEXCEPT' environment variables.
-    ///
-    /// The return value can either be
-    /// - False if HGPLAIN is not set, or feature is in HGPLAINEXCEPT
-    /// - False if feature is disabled by default and not included in HGPLAIN
-    /// - True otherwise
-    pub fn plain(&self, feature: Option<&str>) -> bool {
-        plain(feature)
-    }
 }
 
+// TODO: pass the PlainInfo to call sites directly and
+// delete this function
 pub fn plain(opt_feature: Option<&str>) -> bool {
-    if let Some(except) = env::var_os("HGPLAINEXCEPT") {
-        opt_feature.map_or(true, |feature| {
-            get_bytes_from_os_string(except)
-                .split(|&byte| byte == b',')
-                .all(|exception| exception != feature.as_bytes())
-        })
-    } else {
-        env::var_os("HGPLAIN").is_some()
+    let plain_info = PlainInfo::from_env();
+    match opt_feature {
+        None => plain_info.is_plain(),
+        Some(feature) => plain_info.is_feature_plain(feature),
     }
 }
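
The plain() helper now defers to PlainInfo, while the HGPLAIN/HGPLAINEXCEPT rules it used to implement directly are visible in the removed lines above. A standalone sketch of those rules using only the standard library (not the hg-core PlainInfo type):

    use std::env;

    // Plain mode is enabled by HGPLAIN; HGPLAINEXCEPT lists features that are
    // exempted from it. Mirrors the logic removed above, for illustration only.
    fn plain(feature: Option<&str>) -> bool {
        if let Some(except) = env::var_os("HGPLAINEXCEPT") {
            let except = except.to_string_lossy().into_owned();
            feature.map_or(true, |feature| {
                except.split(',').all(|exception| exception != feature)
            })
        } else {
            env::var_os("HGPLAIN").is_some()
        }
    }

    fn main() {
        // With neither variable set in the environment this prints "false".
        println!("plain mode: {}", plain(None));
    }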
 
--- a/setup.py	Thu Oct 20 12:05:17 2022 -0400
+++ b/setup.py	Mon Oct 24 15:32:14 2022 +0200
@@ -666,30 +666,55 @@
 
 class buildhgexe(build_ext):
     description = 'compile hg.exe from mercurial/exewrapper.c'
-    user_options = build_ext.user_options + [
-        (
-            'long-paths-support',
-            None,
-            'enable support for long paths on '
-            'Windows (off by default and '
-            'experimental)',
-        ),
-    ]
 
-    LONG_PATHS_MANIFEST = """
-    <?xml version="1.0" encoding="UTF-8" standalone="yes"?>
-    <assembly xmlns="urn:schemas-microsoft-com:asm.v1" manifestVersion="1.0">
-        <application>
-            <windowsSettings
-            xmlns:ws2="http://schemas.microsoft.com/SMI/2016/WindowsSettings">
-                <ws2:longPathAware>true</ws2:longPathAware>
-            </windowsSettings>
-        </application>
-    </assembly>"""
+    LONG_PATHS_MANIFEST = """\
+<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
+<assembly xmlns="urn:schemas-microsoft-com:asm.v1" manifestVersion="1.0">
+  <trustInfo xmlns="urn:schemas-microsoft-com:asm.v3">
+    <security>
+      <requestedPrivileges>
+        <requestedExecutionLevel
+          level="asInvoker"
+          uiAccess="false"
+        />
+      </requestedPrivileges>
+    </security>
+  </trustInfo>
+  <compatibility xmlns="urn:schemas-microsoft-com:compatibility.v1">
+    <application>
+      <!-- Windows Vista -->
+      <supportedOS Id="{e2011457-1546-43c5-a5fe-008deee3d3f0}"/>
+      <!-- Windows 7 -->
+      <supportedOS Id="{35138b9a-5d96-4fbd-8e2d-a2440225f93a}"/>
+      <!-- Windows 8 -->
+      <supportedOS Id="{4a2f28e3-53b9-4441-ba9c-d69d4a4a6e38}"/>
+      <!-- Windows 8.1 -->
+      <supportedOS Id="{1f676c76-80e1-4239-95bb-83d0f6d0da78}"/>
+      <!-- Windows 10 and Windows 11 -->
+      <supportedOS Id="{8e0f7a12-bfb3-4fe8-b9a5-48fd50a15a9a}"/>
+    </application>
+  </compatibility>
+  <application xmlns="urn:schemas-microsoft-com:asm.v3">
+    <windowsSettings
+        xmlns:ws2="http://schemas.microsoft.com/SMI/2016/WindowsSettings">
+      <ws2:longPathAware>true</ws2:longPathAware>
+    </windowsSettings>
+  </application>
+  <dependency>
+    <dependentAssembly>
+      <assemblyIdentity type="win32"
+                        name="Microsoft.Windows.Common-Controls"
+                        version="6.0.0.0"
+                        processorArchitecture="*"
+                        publicKeyToken="6595b64144ccf1df"
+                        language="*" />
+    </dependentAssembly>
+  </dependency>
+</assembly>
+"""
 
     def initialize_options(self):
         build_ext.initialize_options(self)
-        self.long_paths_support = False
 
     def build_extensions(self):
         if os.name != 'nt':
@@ -700,8 +725,8 @@
 
         pythonlib = None
 
-        dir = os.path.dirname(self.get_ext_fullpath('dummy'))
-        self.hgtarget = os.path.join(dir, 'hg')
+        dirname = os.path.dirname(self.get_ext_fullpath('dummy'))
+        self.hgtarget = os.path.join(dirname, 'hg')
 
         if getattr(sys, 'dllhandle', None):
             # Different Python installs can have different Python library
@@ -774,22 +799,11 @@
         self.compiler.link_executable(
             objects, self.hgtarget, libraries=[], output_dir=self.build_temp
         )
-        if self.long_paths_support:
-            self.addlongpathsmanifest()
+
+        self.addlongpathsmanifest()
 
     def addlongpathsmanifest(self):
-        r"""Add manifest pieces so that hg.exe understands long paths
-
-        This is an EXPERIMENTAL feature, use with care.
-        To enable long paths support, one needs to do two things:
-        - build Mercurial with --long-paths-support option
-        - change HKLM\SYSTEM\CurrentControlSet\Control\FileSystem\
-                 LongPathsEnabled to have value 1.
-
-        Please ignore 'warning 81010002: Unrecognized Element "longPathAware"';
-        it happens because Mercurial uses mt.exe circa 2008, which is not
-        yet aware of long paths support in the manifest (I think so at least).
-        This does not stop mt.exe from embedding/merging the XML properly.
+        """Add manifest pieces so that hg.exe understands long paths
 
         Why should resource #1 be used for .exe manifests? I don't know and
         wasn't able to find an explanation for mortals. But it seems to work.
@@ -797,21 +811,18 @@
         exefname = self.compiler.executable_filename(self.hgtarget)
         fdauto, manfname = tempfile.mkstemp(suffix='.hg.exe.manifest')
         os.close(fdauto)
-        with open(manfname, 'w') as f:
+        with open(manfname, 'w', encoding="UTF-8") as f:
             f.write(self.LONG_PATHS_MANIFEST)
         log.info("long paths manifest is written to '%s'" % manfname)
-        inputresource = '-inputresource:%s;#1' % exefname
         outputresource = '-outputresource:%s;#1' % exefname
         log.info("running mt.exe to update hg.exe's manifest in-place")
-        # supplying both -manifest and -inputresource to mt.exe makes
-        # it merge the embedded and supplied manifests in the -outputresource
+
         self.spawn(
             [
-                'mt.exe',
+                self.compiler.mt,
                 '-nologo',
                 '-manifest',
                 manfname,
-                inputresource,
                 outputresource,
             ]
         )
--- a/tests/run-tests.py	Thu Oct 20 12:05:17 2022 -0400
+++ b/tests/run-tests.py	Mon Oct 24 15:32:14 2022 +0200
@@ -48,7 +48,7 @@
 import collections
 import contextlib
 import difflib
-import distutils.version as version
+
 import errno
 import functools
 import json
@@ -72,6 +72,13 @@
 import uuid
 import xml.dom.minidom as minidom
 
+try:
+    # PEP 632 recommends the use of `packaging.version` to replace the
+    # deprecated `distutils.version`. So let's do it.
+    import packaging.version as version
+except ImportError:
+    import distutils.version as version
+
 if sys.version_info < (3, 5, 0):
     print(
         '%s is only supported on Python 3.5+, not %s'
@@ -3437,6 +3444,7 @@
             if self.options.list_tests:
                 result = runner.listtests(suite)
             else:
+                install_start_time = time.monotonic()
                 self._usecorrectpython()
                 if self._installdir:
                     self._installhg()
@@ -3450,6 +3458,11 @@
                 elif self.options.pyoxidized:
                     self._build_pyoxidized()
                 self._use_correct_mercurial()
+                install_end_time = time.monotonic()
+                if self._installdir:
+                    msg = 'installed Mercurial in %.2f seconds'
+                    msg %= install_end_time - install_start_time
+                    log(msg)
 
                 log(
                     'running %d tests using %d parallel processes'
--- a/tests/test-bisect2.t	Thu Oct 20 12:05:17 2022 -0400
+++ b/tests/test-bisect2.t	Mon Oct 24 15:32:14 2022 +0200
@@ -784,7 +784,6 @@
   $ hg log -q -r 'bisect(pruned)'
   0:33b1f9bc8bc5
   1:4ca5088da217
-  2:051e12f87bf1
   8:dab8161ac8fc
   11:82ca6f06eccd
   12:9f259202bbe7
--- a/tests/test-bundle.t	Thu Oct 20 12:05:17 2022 -0400
+++ b/tests/test-bundle.t	Mon Oct 24 15:32:14 2022 +0200
@@ -718,7 +718,7 @@
   $ hg init empty
   $ hg -R test bundle --base null -r 0 ../0.hg
   1 changesets found
-  $ hg -R test bundle --base 0    -r 1 ../1.hg
+  $ hg -R test bundle --exact -r 1 ../1.hg
   1 changesets found
   $ hg -R empty unbundle -u ../0.hg ../1.hg
   adding changesets
--- a/tests/test-chg.t	Thu Oct 20 12:05:17 2022 -0400
+++ b/tests/test-chg.t	Mon Oct 24 15:32:14 2022 +0200
@@ -432,6 +432,20 @@
   YYYY/MM/DD HH:MM:SS (PID)> log -R cached
   YYYY/MM/DD HH:MM:SS (PID)> loaded repo into cache: $TESTTMP/cached (in  ...s)
 
+Test that -R is interpreted relative to --cwd.
+
+  $ hg init repo1
+  $ mkdir -p a/b
+  $ hg init a/b/repo2
+  $ printf "[alias]\ntest=repo1\n" >> repo1/.hg/hgrc
+  $ printf "[alias]\ntest=repo2\n" >> a/b/repo2/.hg/hgrc
+  $ cd a
+  $ chg --cwd .. -R repo1 show alias.test
+  repo1
+  $ chg --cwd . -R b/repo2 show alias.test
+  repo2
+  $ cd ..
+
 Test that chg works (sets to the user's actual LC_CTYPE) even when python
 "coerces" the locale (py3.7+)
 
--- a/tests/test-completion.t	Thu Oct 20 12:05:17 2022 -0400
+++ b/tests/test-completion.t	Mon Oct 24 15:32:14 2022 +0200
@@ -261,7 +261,7 @@
   bookmarks: force, rev, delete, rename, inactive, list, template
   branch: force, clean, rev
   branches: active, closed, rev, template
-  bundle: force, rev, branch, base, all, type, ssh, remotecmd, insecure
+  bundle: exact, force, rev, branch, base, all, type, ssh, remotecmd, insecure
   cat: output, rev, decode, include, exclude, template
   clone: noupdate, updaterev, rev, branch, pull, uncompressed, stream, ssh, remotecmd, insecure
   commit: addremove, close-branch, amend, secret, edit, force-close-branch, interactive, include, exclude, message, logfile, date, user, subrepos
--- a/tests/test-contrib-perf.t	Thu Oct 20 12:05:17 2022 -0400
+++ b/tests/test-contrib-perf.t	Mon Oct 24 15:32:14 2022 +0200
@@ -96,6 +96,7 @@
    perf::branchmapupdate
                  benchmark branchmap update from for <base> revs to <target>
                  revs
+   perf::bundle  benchmark the creation of a bundle from a repository
    perf::bundleread
                  Benchmark reading of bundle files.
    perf::cca     (no help text available)
@@ -105,6 +106,9 @@
                  (no help text available)
    perf::ctxfiles
                  (no help text available)
+   perf::delta-find
+                 benchmark the process of finding a valid delta for a revlog
+                 revision
    perf::diffwd  Profile diff of working directory changes
    perf::dirfoldmap
                  benchmap a 'dirstate._map.dirfoldmap.get()' request
@@ -187,6 +191,8 @@
    perf::tags    (no help text available)
    perf::templating
                  test the rendering time of a given template
+   perf::unbundle
+                 benchmark application of a bundle in a repository.
    perf::unidiff
                  benchmark a unified diff between revisions
    perf::volatilesets
@@ -385,6 +391,11 @@
   searching for changes
   searching for changes
   searching for changes
+  $ hg perf::bundle 'last(all(), 5)'
+  $ hg bundle --exact --rev 'last(all(), 5)' last-5.hg
+  4 changesets found
+  $ hg perf::unbundle last-5.hg
+
 
 test  profile-benchmark option
 ------------------------------
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-contrib-pull-logger.t	Mon Oct 24 15:32:14 2022 +0200
@@ -0,0 +1,78 @@
+Check that the pull logger plugin logs pulls
+=============================================
+
+Enable the extension
+
+  $ echo "[extensions]" >> $HGRCPATH
+  $ echo "pull-logger = $TESTDIR/../contrib/pull_logger.py" >> $HGRCPATH
+
+
+Check the format of the generated log entries, with a bunch of elements in the
+common and heads set
+
+  $ hg init server
+  $ hg -R server debugbuilddag '.*2+2'
+  $ hg clone ssh://user@dummy/server client --rev 0
+  adding changesets
+  adding manifests
+  adding file changes
+  added 1 changesets with 0 changes to 0 files
+  new changesets 1ea73414a91b
+  updating to branch default
+  0 files updated, 0 files merged, 0 files removed, 0 files unresolved
+  $ tail -1 server/.hg/pull_log.jsonl
+  {"common": ["0000000000000000000000000000000000000000"], "heads": ["1ea73414a91b0920940797d8fc6a11e447f8ea1e"], "logger_version": 0, "timestamp": *} (glob)
+  $ hg -R client pull --rev 1 --rev 2
+  pulling from ssh://user@dummy/server
+  searching for changes
+  adding changesets
+  adding manifests
+  adding file changes
+  added 2 changesets with 0 changes to 0 files (+1 heads)
+  new changesets d8736c3a2c84:fa28e81e283b
+  (run 'hg heads' to see heads, 'hg merge' to merge)
+  $ tail -1 server/.hg/pull_log.jsonl
+  {"common": ["1ea73414a91b0920940797d8fc6a11e447f8ea1e"], "heads": ["d8736c3a2c84ee759a2821385804bcb67f266ade", "fa28e81e283b3416de4d48ee0dd2d446e9e38d7c"], "logger_version": 0, "timestamp": *} (glob)
+  $ hg -R client pull --rev 2 --rev 3
+  pulling from ssh://user@dummy/server
+  searching for changes
+  adding changesets
+  adding manifests
+  adding file changes
+  added 1 changesets with 0 changes to 0 files
+  new changesets 944641ddcaef
+  (run 'hg update' to get a working copy)
+  $ tail -1 server/.hg/pull_log.jsonl
+  {"common": ["1ea73414a91b0920940797d8fc6a11e447f8ea1e", "fa28e81e283b3416de4d48ee0dd2d446e9e38d7c"], "heads": ["944641ddcaef174df7ce1bc2751a5f165129778b", "fa28e81e283b3416de4d48ee0dd2d446e9e38d7c"], "logger_version": 0, "timestamp": *} (glob)
+
+
+Check the number of entries generated in the log when pulling from multiple
+clients at the same time
+
+  $ rm -f server/.hg/pull_log.jsonl
+  $ for i in $($TESTDIR/seq.py 32); do
+  >   hg clone ssh://user@dummy/server client_$i --rev 0
+  > done > /dev/null
+  $ for i in $($TESTDIR/seq.py 32); do
+  >   hg -R client_$i pull --rev 1 &
+  > done > /dev/null
+  $ wait
+  $ wc -l server/.hg/pull_log.jsonl
+  \s*64 .* (re)
+
+
+Test log rotation when reaching some size threshold
+
+  $ cat >> $HGRCPATH << EOF
+  > [pull-logger]
+  > rotate-size = 1kb
+  > EOF
+
+  $ rm -f server/.hg/pull_log.jsonl
+  $ for i in $($TESTDIR/seq.py 10); do
+  >   hg -R client pull --rev 1
+  > done > /dev/null
+  $ wc -l server/.hg/pull_log.jsonl
+  \s*3 .* (re)
+  $ wc -l server/.hg/pull_log.jsonl.rotated
+  \s*7 .* (re)
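
The new test exercises rotation of .hg/pull_log.jsonl to a .rotated sibling once it crosses the pull-logger.rotate-size threshold. The sketch below shows that rotation policy in a generic form; the helper name, path, and threshold are illustrative and not taken from contrib/pull_logger.py:

    use std::fs;
    use std::io::Write;
    use std::path::Path;

    // Append one JSONL entry, first moving the log aside to "<name>.rotated"
    // when it has grown past `max_size` bytes. Names here are illustrative.
    fn append_with_rotation(
        path: &Path,
        line: &str,
        max_size: u64,
    ) -> std::io::Result<()> {
        if let Ok(meta) = fs::metadata(path) {
            if meta.len() >= max_size {
                fs::rename(path, path.with_extension("jsonl.rotated"))?;
            }
        }
        let mut log =
            fs::OpenOptions::new().create(true).append(true).open(path)?;
        writeln!(log, "{}", line)
    }

    fn main() -> std::io::Result<()> {
        let path = Path::new("pull_log.jsonl");
        append_with_rotation(path, r#"{"logger_version": 0}"#, 1024)
    }
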
--- a/tests/test-http-bad-server.t	Thu Oct 20 12:05:17 2022 -0400
+++ b/tests/test-http-bad-server.t	Mon Oct 24 15:32:14 2022 +0200
@@ -659,7 +659,7 @@
 
   $ hg clone http://localhost:$HGPORT/ clone
   requesting all changes
-  abort: HTTP request error (incomplete response) (py3 !)
+  abort: HTTP request error (incomplete response*) (glob)
   (this may be an intermittent network failure; if the error persists, consider contacting the network or server operator)
   [255]
 
@@ -703,7 +703,7 @@
 
   $ hg clone http://localhost:$HGPORT/ clone
   requesting all changes
-  abort: HTTP request error (incomplete response) (py3 !)
+  abort: HTTP request error (incomplete response*) (glob)
   (this may be an intermittent network failure; if the error persists, consider contacting the network or server operator)
   [255]
 
@@ -904,7 +904,7 @@
   adding changesets
   transaction abort!
   rollback completed
-  abort: HTTP request error (incomplete response) (py3 !)
+  abort: HTTP request error (incomplete response*) (glob)
   (this may be an intermittent network failure; if the error persists, consider contacting the network or server operator)
   [255]
 
@@ -1021,7 +1021,7 @@
   adding file changes
   transaction abort!
   rollback completed
-  abort: HTTP request error (incomplete response) (py3 !)
+  abort: HTTP request error (incomplete response*) (glob)
   (this may be an intermittent network failure; if the error persists, consider contacting the network or server operator)
   [255]
 
--- a/tests/test-log.t	Thu Oct 20 12:05:17 2022 -0400
+++ b/tests/test-log.t	Mon Oct 24 15:32:14 2022 +0200
@@ -2157,6 +2157,8 @@
   ... '''.encode('utf-8')) and None
   $ sh < setup.sh
 
+#if no-rhg
+
 test in problematic encoding
   >>> with open('test.sh', 'wb') as f:
   ...     f.write(u'''
@@ -2179,6 +2181,8 @@
   3
   1
 
+#endif
+
   $ cd ..
 
 test hg log on non-existent files and on directories
--- a/tests/test-mq-subrepo-svn.t	Thu Oct 20 12:05:17 2022 -0400
+++ b/tests/test-mq-subrepo-svn.t	Mon Oct 24 15:32:14 2022 +0200
@@ -38,7 +38,7 @@
   A .hgsub
   $ hg qnew -m0 0.diff
   $ cd sub
-  $ echo a > a
+  $ echo foo > a
   $ svn add a
   A         a
   $ svn st
--- a/tests/test-phase-archived.t	Thu Oct 20 12:05:17 2022 -0400
+++ b/tests/test-phase-archived.t	Mon Oct 24 15:32:14 2022 +0200
@@ -4,7 +4,7 @@
 
   $ cat << EOF >> $HGRCPATH
   > [format]
-  > internal-phase=yes
+  > exp-archived-phase=yes
   > [extensions]
   > strip=
   > [experimental]
--- a/tests/test-phases.t	Thu Oct 20 12:05:17 2022 -0400
+++ b/tests/test-phases.t	Mon Oct 24 15:32:14 2022 +0200
@@ -879,7 +879,7 @@
 
 Check we deny its usage on older repository
 
-  $ hg init no-internal-phase --config format.internal-phase=no
+  $ hg init no-internal-phase --config format.use-internal-phase=no
   $ cd no-internal-phase
   $ hg debugrequires | grep internal-phase
   [1]
@@ -900,10 +900,10 @@
 
 Check it works fine with repository that supports it.
 
-  $ hg init internal-phase --config format.internal-phase=yes
+  $ hg init internal-phase --config format.use-internal-phase=yes
   $ cd internal-phase
   $ hg debugrequires | grep internal-phase
-  internal-phase
+  internal-phase-2
   $ mkcommit A
   test-debug-phase: new rev 0:  x -> 1
   test-hook-close-phase: 4a2df7238c3b48766b5e22fafbb8a2f506ec8256:   -> draft
@@ -951,21 +951,28 @@
 
 Commit an archived changeset
 
+  $ cd ..
+  $ hg clone --quiet --pull internal-phase archived-phase \
+  > --config format.exp-archived-phase=yes \
+  > --config extensions.phasereport='!' \
+  > --config hooks.txnclose-phase.test=
+
+  $ cd archived-phase
+
   $ echo B > B
   $ hg add B
   $ hg status
   A B
   $ hg --config "phases.new-commit=archived" commit -m "my test archived commit"
-  test-debug-phase: new rev 2:  x -> 32
+  test-debug-phase: new rev 1:  x -> 32
   test-hook-close-phase: 8df5997c3361518f733d1ae67cd3adb9b0eaf125:   -> archived
 
 The changeset is a working parent descendant.
 Per the usual visibility rules, it is made visible.
 
   $ hg log -G -l 3
-  @  changeset:   2:8df5997c3361
+  @  changeset:   1:8df5997c3361
   |  tag:         tip
-  |  parent:      0:4a2df7238c3b
   |  user:        test
   |  date:        Thu Jan 01 00:00:00 1970 +0000
   |  summary:     my test archived commit
--- a/tests/test-releasenotes-formatting.t	Thu Oct 20 12:05:17 2022 -0400
+++ b/tests/test-releasenotes-formatting.t	Mon Oct 24 15:32:14 2022 +0200
@@ -387,6 +387,8 @@
 
   $ touch a
   $ hg -q commit -A -l - << EOF
+  > commit 2
+  > 
   > .. asf::
   > 
   >    First paragraph under this admonition.
@@ -395,7 +397,7 @@
 Suggest similar admonition in place of the invalid one.
 
   $ hg releasenotes -r . -c
-  Invalid admonition 'asf' present in changeset 4026fe9e1c20
+  Invalid admonition 'asf' present in changeset 99fa3c800c5e
 
   $ touch b
   $ hg -q commit -A -l - << EOF
@@ -405,7 +407,7 @@
   > EOF
 
   $ hg releasenotes -r . -c
-  Invalid admonition 'fixes' present in changeset 0e7130d2705c
+  Invalid admonition 'fixes' present in changeset 4737b1b5afd1
   (did you mean fix?)
 
   $ cd ..
--- a/tests/test-revset.t	Thu Oct 20 12:05:17 2022 -0400
+++ b/tests/test-revset.t	Mon Oct 24 15:32:14 2022 +0200
@@ -2974,6 +2974,25 @@
   1 b11  m12  u111 112 7200
   0 b12  m111 u112 111 10800
 
+random sort
+
+  $ hg log --rev 'sort(all(), "random")' | wc -l
+  \s*8 (re)
+  $ hg log --rev 'sort(all(), "-random")' | wc -l
+  \s*8 (re)
+  $ hg log --rev 'sort(all(), "random", random.seed=celeste)'
+  6 b111 t2   tu   130 0
+  7 b111 t3   tu   130 0
+  4 b111 m112 u111 110 14400
+  3 b112 m111 u11  120 0
+  5 b111 t1   tu   130 0
+  0 b12  m111 u112 111 10800
+  1 b11  m12  u111 112 7200
+  2 b111 m11  u12  111 3600
+  $ hg log --rev 'first(sort(all(), "random", random.seed=celeste))'
+  6 b111 t2   tu   130 0
+
+
 topographical sorting can't be combined with other sort keys, and you can't
 use the topo.firstbranch option when topo sort is not active:
 
--- a/tests/test-revset2.t	Thu Oct 20 12:05:17 2022 -0400
+++ b/tests/test-revset2.t	Mon Oct 24 15:32:14 2022 +0200
@@ -1481,6 +1481,20 @@
   $ hg init namedbranch
   $ cd namedbranch
 
+  $ log 'roots(.)'
+  -1
+  $ log 'roots(. or wdir())'
+  -1
+  $ log 'roots(wdir())'
+  2147483647
+  $ log 'sort(., -topo)'
+  -1
+  $ log 'sort(. or wdir(), -topo)'
+  -1
+  2147483647
+  $ log 'sort(wdir(), -topo)'
+  2147483647
+
   $ echo default0 >> a
   $ hg ci -Aqm0
   $ echo default1 >> a
@@ -1498,6 +1512,17 @@
   $ echo default5 >> a
   $ hg ci -m5
 
+  $ log 'roots(. or wdir())'
+  5
+  $ log 'roots(wdir())'
+  2147483647
+  $ log 'sort(. or wdir() or .^, -topo)'
+  4
+  5
+  2147483647
+  $ log 'sort(wdir(), -topo)'
+  2147483647
+
 "null" revision belongs to "default" branch (issue4683)
 
   $ log 'branch(null)'
--- a/tests/test-rhg-sparse-narrow.t	Thu Oct 20 12:05:17 2022 -0400
+++ b/tests/test-rhg-sparse-narrow.t	Mon Oct 24 15:32:14 2022 +0200
@@ -85,15 +85,12 @@
   dir1/x
   dir1/y
 
-Hg status needs to do some filtering based on narrow spec, so we don't
-support it in rhg for narrow clones yet.
+Hg status needs to do some filtering based on the narrow spec
 
   $ mkdir dir2
   $ touch dir2/q
   $ "$real_hg" status
   $ $NO_FALLBACK rhg --config rhg.status=true status
-  unsupported feature: rhg status is not supported for sparse checkouts or narrow clones yet
-  [252]
 
 Adding "orphaned" index files:
 
--- a/tests/test-shelve.t	Thu Oct 20 12:05:17 2022 -0400
+++ b/tests/test-shelve.t	Mon Oct 24 15:32:14 2022 +0200
@@ -24,7 +24,7 @@
 
   $ cat <<EOF >> $HGRCPATH
   > [format]
-  > internal-phase = yes
+  > use-internal-phase = yes
   > EOF
 
 #endif
@@ -253,12 +253,12 @@
 (this also tests that the same timestamp prevents backups from being
 removed, even though there are more than 'maxbackups' backups)
 
-  $ f -t .hg/shelve-backup/default.patch
-  .hg/shelve-backup/default.patch: file
-  $ touch -t 200001010000 .hg/shelve-backup/default.patch
-  $ f -t .hg/shelve-backup/default-1.patch
-  .hg/shelve-backup/default-1.patch: file
-  $ touch -t 200001010000 .hg/shelve-backup/default-1.patch
+  $ f -t .hg/shelve-backup/default.shelve
+  .hg/shelve-backup/default.shelve: file
+  $ touch -t 200001010000 .hg/shelve-backup/default.shelve
+  $ f -t .hg/shelve-backup/default-1.shelve
+  .hg/shelve-backup/default-1.shelve: file
+  $ touch -t 200001010000 .hg/shelve-backup/default-1.shelve
 
   $ hg unshelve
   unshelving change 'default-01'
@@ -1544,4 +1544,87 @@
   $ hg update -q --clean .
   $ hg patch -p1 test_patch.patch
   applying test_patch.patch
+
+  $ hg strip -q -r .
 #endif
+
+Check the comment of the last commit for consistency
+
+  $ hg log -r . --template '{desc}\n'
+  add C to bars
+
+-- if phasebased, shelve works without patch and bundle
+
+  $ hg update -q --clean .
+  $ rm -r .hg/shelve*
+  $ echo import antigravity >> somefile.py
+  $ hg add somefile.py
+  $ hg shelve -q
+#if phasebased
+  $ rm .hg/shelved/default.hg
+  $ rm .hg/shelved/default.patch
+#endif
+
+shelve --list --patch should work even with no patch file.
+
+  $ hg shelve --list --patch
+  default         (*s ago) * changes to: add C to bars (glob)
+  
+  diff --git a/somefile.py b/somefile.py
+  new file mode 100644
+  --- /dev/null
+  +++ b/somefile.py
+  @@ -0,0 +1,1 @@
+  +import antigravity
+
+  $ hg unshelve
+  unshelving change 'default'
+
+#if phasebased
+  $ ls .hg/shelve-backup
+  default.shelve
+#endif
+
+#if stripbased
+  $ ls .hg/shelve-backup
+  default.hg
+  default.patch
+  default.shelve
+#endif
+
+
+-- allow for phase-based shelves to be disabled
+
+  $ hg update -q --clean .
+  $ hg strip -q --hidden -r 0
+  $ rm -r .hg/shelve*
+
+#if phasebased
+  $ cat <<EOF >> $HGRCPATH
+  > [shelve]
+  > store = strip
+  > EOF
+#endif
+
+  $ echo import this >> somefile.py
+  $ hg add somefile.py
+  $ hg shelve -q
+  $ hg log --hidden
+  $ ls .hg/shelved
+  default.hg
+  default.patch
+  default.shelve
+  $ hg unshelve -q
+
+Override the disabling by re-enabling phase-based shelves
+
+  $ hg shelve --config shelve.store=internal -q
+
+#if phasebased
+  $ hg log --hidden --template '{user}\n'
+  shelve@localhost
+#endif
+
+#if stripbased
+  $ hg log --hidden --template '{user}\n'
+#endif
--- a/tests/test-shelve2.t	Thu Oct 20 12:05:17 2022 -0400
+++ b/tests/test-shelve2.t	Mon Oct 24 15:32:14 2022 +0200
@@ -26,7 +26,7 @@
 
   $ cat <<EOF >> $HGRCPATH
   > [format]
-  > internal-phase = yes
+  > use-internal-phase = yes
   > EOF
 
 #endif
--- a/tests/test-status.t	Thu Oct 20 12:05:17 2022 -0400
+++ b/tests/test-status.t	Mon Oct 24 15:32:14 2022 +0200
@@ -637,9 +637,16 @@
   M a
     b
   R b
+  $ hg st --config ui.statuscopies=true --no-copies
+  M a
+  R b
   $ hg st --config ui.statuscopies=false
   M a
   R b
+  $ hg st --config ui.statuscopies=false --copies
+  M a
+    b
+  R b
   $ hg st --config ui.tweakdefaults=yes
   M a
     b
--- a/tests/test-subrepo-svn.t	Thu Oct 20 12:05:17 2022 -0400
+++ b/tests/test-subrepo-svn.t	Mon Oct 24 15:32:14 2022 +0200
@@ -591,7 +591,7 @@
   $ cd "$WCROOT"
   $ svn up > /dev/null
   $ mkdir trunk/subdir branches
-  $ echo a > trunk/subdir/a
+  $ echo foo > trunk/subdir/a
   $ svn add trunk/subdir branches
   A         trunk/subdir
   A         trunk/subdir/a
--- a/tests/test-template-functions.t	Thu Oct 20 12:05:17 2022 -0400
+++ b/tests/test-template-functions.t	Mon Oct 24 15:32:14 2022 +0200
@@ -1718,4 +1718,19 @@
   $ hg log -T "{config('templateconfig', 'knob', if(true, 'foo', 'bar'))}\n"
   foo
 
+reverse filter:
+
+  $ hg log -T "{'abc\ndef\nghi'|splitlines|reverse}\n"
+  ghi def abc
+
+  $ hg log -T "{'abc'|reverse}\n"
+  hg: parse error: not reversible
+  (incompatible use of template filter 'reverse')
+  [10]
+
+  $ hg log -T "{date|reverse}\n"
+  hg: parse error: not reversible
+  (template filter 'reverse' is not compatible with keyword 'date')
+  [10]
+
   $ cd ..
--- a/tests/test-update-branches.t	Thu Oct 20 12:05:17 2022 -0400
+++ b/tests/test-update-branches.t	Mon Oct 24 15:32:14 2022 +0200
@@ -633,6 +633,10 @@
   # 
   # To mark files as resolved:  hg resolve --mark FILE
   
+  $ hg status -T '{status} {path} - {relpath(path)}\n'
+  M foo - foo
+   a - a
+
   $ hg status -Tjson
   [
    {