changeset 49664:f56873a7284c

branching: merge stable into default
author Raphaël Gomès <rgomes@octobus.net>
date Fri, 25 Nov 2022 15:14:40 +0100
parents 45d7b8c380d7 (diff) c6f0bcb7bc57 (current diff)
children 4bd12c0f531e
files mercurial/help.py
diffstat 119 files changed, 7586 insertions(+), 2152 deletions(-)
--- a/.gitlab/merge_request_templates/Default.md	Thu Nov 24 10:34:34 2022 +0100
+++ b/.gitlab/merge_request_templates/Default.md	Fri Nov 25 15:14:40 2022 +0100
@@ -1,5 +1,8 @@
 /assign_reviewer @mercurial.review
 
+
+<!--
+
 Welcome to the Mercurial Merge Request creation process:
 
 * Set a simple title for your MR,
@@ -11,3 +14,5 @@
 
 * https://www.mercurial-scm.org/wiki/ContributingChanges
 * https://www.mercurial-scm.org/wiki/Heptapod
+
+-->
--- a/contrib/check-code.py	Thu Nov 24 10:34:34 2022 +0100
+++ b/contrib/check-code.py	Fri Nov 25 15:14:40 2022 +0100
@@ -372,10 +372,6 @@
         ),
         (r'[^^+=*/!<>&| %-](\s=|=\s)[^= ]', "wrong whitespace around ="),
         (
-            r'\([^()]*( =[^=]|[^<>!=]= )',
-            "no whitespace around = for named parameters",
-        ),
-        (
             r'raise [^,(]+, (\([^\)]+\)|[^,\(\)]+)$',
             "don't use old-style two-argument raise, use Exception(message)",
         ),
--- a/contrib/check-pytype.sh	Thu Nov 24 10:34:34 2022 +0100
+++ b/contrib/check-pytype.sh	Fri Nov 25 15:14:40 2022 +0100
@@ -70,3 +70,6 @@
     -x mercurial/wireprotoframing.py \
     -x mercurial/wireprotov1peer.py \
     -x mercurial/wireprotov1server.py
+
+echo 'pytype crashed while generating the following type stubs:'
+find .pytype/pyi -name '*.pyi' | xargs grep -l '# Caught error' | sort
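A note on the new reporting step: per the echo above, pytype still emits a stub for a module it crashed on, but marks it with a '# Caught error' comment, so this pipeline lists every module whose stub generation failed. An equivalent long form (illustrative shell, same behaviour):

    find .pytype/pyi -name '*.pyi' | while read -r pyi; do
        grep -q '# Caught error' "$pyi" && echo "$pyi"   # keep marked stubs
    done | sort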
--- a/contrib/heptapod-ci.yml	Thu Nov 24 10:34:34 2022 +0100
+++ b/contrib/heptapod-ci.yml	Fri Nov 25 15:14:40 2022 +0100
@@ -89,7 +89,8 @@
       - hg -R /tmp/mercurial-ci/ update `hg log --rev '.' --template '{node}'`
       - cd /tmp/mercurial-ci/
       - make local PYTHON=$PYTHON
-      - $PYTHON -m pip install --user -U libcst==0.3.20 pytype==2022.03.29
+      - $PYTHON -m pip install --user -U libcst==0.3.20 pytype==2022.11.18
+      - ./contrib/setup-pytype.sh
     script:
       - echo "Entering script section"
       - sh contrib/check-pytype.sh
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/contrib/setup-pytype.sh	Fri Nov 25 15:14:40 2022 +0100
@@ -0,0 +1,33 @@
+#!/bin/bash
+
+set -e
+set -u
+
+# Find the python3 setup that would run pytype
+PYTYPE=`which pytype`
+PYTHON3=`head -n1 ${PYTYPE} | sed -s 's/#!//'`
+
+# Existing stubs that pytype processes live here
+TYPESHED=$(${PYTHON3} -c "import pytype; print(pytype.__path__[0])")/typeshed/stubs
+HG_STUBS=${TYPESHED}/mercurial
+
+echo "Patching typeshed at $HG_STUBS"
+
+rm -rf ${HG_STUBS}
+mkdir -p ${HG_STUBS}
+
+cat > ${HG_STUBS}/METADATA.toml <<EOF
+version = "0.1"
+EOF
+
+
+mkdir -p ${HG_STUBS}/mercurial/cext ${HG_STUBS}/mercurial/thirdparty/attr
+
+touch ${HG_STUBS}/mercurial/__init__.pyi
+touch ${HG_STUBS}/mercurial/cext/__init__.pyi
+touch ${HG_STUBS}/mercurial/thirdparty/__init__.pyi
+
+ln -sf $(hg root)/mercurial/cext/*.{pyi,typed} \
+       ${HG_STUBS}/mercurial/cext
+ln -sf $(hg root)/mercurial/thirdparty/attr/*.{pyi,typed} \
+       ${HG_STUBS}/mercurial/thirdparty/attr
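For context, pytype looks up third-party stubs in its bundled typeshed copy, which is why the script symlinks the repository's hand-written .pyi files into it. The resulting layout looks roughly like this (sketch; the typeshed path depends on where pytype is installed):

    typeshed/stubs/mercurial/
        METADATA.toml                  # version = "0.1"
        mercurial/
            __init__.pyi
            cext/
                __init__.pyi           # plus symlinked *.pyi and py.typed
            thirdparty/
                __init__.pyi
                attr/                  # symlinked *.pyi and *.typed files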
--- a/hgdemandimport/demandimportpy3.py	Thu Nov 24 10:34:34 2022 +0100
+++ b/hgdemandimport/demandimportpy3.py	Fri Nov 25 15:14:40 2022 +0100
@@ -23,8 +23,6 @@
   enabled.
 """
 
-# This line is unnecessary, but it satisfies test-check-py3-compat.t.
-
 import contextlib
 import importlib.util
 import sys
@@ -43,6 +41,10 @@
         """Make the module load lazily."""
         with tracing.log('demandimport %s', module):
             if _deactivated or module.__name__ in ignores:
+                # Reset the loader on the module as super() does (issue6725)
+                module.__spec__.loader = self.loader
+                module.__loader__ = self.loader
+
                 self.loader.exec_module(module)
             else:
                 super().exec_module(module)
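The added lines mirror what the standard lazy loader does: when a deferred module finally executes, the loader recorded on the module must be swapped back from the lazy wrapper to the real one. A minimal stdlib-only sketch of the same mechanism (not Mercurial's demandimport):

    import importlib.util
    import sys

    def lazy_import(name):
        # wrap the real loader so module execution is deferred until first use
        spec = importlib.util.find_spec(name)
        loader = importlib.util.LazyLoader(spec.loader)
        spec.loader = loader
        module = importlib.util.module_from_spec(spec)
        sys.modules[name] = module
        loader.exec_module(module)  # body runs on first attribute access
        return module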
--- a/hgext/fsmonitor/pywatchman/__init__.py	Thu Nov 24 10:34:34 2022 +0100
+++ b/hgext/fsmonitor/pywatchman/__init__.py	Fri Nov 25 15:14:40 2022 +0100
@@ -26,8 +26,6 @@
 # OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 
-# no unicode literals
-
 import inspect
 import math
 import os
--- a/hgext/fsmonitor/pywatchman/capabilities.py	Thu Nov 24 10:34:34 2022 +0100
+++ b/hgext/fsmonitor/pywatchman/capabilities.py	Fri Nov 25 15:14:40 2022 +0100
@@ -26,8 +26,6 @@
 # OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 
-# no unicode literals
-
 
 def parse_version(vstr):
     res = 0
--- a/hgext/fsmonitor/pywatchman/compat.py	Thu Nov 24 10:34:34 2022 +0100
+++ b/hgext/fsmonitor/pywatchman/compat.py	Fri Nov 25 15:14:40 2022 +0100
@@ -26,8 +26,6 @@
 # OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 
-# no unicode literals
-
 import sys
 
 
--- a/hgext/fsmonitor/pywatchman/encoding.py	Thu Nov 24 10:34:34 2022 +0100
+++ b/hgext/fsmonitor/pywatchman/encoding.py	Fri Nov 25 15:14:40 2022 +0100
@@ -26,8 +26,6 @@
 # OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 
-# no unicode literals
-
 import sys
 
 from . import compat
--- a/hgext/fsmonitor/pywatchman/load.py	Thu Nov 24 10:34:34 2022 +0100
+++ b/hgext/fsmonitor/pywatchman/load.py	Fri Nov 25 15:14:40 2022 +0100
@@ -26,8 +26,6 @@
 # OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 
-# no unicode literals
-
 import ctypes
 
 
--- a/hgext/fsmonitor/pywatchman/pybser.py	Thu Nov 24 10:34:34 2022 +0100
+++ b/hgext/fsmonitor/pywatchman/pybser.py	Fri Nov 25 15:14:40 2022 +0100
@@ -26,8 +26,6 @@
 # OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 
-# no unicode literals
-
 import binascii
 import collections
 import ctypes
--- a/hgext/lfs/blobstore.py	Thu Nov 24 10:34:34 2022 +0100
+++ b/hgext/lfs/blobstore.py	Fri Nov 25 15:14:40 2022 +0100
@@ -168,12 +168,16 @@
             # producing the response (but the server has no way of telling us
             # that), and we really don't need to try to write the response to
             # the localstore, because it's not going to match the expected.
+            # The server also uses this method to store data uploaded by the
+            # client, so if this happens on the server side, it's possible
+            # that the client crashed or an antivirus interfered with the
+            # upload.
             if content_length is not None and int(content_length) != size:
                 msg = (
                     b"Response length (%d) does not match Content-Length "
-                    b"header (%d): likely server-side crash"
+                    b"header (%d) for %s"
                 )
-                raise LfsRemoteError(_(msg) % (size, int(content_length)))
+                raise LfsRemoteError(_(msg) % (size, int(content_length), oid))
 
             realoid = hex(sha256.digest())
             if realoid != oid:
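The surrounding method performs two independent integrity checks, which the hunk above only rewords. A simplified standalone sketch (hypothetical helper, not the lfs code):

    import hashlib

    def check_blob(oid, content_length, data):
        # 1. transport check: did we receive as many bytes as advertised?
        if content_length is not None and int(content_length) != len(data):
            raise ValueError(
                "Response length (%d) does not match Content-Length "
                "header (%d) for %s" % (len(data), int(content_length), oid)
            )
        # 2. content check: does the payload hash to the expected oid?
        if hashlib.sha256(data).hexdigest() != oid:
            raise ValueError("blob %s: hash mismatch" % oid)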
--- a/hgext/remotefilelog/remotefilelog.py	Thu Nov 24 10:34:34 2022 +0100
+++ b/hgext/remotefilelog/remotefilelog.py	Fri Nov 25 15:14:40 2022 +0100
@@ -299,6 +299,7 @@
         deltaprevious=False,
         deltamode=None,
         sidedata_helpers=None,
+        debug_info=None,
     ):
         # we don't use any of these parameters here
         del nodesorder, revisiondata, assumehaveparentrevisions, deltaprevious
--- a/hgext/sqlitestore.py	Thu Nov 24 10:34:34 2022 +0100
+++ b/hgext/sqlitestore.py	Fri Nov 25 15:14:40 2022 +0100
@@ -589,6 +589,7 @@
         assumehaveparentrevisions=False,
         deltamode=repository.CG_DELTAMODE_STD,
         sidedata_helpers=None,
+        debug_info=None,
     ):
         if nodesorder not in (b'nodes', b'storage', b'linear', None):
             raise error.ProgrammingError(
--- a/mercurial/bundle2.py	Thu Nov 24 10:34:34 2022 +0100
+++ b/mercurial/bundle2.py	Fri Nov 25 15:14:40 2022 +0100
@@ -1938,7 +1938,12 @@
             raise error.Abort(
                 _(b'old bundle types only supports v1 changegroups')
             )
+
+        # HG20 is the case without 2 values to unpack, but is handled above.
+        # pytype: disable=bad-unpacking
         header, comp = bundletypes[bundletype]
+        # pytype: enable=bad-unpacking
+
         if comp not in util.compengines.supportedbundletypes:
             raise error.Abort(_(b'unknown stream compression type: %s') % comp)
         compengine = util.compengines.forbundletype(comp)
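The disable/enable pair scopes the suppression to the single unpacking line. The general pattern looks like this (illustrative values, not the bundle2 table):

    table = {b'HG10UN': (b'HG10UN', None), b'HG20': None}  # mixed value types
    # pytype: disable=bad-unpacking
    header, comp = table[b'HG10UN']  # checker can't prove this is a 2-tuple
    # pytype: enable=bad-unpacking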
--- a/mercurial/bundlecaches.py	Thu Nov 24 10:34:34 2022 +0100
+++ b/mercurial/bundlecaches.py	Fri Nov 25 15:14:40 2022 +0100
@@ -5,6 +5,10 @@
 
 import collections
 
+from typing import (
+    cast,
+)
+
 from .i18n import _
 
 from .thirdparty import attr
@@ -247,7 +251,7 @@
     # required to apply it. If we see this metadata, compare against what the
     # repo supports and error if the bundle isn't compatible.
     if version == b'packed1' and b'requirements' in params:
-        requirements = set(params[b'requirements'].split(b','))
+        requirements = set(cast(bytes, params[b'requirements']).split(b','))
         missingreqs = requirements - requirementsmod.STREAM_FIXED_REQUIREMENTS
         if missingreqs:
             raise error.UnsupportedBundleSpecification(
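typing.cast does nothing at runtime; it only asserts a type to the checker, which is enough here because pytype cannot tell that this particular params value is bytes rather than a bool. An illustrative reduction of the pattern:

    from typing import Union, cast

    def split_requirements(value: Union[bool, bytes]) -> set:
        # runtime no-op; narrows the declared type so .split() type-checks
        return set(cast(bytes, value).split(b','))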
--- a/mercurial/cext/bdiff.pyi	Thu Nov 24 10:34:34 2022 +0100
+++ b/mercurial/cext/bdiff.pyi	Fri Nov 25 15:14:40 2022 +0100
@@ -5,7 +5,7 @@
 
 version: int
 
-def bdiff(a: bytes, b: bytes): bytes
+def bdiff(a: bytes, b: bytes) -> bytes: ...
 def blocks(a: bytes, b: bytes) -> List[Tuple[int, int, int, int]]: ...
 def fixws(s: bytes, allws: bool) -> bytes: ...
 def splitnewlines(text: bytes) -> List[bytes]: ...
--- a/mercurial/cext/osutil.pyi	Thu Nov 24 10:34:34 2022 +0100
+++ b/mercurial/cext/osutil.pyi	Fri Nov 25 15:14:40 2022 +0100
@@ -2,6 +2,7 @@
     AnyStr,
     IO,
     List,
+    Optional,
     Sequence,
 )
 
@@ -15,7 +16,7 @@
     st_mtime: int
     st_ctime: int
 
-def listdir(path: bytes, st: bool, skip: bool) -> List[stat]: ...
+def listdir(path: bytes, st: bool, skip: Optional[bool]) -> List[stat]: ...
 def posixfile(name: AnyStr, mode: bytes, buffering: int) -> IO: ...
 def statfiles(names: Sequence[bytes]) -> List[stat]: ...
 def setprocname(name: bytes) -> None: ...
--- a/mercurial/cext/parsers.pyi	Thu Nov 24 10:34:34 2022 +0100
+++ b/mercurial/cext/parsers.pyi	Fri Nov 25 15:14:40 2022 +0100
@@ -76,3 +76,7 @@
 
     def insert(self, rev: int) -> None: ...
     def shortest(self, node: bytes) -> int: ...
+
+# The IndexObject type here is defined in C, and there's no type for a buffer
+# return, as of py3.11.  https://github.com/python/typing/issues/593
+def parse_index2(data: object, inline: object, format: int = ...) -> Tuple[object, Optional[Tuple[int, object]]]: ...
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/mercurial/cext/py.typed	Fri Nov 25 15:14:40 2022 +0100
@@ -0,0 +1,1 @@
+partial
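Per PEP 561, a py.typed marker containing the single word "partial" declares a partial stub package: type checkers use the stubs that exist and fall back to other sources for anything missing, instead of treating the package as fully typed.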
--- a/mercurial/cffi/bdiff.py	Thu Nov 24 10:34:34 2022 +0100
+++ b/mercurial/cffi/bdiff.py	Fri Nov 25 15:14:40 2022 +0100
@@ -8,6 +8,11 @@
 
 import struct
 
+from typing import (
+    List,
+    Tuple,
+)
+
 from ..pure.bdiff import *
 from . import _bdiff  # pytype: disable=import-error
 
@@ -15,7 +20,7 @@
 lib = _bdiff.lib
 
 
-def blocks(sa, sb):
+def blocks(sa: bytes, sb: bytes) -> List[Tuple[int, int, int, int]]:
     a = ffi.new(b"struct bdiff_line**")
     b = ffi.new(b"struct bdiff_line**")
     ac = ffi.new(b"char[]", str(sa))
@@ -29,7 +34,7 @@
         count = lib.bdiff_diff(a[0], an, b[0], bn, l)
         if count < 0:
             raise MemoryError
-        rl = [None] * count
+        rl = [(0, 0, 0, 0)] * count
         h = l.next
         i = 0
         while h:
@@ -43,7 +48,7 @@
     return rl
 
 
-def bdiff(sa, sb):
+def bdiff(sa: bytes, sb: bytes) -> bytes:
     a = ffi.new(b"struct bdiff_line**")
     b = ffi.new(b"struct bdiff_line**")
     ac = ffi.new(b"char[]", str(sa))
--- a/mercurial/cffi/mpatch.py	Thu Nov 24 10:34:34 2022 +0100
+++ b/mercurial/cffi/mpatch.py	Fri Nov 25 15:14:40 2022 +0100
@@ -6,6 +6,8 @@
 # GNU General Public License version 2 or any later version.
 
 
+from typing import List
+
 from ..pure.mpatch import *
 from ..pure.mpatch import mpatchError  # silence pyflakes
 from . import _mpatch  # pytype: disable=import-error
@@ -26,7 +28,7 @@
     return container[0]
 
 
-def patches(text, bins):
+def patches(text: bytes, bins: List[bytes]) -> bytes:
     lgt = len(bins)
     all = []
     if not lgt:
--- a/mercurial/changegroup.py	Thu Nov 24 10:34:34 2022 +0100
+++ b/mercurial/changegroup.py	Fri Nov 25 15:14:40 2022 +0100
@@ -105,6 +105,164 @@
                 os.unlink(cleanup)
 
 
+def _dbg_ubdl_line(
+    ui,
+    indent,
+    key,
+    base_value=None,
+    percentage_base=None,
+    percentage_key=None,
+):
+    """Print one line of debug_unbundle_debug_info"""
+    line = b"DEBUG-UNBUNDLING: "
+    line += b' ' * (2 * indent)
+    key += b":"
+    padding = b''
+    if base_value is not None:
+        assert len(key) + 1 + (2 * indent) <= _KEY_PART_WIDTH
+        line += key.ljust(_KEY_PART_WIDTH - (2 * indent))
+        if isinstance(base_value, float):
+            line += b"%14.3f seconds" % base_value
+        else:
+            line += b"%10d" % base_value
+            padding = b'            '
+    else:
+        line += key
+
+    if percentage_base is not None:
+        line += padding
+        padding = b''
+        assert base_value is not None
+        percentage = base_value * 100 // percentage_base
+        if percentage_key is not None:
+            line += b" (%3d%% of %s)" % (
+                percentage,
+                percentage_key,
+            )
+        else:
+            line += b" (%3d%%)" % percentage
+
+    line += b'\n'
+    ui.write_err(line)
+
+
+def _sumf(items):
+    # python < 3.8 does not support a `start=0.0` argument to sum
+    # So we have to cheat a bit until we drop support for those versions
+    if not items:
+        return 0.0
+    return sum(items)
+
+
+def display_unbundle_debug_info(ui, debug_info):
+    """display an unbundling report from debug information"""
+    cl_info = []
+    mn_info = []
+    fl_info = []
+    _dispatch = [
+        (b'CHANGELOG:', cl_info),
+        (b'MANIFESTLOG:', mn_info),
+        (b'FILELOG:', fl_info),
+    ]
+    for e in debug_info:
+        for prefix, info in _dispatch:
+            if e["target-revlog"].startswith(prefix):
+                info.append(e)
+                break
+        else:
+            assert False, 'unreachable'
+    each_info = [
+        (b'changelog', cl_info),
+        (b'manifests', mn_info),
+        (b'files', fl_info),
+    ]
+
+    # General Revision Counts
+    _dbg_ubdl_line(ui, 0, b'revisions', len(debug_info))
+    for key, info in each_info:
+        if not info:
+            continue
+        _dbg_ubdl_line(ui, 1, key, len(info), len(debug_info))
+
+    # General Time spent
+    all_durations = [e['duration'] for e in debug_info]
+    all_durations.sort()
+    total_duration = _sumf(all_durations)
+    _dbg_ubdl_line(ui, 0, b'total-time', total_duration)
+
+    for key, info in each_info:
+        if not info:
+            continue
+        durations = [e['duration'] for e in info]
+        durations.sort()
+        _dbg_ubdl_line(ui, 1, key, _sumf(durations), total_duration)
+
+    # Count and cache reuse per delta types
+    each_types = {}
+    for key, info in each_info:
+        each_types[key] = types = {
+            b'full': 0,
+            b'full-cached': 0,
+            b'snapshot': 0,
+            b'snapshot-cached': 0,
+            b'delta': 0,
+            b'delta-cached': 0,
+            b'unknown': 0,
+            b'unknown-cached': 0,
+        }
+        for e in info:
+            types[e['type']] += 1
+            if e['using-cached-base']:
+                types[e['type'] + b'-cached'] += 1
+
+    EXPECTED_TYPES = (b'full', b'snapshot', b'delta', b'unknown')
+    if debug_info:
+        _dbg_ubdl_line(ui, 0, b'type-count')
+    for key, info in each_info:
+        if info:
+            _dbg_ubdl_line(ui, 1, key)
+        t = each_types[key]
+        for tn in EXPECTED_TYPES:
+            if t[tn]:
+                tc = tn + b'-cached'
+                _dbg_ubdl_line(ui, 2, tn, t[tn])
+                _dbg_ubdl_line(ui, 3, b'cached', t[tc], t[tn])
+
+    # time perf delta types and reuse
+    each_type_time = {}
+    for key, info in each_info:
+        each_type_time[key] = t = {
+            b'full': [],
+            b'full-cached': [],
+            b'snapshot': [],
+            b'snapshot-cached': [],
+            b'delta': [],
+            b'delta-cached': [],
+            b'unknown': [],
+            b'unknown-cached': [],
+        }
+        for e in info:
+            t[e['type']].append(e['duration'])
+            if e['using-cached-base']:
+                t[e['type'] + b'-cached'].append(e['duration'])
+        for t_key, value in list(t.items()):
+            value.sort()
+            t[t_key] = _sumf(value)
+
+    if debug_info:
+        _dbg_ubdl_line(ui, 0, b'type-time')
+    for key, info in each_info:
+        if info:
+            _dbg_ubdl_line(ui, 1, key)
+        t = each_type_time[key]
+        td = total_duration  # to save space on next lines
+        for tn in EXPECTED_TYPES:
+            if t[tn]:
+                tc = tn + b'-cached'
+                _dbg_ubdl_line(ui, 2, tn, t[tn], td, b"total")
+                _dbg_ubdl_line(ui, 3, b'cached', t[tc], td, b"total")
+
+
 class cg1unpacker:
     """Unpacker for cg1 changegroup streams.
 
@@ -254,7 +412,15 @@
                     pos = next
             yield closechunk()
 
-    def _unpackmanifests(self, repo, revmap, trp, prog, addrevisioncb=None):
+    def _unpackmanifests(
+        self,
+        repo,
+        revmap,
+        trp,
+        prog,
+        addrevisioncb=None,
+        debug_info=None,
+    ):
         self.callback = prog.increment
         # no need to check for empty manifest group here:
         # if the result of the merge of 1 and 2 is the same in 3 and 4,
@@ -263,7 +429,13 @@
         self.manifestheader()
         deltas = self.deltaiter()
         storage = repo.manifestlog.getstorage(b'')
-        storage.addgroup(deltas, revmap, trp, addrevisioncb=addrevisioncb)
+        storage.addgroup(
+            deltas,
+            revmap,
+            trp,
+            addrevisioncb=addrevisioncb,
+            debug_info=debug_info,
+        )
         prog.complete()
         self.callback = None
 
@@ -292,6 +464,10 @@
         """
         repo = repo.unfiltered()
 
+        debug_info = None
+        if repo.ui.configbool(b'debug', b'unbundling-stats'):
+            debug_info = []
+
         # Only useful if we're adding sidedata categories. If both peers have
         # the same categories, then we simply don't do anything.
         adding_sidedata = (
@@ -366,6 +542,7 @@
                 alwayscache=True,
                 addrevisioncb=onchangelog,
                 duplicaterevisioncb=ondupchangelog,
+                debug_info=debug_info,
             ):
                 repo.ui.develwarn(
                     b'applied empty changelog from changegroup',
@@ -413,6 +590,7 @@
                 trp,
                 progress,
                 addrevisioncb=on_manifest_rev,
+                debug_info=debug_info,
             )
 
             needfiles = {}
@@ -449,6 +627,7 @@
                 efiles,
                 needfiles,
                 addrevisioncb=on_filelog_rev,
+                debug_info=debug_info,
             )
 
             if sidedata_helpers:
@@ -567,6 +746,8 @@
                     b'changegroup-runhooks-%020i' % clstart,
                     lambda tr: repo._afterlock(runhooks),
                 )
+            if debug_info is not None:
+                display_unbundle_debug_info(repo.ui, debug_info)
         finally:
             repo.ui.flush()
         # never return 0 here:
@@ -626,9 +807,22 @@
         protocol_flags = 0
         return node, p1, p2, deltabase, cs, flags, protocol_flags
 
-    def _unpackmanifests(self, repo, revmap, trp, prog, addrevisioncb=None):
+    def _unpackmanifests(
+        self,
+        repo,
+        revmap,
+        trp,
+        prog,
+        addrevisioncb=None,
+        debug_info=None,
+    ):
         super(cg3unpacker, self)._unpackmanifests(
-            repo, revmap, trp, prog, addrevisioncb=addrevisioncb
+            repo,
+            revmap,
+            trp,
+            prog,
+            addrevisioncb=addrevisioncb,
+            debug_info=debug_info,
         )
         for chunkdata in iter(self.filelogheader, {}):
             # If we get here, there are directory manifests in the changegroup
@@ -636,7 +830,11 @@
             repo.ui.debug(b"adding %s revisions\n" % d)
             deltas = self.deltaiter()
             if not repo.manifestlog.getstorage(d).addgroup(
-                deltas, revmap, trp, addrevisioncb=addrevisioncb
+                deltas,
+                revmap,
+                trp,
+                addrevisioncb=addrevisioncb,
+                debug_info=debug_info,
             ):
                 raise error.Abort(_(b"received dir revlog group is empty"))
 
@@ -869,6 +1067,7 @@
     fullclnodes=None,
     precomputedellipsis=None,
     sidedata_helpers=None,
+    debug_info=None,
 ):
     """Calculate deltas for a set of revisions.
 
@@ -978,6 +1177,7 @@
         assumehaveparentrevisions=not ellipses,
         deltamode=deltamode,
         sidedata_helpers=sidedata_helpers,
+        debug_info=debug_info,
     )
 
     for i, revision in enumerate(revisions):
@@ -1003,6 +1203,187 @@
         progress.complete()
 
 
+def make_debug_info():
+    """ "build a "new" debug_info dictionnary
+
+    That dictionnary can be used to gather information about the bundle process
+    """
+    return {
+        'revision-total': 0,
+        'revision-changelog': 0,
+        'revision-manifest': 0,
+        'revision-files': 0,
+        'file-count': 0,
+        'merge-total': 0,
+        'available-delta': 0,
+        'available-full': 0,
+        'delta-against-prev': 0,
+        'delta-full': 0,
+        'delta-against-p1': 0,
+        'denied-delta-candeltafn': 0,
+        'denied-base-not-available': 0,
+        'reused-storage-delta': 0,
+        'computed-delta': 0,
+    }
+
+
+def merge_debug_info(base, other):
+    """merge the debug information from <other> into <base>
+
+    This function can be used to fold lower-level information into higher-level totals.
+    """
+    for key in (
+        'revision-total',
+        'revision-changelog',
+        'revision-manifest',
+        'revision-files',
+        'merge-total',
+        'available-delta',
+        'available-full',
+        'delta-against-prev',
+        'delta-full',
+        'delta-against-p1',
+        'denied-delta-candeltafn',
+        'denied-base-not-available',
+        'reused-storage-delta',
+        'computed-delta',
+    ):
+        base[key] += other[key]
+
+
+_KEY_PART_WIDTH = 17
+
+
+def _dbg_bdl_line(
+    ui,
+    indent,
+    key,
+    base_value=None,
+    percentage_base=None,
+    percentage_key=None,
+    percentage_ref=None,
+    extra=None,
+):
+    """Print one line of debug_bundle_debug_info"""
+    line = b"DEBUG-BUNDLING: "
+    line += b' ' * (2 * indent)
+    key += b":"
+    if base_value is not None:
+        assert len(key) + 1 + (2 * indent) <= _KEY_PART_WIDTH
+        line += key.ljust(_KEY_PART_WIDTH - (2 * indent))
+        line += b"%10d" % base_value
+    else:
+        line += key
+
+    if percentage_base is not None:
+        assert base_value is not None
+        percentage = base_value * 100 // percentage_base
+        if percentage_key is not None:
+            line += b" (%d%% of %s %d)" % (
+                percentage,
+                percentage_key,
+                percentage_ref,
+            )
+        else:
+            line += b" (%d%%)" % percentage
+
+    if extra:
+        line += b" "
+        line += extra
+
+    line += b'\n'
+    ui.write_err(line)
+
+
+def display_bundling_debug_info(
+    ui,
+    debug_info,
+    cl_debug_info,
+    mn_debug_info,
+    fl_debug_info,
+):
+    """display debug information gathered during a bundling through `ui`"""
+    d = debug_info
+    c = cl_debug_info
+    m = mn_debug_info
+    f = fl_debug_info
+    all_info = [
+        (b"changelog", b"cl", c),
+        (b"manifests", b"mn", m),
+        (b"files", b"fl", f),
+    ]
+    _dbg_bdl_line(ui, 0, b'revisions', d['revision-total'])
+    _dbg_bdl_line(ui, 1, b'changelog', d['revision-changelog'])
+    _dbg_bdl_line(ui, 1, b'manifest', d['revision-manifest'])
+    extra = b'(for %d revlogs)' % d['file-count']
+    _dbg_bdl_line(ui, 1, b'files', d['revision-files'], extra=extra)
+    if d['merge-total']:
+        _dbg_bdl_line(ui, 1, b'merge', d['merge-total'], d['revision-total'])
+    for k, __, v in all_info:
+        if v['merge-total']:
+            _dbg_bdl_line(ui, 2, k, v['merge-total'], v['revision-total'])
+
+    _dbg_bdl_line(ui, 0, b'deltas')
+    _dbg_bdl_line(
+        ui,
+        1,
+        b'from-storage',
+        d['reused-storage-delta'],
+        percentage_base=d['available-delta'],
+        percentage_key=b"available",
+        percentage_ref=d['available-delta'],
+    )
+
+    if d['denied-delta-candeltafn']:
+        _dbg_bdl_line(ui, 2, b'denied-fn', d['denied-delta-candeltafn'])
+    for __, k, v in all_info:
+        if v['denied-delta-candeltafn']:
+            _dbg_bdl_line(ui, 3, k, v['denied-delta-candeltafn'])
+
+    if d['denied-base-not-available']:
+        _dbg_bdl_line(ui, 2, b'denied-nb', d['denied-base-not-available'])
+    for k, __, v in all_info:
+        if v['denied-base-not-available']:
+            _dbg_bdl_line(ui, 3, k, v['denied-base-not-available'])
+
+    if d['computed-delta']:
+        _dbg_bdl_line(ui, 1, b'computed', d['computed-delta'])
+
+    if d['available-full']:
+        _dbg_bdl_line(
+            ui,
+            2,
+            b'full',
+            d['delta-full'],
+            percentage_base=d['available-full'],
+            percentage_key=b"native",
+            percentage_ref=d['available-full'],
+        )
+    for k, __, v in all_info:
+        if v['available-full']:
+            _dbg_bdl_line(
+                ui,
+                3,
+                k,
+                v['delta-full'],
+                percentage_base=v['available-full'],
+                percentage_key=b"native",
+                percentage_ref=v['available-full'],
+            )
+
+    if d['delta-against-prev']:
+        _dbg_bdl_line(ui, 2, b'previous', d['delta-against-prev'])
+    for k, __, v in all_info:
+        if v['delta-against-prev']:
+            _dbg_bdl_line(ui, 3, k, v['delta-against-prev'])
+
+    if d['delta-against-p1']:
+        _dbg_bdl_line(ui, 2, b'parent-1', d['delta-against-p1'])
+    for k, __, v in all_info:
+        if v['delta-against-p1']:
+            _dbg_bdl_line(ui, 3, k, v['delta-against-p1'])
+
+
 class cgpacker:
     def __init__(
         self,
@@ -1086,13 +1467,21 @@
             self._verbosenote = lambda s: None
 
     def generate(
-        self, commonrevs, clnodes, fastpathlinkrev, source, changelog=True
+        self,
+        commonrevs,
+        clnodes,
+        fastpathlinkrev,
+        source,
+        changelog=True,
     ):
         """Yield a sequence of changegroup byte chunks.
         If changelog is False, changelog data won't be added to changegroup
         """
 
+        debug_info = None
         repo = self._repo
+        if repo.ui.configbool(b'debug', b'bundling-stats'):
+            debug_info = make_debug_info()
         cl = repo.changelog
 
         self._verbosenote(_(b'uncompressed size of bundle content:\n'))
@@ -1107,14 +1496,19 @@
                 # correctly advertise its sidedata categories directly.
                 remote_sidedata = repo._wanted_sidedata
             sidedata_helpers = sidedatamod.get_sidedata_helpers(
-                repo, remote_sidedata
+                repo,
+                remote_sidedata,
             )
 
+        cl_debug_info = None
+        if debug_info is not None:
+            cl_debug_info = make_debug_info()
         clstate, deltas = self._generatechangelog(
             cl,
             clnodes,
             generate=changelog,
             sidedata_helpers=sidedata_helpers,
+            debug_info=cl_debug_info,
         )
         for delta in deltas:
             for chunk in _revisiondeltatochunks(
@@ -1126,6 +1520,9 @@
         close = closechunk()
         size += len(close)
         yield closechunk()
+        if debug_info is not None:
+            merge_debug_info(debug_info, cl_debug_info)
+            debug_info['revision-changelog'] = cl_debug_info['revision-total']
 
         self._verbosenote(_(b'%8.i (changelog)\n') % size)
 
@@ -1133,6 +1530,9 @@
         manifests = clstate[b'manifests']
         changedfiles = clstate[b'changedfiles']
 
+        if debug_info is not None:
+            debug_info['file-count'] = len(changedfiles)
+
         # We need to make sure that the linkrev in the changegroup refers to
         # the first changeset that introduced the manifest or file revision.
         # The fastpath is usually safer than the slowpath, because the filelogs
@@ -1156,6 +1556,9 @@
         fnodes = {}  # needed file nodes
 
         size = 0
+        mn_debug_info = None
+        if debug_info is not None:
+            mn_debug_info = make_debug_info()
         it = self.generatemanifests(
             commonrevs,
             clrevorder,
@@ -1165,6 +1568,7 @@
             source,
             clstate[b'clrevtomanifestrev'],
             sidedata_helpers=sidedata_helpers,
+            debug_info=mn_debug_info,
         )
 
         for tree, deltas in it:
@@ -1185,6 +1589,9 @@
             close = closechunk()
             size += len(close)
             yield close
+        if debug_info is not None:
+            merge_debug_info(debug_info, mn_debug_info)
+            debug_info['revision-manifest'] = mn_debug_info['revision-total']
 
         self._verbosenote(_(b'%8.i (manifests)\n') % size)
         yield self._manifestsend
@@ -1199,6 +1606,9 @@
         manifests.clear()
         clrevs = {cl.rev(x) for x in clnodes}
 
+        fl_debug_info = None
+        if debug_info is not None:
+            fl_debug_info = make_debug_info()
         it = self.generatefiles(
             changedfiles,
             commonrevs,
@@ -1208,6 +1618,7 @@
             fnodes,
             clrevs,
             sidedata_helpers=sidedata_helpers,
+            debug_info=fl_debug_info,
         )
 
         for path, deltas in it:
@@ -1230,12 +1641,29 @@
             self._verbosenote(_(b'%8.i  %s\n') % (size, path))
 
         yield closechunk()
+        if debug_info is not None:
+            merge_debug_info(debug_info, fl_debug_info)
+            debug_info['revision-files'] = fl_debug_info['revision-total']
+
+        if debug_info is not None:
+            display_bundling_debug_info(
+                repo.ui,
+                debug_info,
+                cl_debug_info,
+                mn_debug_info,
+                fl_debug_info,
+            )
 
         if clnodes:
             repo.hook(b'outgoing', node=hex(clnodes[0]), source=source)
 
     def _generatechangelog(
-        self, cl, nodes, generate=True, sidedata_helpers=None
+        self,
+        cl,
+        nodes,
+        generate=True,
+        sidedata_helpers=None,
+        debug_info=None,
     ):
         """Generate data for changelog chunks.
 
@@ -1332,6 +1760,7 @@
             fullclnodes=self._fullclnodes,
             precomputedellipsis=self._precomputedellipsis,
             sidedata_helpers=sidedata_helpers,
+            debug_info=debug_info,
         )
 
         return state, gen
@@ -1346,6 +1775,7 @@
         source,
         clrevtolocalrev,
         sidedata_helpers=None,
+        debug_info=None,
     ):
         """Returns an iterator of changegroup chunks containing manifests.
 
@@ -1444,6 +1874,7 @@
                 fullclnodes=self._fullclnodes,
                 precomputedellipsis=self._precomputedellipsis,
                 sidedata_helpers=sidedata_helpers,
+                debug_info=debug_info,
             )
 
             if not self._oldmatcher.visitdir(store.tree[:-1]):
@@ -1483,6 +1914,7 @@
         fnodes,
         clrevs,
         sidedata_helpers=None,
+        debug_info=None,
     ):
         changedfiles = [
             f
@@ -1578,6 +2010,7 @@
                 fullclnodes=self._fullclnodes,
                 precomputedellipsis=self._precomputedellipsis,
                 sidedata_helpers=sidedata_helpers,
+                debug_info=debug_info,
             )
 
             yield fname, deltas
@@ -1867,7 +2300,12 @@
 
 
 def makechangegroup(
-    repo, outgoing, version, source, fastpath=False, bundlecaps=None
+    repo,
+    outgoing,
+    version,
+    source,
+    fastpath=False,
+    bundlecaps=None,
 ):
     cgstream = makestream(
         repo,
@@ -1917,7 +2355,12 @@
 
     repo.hook(b'preoutgoing', throw=True, source=source)
     _changegroupinfo(repo, csets, source)
-    return bundler.generate(commonrevs, csets, fastpathlinkrev, source)
+    return bundler.generate(
+        commonrevs,
+        csets,
+        fastpathlinkrev,
+        source,
+    )
 
 
 def _addchangegroupfiles(
@@ -1928,6 +2371,7 @@
     expectedfiles,
     needfiles,
     addrevisioncb=None,
+    debug_info=None,
 ):
     revisions = 0
     files = 0
@@ -1948,6 +2392,7 @@
                 revmap,
                 trp,
                 addrevisioncb=addrevisioncb,
+                debug_info=debug_info,
             )
             if not added:
                 raise error.Abort(_(b"received file revlog group is empty"))
--- a/mercurial/configitems.py	Thu Nov 24 10:34:34 2022 +0100
+++ b/mercurial/configitems.py	Fri Nov 25 15:14:40 2022 +0100
@@ -588,6 +588,18 @@
     b'revlog.debug-delta',
     default=False,
 )
+# display extra information about the bundling process
+coreconfigitem(
+    b'debug',
+    b'bundling-stats',
+    default=False,
+)
+# display extra information about the unbundling process
+coreconfigitem(
+    b'debug',
+    b'unbundling-stats',
+    default=False,
+)
 coreconfigitem(
     b'defaults',
     b'.*',
@@ -911,6 +923,13 @@
     b'changegroup4',
     default=False,
 )
+
+# this rank configuration might be removed once the computation has no performance impact
+coreconfigitem(
+    b'experimental',
+    b'changelog-v2.compute-rank',
+    default=True,
+)
 coreconfigitem(
     b'experimental',
     b'cleanup-as-archived',
@@ -1774,6 +1793,13 @@
 )
 coreconfigitem(
     b'merge-tools',
+    br'.*\.regappend$',
+    default=b"",
+    generic=True,
+    priority=-1,
+)
+coreconfigitem(
+    b'merge-tools',
     br'.*\.symlink$',
     default=False,
     generic=True,
@@ -2023,6 +2049,11 @@
 )
 coreconfigitem(
     b'storage',
+    b'revlog.delta-parent-search.candidate-group-chunk-size',
+    default=10,
+)
+coreconfigitem(
+    b'storage',
     b'revlog.issue6528.fix-incoming',
     default=True,
 )
@@ -2123,7 +2154,7 @@
 coreconfigitem(
     b'server',
     b'pullbundle',
-    default=False,
+    default=True,
 )
 coreconfigitem(
     b'server',
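The two new stats switches are plain booleans read via ui.configbool in changegroup.py, so the reports can be enabled from any hgrc:

    [debug]
    bundling-stats = yes
    unbundling-stats = yes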
--- a/mercurial/debugcommands.py	Thu Nov 24 10:34:34 2022 +0100
+++ b/mercurial/debugcommands.py	Fri Nov 25 15:14:40 2022 +0100
@@ -21,7 +21,6 @@
 import socket
 import ssl
 import stat
-import string
 import subprocess
 import sys
 import time
@@ -59,6 +58,7 @@
     localrepo,
     lock as lockmod,
     logcmdutil,
+    mdiff,
     mergestate as mergestatemod,
     metadata,
     obsolete,
@@ -990,17 +990,29 @@
 
 @command(
     b'debug-delta-find',
-    cmdutil.debugrevlogopts + cmdutil.formatteropts,
+    cmdutil.debugrevlogopts
+    + cmdutil.formatteropts
+    + [
+        (
+            b'',
+            b'source',
+            b'full',
+            _(b'input data fed to the process (full, storage, p1, p2, prev)'),
+        ),
+    ],
     _(b'-c|-m|FILE REV'),
     optionalrepo=True,
 )
-def debugdeltafind(ui, repo, arg_1, arg_2=None, **opts):
+def debugdeltafind(ui, repo, arg_1, arg_2=None, source=b'full', **opts):
     """display the computation to get to a valid delta for storing REV
 
     This command will replay the process used to find the "best" delta to store
     a revision and display information about all the steps used to get to that
     result.
 
+    By default, the process is fed the full text of the revision. This can
+    be controlled with the --source flag.
+
     The revision use the revision number of the target storage (not changelog
     revision number).
 
@@ -1028,11 +1040,31 @@
     p1r, p2r = revlog.parentrevs(rev)
     p1 = revlog.node(p1r)
     p2 = revlog.node(p2r)
-    btext = [revlog.revision(rev)]
+    full_text = revlog.revision(rev)
+    btext = [full_text]
     textlen = len(btext[0])
     cachedelta = None
     flags = revlog.flags(rev)
 
+    if source != b'full':
+        if source == b'storage':
+            base_rev = revlog.deltaparent(rev)
+        elif source == b'p1':
+            base_rev = p1r
+        elif source == b'p2':
+            base_rev = p2r
+        elif source == b'prev':
+            base_rev = rev - 1
+        else:
+            raise error.InputError(b"invalid --source value: %s" % source)
+
+        if base_rev != nullrev:
+            base_text = revlog.revision(base_rev)
+            delta = mdiff.textdiff(base_text, full_text)
+
+            cachedelta = (base_rev, delta)
+            btext = [None]
+
     revinfo = revlogutils.revisioninfo(
         node,
         p1,
@@ -3207,348 +3239,10 @@
     r = cmdutil.openrevlog(repo, b'debugrevlog', file_, opts)
 
     if opts.get(b"dump"):
-        numrevs = len(r)
-        ui.write(
-            (
-                b"# rev p1rev p2rev start   end deltastart base   p1   p2"
-                b" rawsize totalsize compression heads chainlen\n"
-            )
-        )
-        ts = 0
-        heads = set()
-
-        for rev in range(numrevs):
-            dbase = r.deltaparent(rev)
-            if dbase == -1:
-                dbase = rev
-            cbase = r.chainbase(rev)
-            clen = r.chainlen(rev)
-            p1, p2 = r.parentrevs(rev)
-            rs = r.rawsize(rev)
-            ts = ts + rs
-            heads -= set(r.parentrevs(rev))
-            heads.add(rev)
-            try:
-                compression = ts / r.end(rev)
-            except ZeroDivisionError:
-                compression = 0
-            ui.write(
-                b"%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
-                b"%11d %5d %8d\n"
-                % (
-                    rev,
-                    p1,
-                    p2,
-                    r.start(rev),
-                    r.end(rev),
-                    r.start(dbase),
-                    r.start(cbase),
-                    r.start(p1),
-                    r.start(p2),
-                    rs,
-                    ts,
-                    compression,
-                    len(heads),
-                    clen,
-                )
-            )
-        return 0
-
-    format = r._format_version
-    v = r._format_flags
-    flags = []
-    gdelta = False
-    if v & revlog.FLAG_INLINE_DATA:
-        flags.append(b'inline')
-    if v & revlog.FLAG_GENERALDELTA:
-        gdelta = True
-        flags.append(b'generaldelta')
-    if not flags:
-        flags = [b'(none)']
-
-    ### tracks merge vs single parent
-    nummerges = 0
-
-    ### tracks ways the "delta" are build
-    # nodelta
-    numempty = 0
-    numemptytext = 0
-    numemptydelta = 0
-    # full file content
-    numfull = 0
-    # intermediate snapshot against a prior snapshot
-    numsemi = 0
-    # snapshot count per depth
-    numsnapdepth = collections.defaultdict(lambda: 0)
-    # delta against previous revision
-    numprev = 0
-    # delta against first or second parent (not prev)
-    nump1 = 0
-    nump2 = 0
-    # delta against neither prev nor parents
-    numother = 0
-    # delta against prev that are also first or second parent
-    # (details of `numprev`)
-    nump1prev = 0
-    nump2prev = 0
-
-    # data about delta chain of each revs
-    chainlengths = []
-    chainbases = []
-    chainspans = []
-
-    # data about each revision
-    datasize = [None, 0, 0]
-    fullsize = [None, 0, 0]
-    semisize = [None, 0, 0]
-    # snapshot count per depth
-    snapsizedepth = collections.defaultdict(lambda: [None, 0, 0])
-    deltasize = [None, 0, 0]
-    chunktypecounts = {}
-    chunktypesizes = {}
-
-    def addsize(size, l):
-        if l[0] is None or size < l[0]:
-            l[0] = size
-        if size > l[1]:
-            l[1] = size
-        l[2] += size
-
-    numrevs = len(r)
-    for rev in range(numrevs):
-        p1, p2 = r.parentrevs(rev)
-        delta = r.deltaparent(rev)
-        if format > 0:
-            addsize(r.rawsize(rev), datasize)
-        if p2 != nullrev:
-            nummerges += 1
-        size = r.length(rev)
-        if delta == nullrev:
-            chainlengths.append(0)
-            chainbases.append(r.start(rev))
-            chainspans.append(size)
-            if size == 0:
-                numempty += 1
-                numemptytext += 1
-            else:
-                numfull += 1
-                numsnapdepth[0] += 1
-                addsize(size, fullsize)
-                addsize(size, snapsizedepth[0])
-        else:
-            chainlengths.append(chainlengths[delta] + 1)
-            baseaddr = chainbases[delta]
-            revaddr = r.start(rev)
-            chainbases.append(baseaddr)
-            chainspans.append((revaddr - baseaddr) + size)
-            if size == 0:
-                numempty += 1
-                numemptydelta += 1
-            elif r.issnapshot(rev):
-                addsize(size, semisize)
-                numsemi += 1
-                depth = r.snapshotdepth(rev)
-                numsnapdepth[depth] += 1
-                addsize(size, snapsizedepth[depth])
-            else:
-                addsize(size, deltasize)
-                if delta == rev - 1:
-                    numprev += 1
-                    if delta == p1:
-                        nump1prev += 1
-                    elif delta == p2:
-                        nump2prev += 1
-                elif delta == p1:
-                    nump1 += 1
-                elif delta == p2:
-                    nump2 += 1
-                elif delta != nullrev:
-                    numother += 1
-
-        # Obtain data on the raw chunks in the revlog.
-        if util.safehasattr(r, b'_getsegmentforrevs'):
-            segment = r._getsegmentforrevs(rev, rev)[1]
-        else:
-            segment = r._revlog._getsegmentforrevs(rev, rev)[1]
-        if segment:
-            chunktype = bytes(segment[0:1])
-        else:
-            chunktype = b'empty'
-
-        if chunktype not in chunktypecounts:
-            chunktypecounts[chunktype] = 0
-            chunktypesizes[chunktype] = 0
-
-        chunktypecounts[chunktype] += 1
-        chunktypesizes[chunktype] += size
-
-    # Adjust size min value for empty cases
-    for size in (datasize, fullsize, semisize, deltasize):
-        if size[0] is None:
-            size[0] = 0
-
-    numdeltas = numrevs - numfull - numempty - numsemi
-    numoprev = numprev - nump1prev - nump2prev
-    totalrawsize = datasize[2]
-    datasize[2] /= numrevs
-    fulltotal = fullsize[2]
-    if numfull == 0:
-        fullsize[2] = 0
+        revlog_debug.dump(ui, r)
     else:
-        fullsize[2] /= numfull
-    semitotal = semisize[2]
-    snaptotal = {}
-    if numsemi > 0:
-        semisize[2] /= numsemi
-    for depth in snapsizedepth:
-        snaptotal[depth] = snapsizedepth[depth][2]
-        snapsizedepth[depth][2] /= numsnapdepth[depth]
-
-    deltatotal = deltasize[2]
-    if numdeltas > 0:
-        deltasize[2] /= numdeltas
-    totalsize = fulltotal + semitotal + deltatotal
-    avgchainlen = sum(chainlengths) / numrevs
-    maxchainlen = max(chainlengths)
-    maxchainspan = max(chainspans)
-    compratio = 1
-    if totalsize:
-        compratio = totalrawsize / totalsize
-
-    basedfmtstr = b'%%%dd\n'
-    basepcfmtstr = b'%%%dd %s(%%5.2f%%%%)\n'
-
-    def dfmtstr(max):
-        return basedfmtstr % len(str(max))
-
-    def pcfmtstr(max, padding=0):
-        return basepcfmtstr % (len(str(max)), b' ' * padding)
-
-    def pcfmt(value, total):
-        if total:
-            return (value, 100 * float(value) / total)
-        else:
-            return value, 100.0
-
-    ui.writenoi18n(b'format : %d\n' % format)
-    ui.writenoi18n(b'flags  : %s\n' % b', '.join(flags))
-
-    ui.write(b'\n')
-    fmt = pcfmtstr(totalsize)
-    fmt2 = dfmtstr(totalsize)
-    ui.writenoi18n(b'revisions     : ' + fmt2 % numrevs)
-    ui.writenoi18n(b'    merges    : ' + fmt % pcfmt(nummerges, numrevs))
-    ui.writenoi18n(
-        b'    normal    : ' + fmt % pcfmt(numrevs - nummerges, numrevs)
-    )
-    ui.writenoi18n(b'revisions     : ' + fmt2 % numrevs)
-    ui.writenoi18n(b'    empty     : ' + fmt % pcfmt(numempty, numrevs))
-    ui.writenoi18n(
-        b'                   text  : '
-        + fmt % pcfmt(numemptytext, numemptytext + numemptydelta)
-    )
-    ui.writenoi18n(
-        b'                   delta : '
-        + fmt % pcfmt(numemptydelta, numemptytext + numemptydelta)
-    )
-    ui.writenoi18n(
-        b'    snapshot  : ' + fmt % pcfmt(numfull + numsemi, numrevs)
-    )
-    for depth in sorted(numsnapdepth):
-        ui.write(
-            (b'      lvl-%-3d :       ' % depth)
-            + fmt % pcfmt(numsnapdepth[depth], numrevs)
-        )
-    ui.writenoi18n(b'    deltas    : ' + fmt % pcfmt(numdeltas, numrevs))
-    ui.writenoi18n(b'revision size : ' + fmt2 % totalsize)
-    ui.writenoi18n(
-        b'    snapshot  : ' + fmt % pcfmt(fulltotal + semitotal, totalsize)
-    )
-    for depth in sorted(numsnapdepth):
-        ui.write(
-            (b'      lvl-%-3d :       ' % depth)
-            + fmt % pcfmt(snaptotal[depth], totalsize)
-        )
-    ui.writenoi18n(b'    deltas    : ' + fmt % pcfmt(deltatotal, totalsize))
-
-    def fmtchunktype(chunktype):
-        if chunktype == b'empty':
-            return b'    %s     : ' % chunktype
-        elif chunktype in pycompat.bytestr(string.ascii_letters):
-            return b'    0x%s (%s)  : ' % (hex(chunktype), chunktype)
-        else:
-            return b'    0x%s      : ' % hex(chunktype)
-
-    ui.write(b'\n')
-    ui.writenoi18n(b'chunks        : ' + fmt2 % numrevs)
-    for chunktype in sorted(chunktypecounts):
-        ui.write(fmtchunktype(chunktype))
-        ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
-    ui.writenoi18n(b'chunks size   : ' + fmt2 % totalsize)
-    for chunktype in sorted(chunktypecounts):
-        ui.write(fmtchunktype(chunktype))
-        ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))
-
-    ui.write(b'\n')
-    fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
-    ui.writenoi18n(b'avg chain length  : ' + fmt % avgchainlen)
-    ui.writenoi18n(b'max chain length  : ' + fmt % maxchainlen)
-    ui.writenoi18n(b'max chain reach   : ' + fmt % maxchainspan)
-    ui.writenoi18n(b'compression ratio : ' + fmt % compratio)
-
-    if format > 0:
-        ui.write(b'\n')
-        ui.writenoi18n(
-            b'uncompressed data size (min/max/avg) : %d / %d / %d\n'
-            % tuple(datasize)
-        )
-    ui.writenoi18n(
-        b'full revision size (min/max/avg)     : %d / %d / %d\n'
-        % tuple(fullsize)
-    )
-    ui.writenoi18n(
-        b'inter-snapshot size (min/max/avg)    : %d / %d / %d\n'
-        % tuple(semisize)
-    )
-    for depth in sorted(snapsizedepth):
-        if depth == 0:
-            continue
-        ui.writenoi18n(
-            b'    level-%-3d (min/max/avg)          : %d / %d / %d\n'
-            % ((depth,) + tuple(snapsizedepth[depth]))
-        )
-    ui.writenoi18n(
-        b'delta size (min/max/avg)             : %d / %d / %d\n'
-        % tuple(deltasize)
-    )
-
-    if numdeltas > 0:
-        ui.write(b'\n')
-        fmt = pcfmtstr(numdeltas)
-        fmt2 = pcfmtstr(numdeltas, 4)
-        ui.writenoi18n(
-            b'deltas against prev  : ' + fmt % pcfmt(numprev, numdeltas)
-        )
-        if numprev > 0:
-            ui.writenoi18n(
-                b'    where prev = p1  : ' + fmt2 % pcfmt(nump1prev, numprev)
-            )
-            ui.writenoi18n(
-                b'    where prev = p2  : ' + fmt2 % pcfmt(nump2prev, numprev)
-            )
-            ui.writenoi18n(
-                b'    other            : ' + fmt2 % pcfmt(numoprev, numprev)
-            )
-        if gdelta:
-            ui.writenoi18n(
-                b'deltas against p1    : ' + fmt % pcfmt(nump1, numdeltas)
-            )
-            ui.writenoi18n(
-                b'deltas against p2    : ' + fmt % pcfmt(nump2, numdeltas)
-            )
-            ui.writenoi18n(
-                b'deltas against other : ' + fmt % pcfmt(numother, numdeltas)
-            )
+        revlog_debug.debug_revlog(ui, r)
+    return 0
 
 
 @command(
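With the new --source flag, debug-delta-find can replay the delta search as if the incoming data were a delta against a chosen base instead of a full text. Illustrative invocations (the revision number is an arbitrary example):

    hg debug-delta-find -m 4242 --source p1     # feed a delta against p1
    hg debug-delta-find -m 4242 --source prev   # delta against rev - 1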
--- a/mercurial/filelog.py	Thu Nov 24 10:34:34 2022 +0100
+++ b/mercurial/filelog.py	Fri Nov 25 15:14:40 2022 +0100
@@ -111,6 +111,7 @@
         assumehaveparentrevisions=False,
         deltamode=repository.CG_DELTAMODE_STD,
         sidedata_helpers=None,
+        debug_info=None,
     ):
         return self._revlog.emitrevisions(
             nodes,
@@ -119,6 +120,7 @@
             assumehaveparentrevisions=assumehaveparentrevisions,
             deltamode=deltamode,
             sidedata_helpers=sidedata_helpers,
+            debug_info=debug_info,
         )
 
     def addrevision(
@@ -151,6 +153,7 @@
         addrevisioncb=None,
         duplicaterevisioncb=None,
         maybemissingparents=False,
+        debug_info=None,
     ):
         if maybemissingparents:
             raise error.Abort(
@@ -171,6 +174,7 @@
                 transaction,
                 addrevisioncb=addrevisioncb,
                 duplicaterevisioncb=duplicaterevisioncb,
+                debug_info=debug_info,
             )
 
     def getstrippoint(self, minlink):
--- a/mercurial/filemerge.py	Thu Nov 24 10:34:34 2022 +0100
+++ b/mercurial/filemerge.py	Fri Nov 25 15:14:40 2022 +0100
@@ -158,7 +158,7 @@
             continue
         p = util.lookupreg(k, _toolstr(ui, tool, b"regname"))
         if p:
-            p = procutil.findexe(p + _toolstr(ui, tool, b"regappend", b""))
+            p = procutil.findexe(p + _toolstr(ui, tool, b"regappend"))
             if p:
                 return p
     exe = _toolstr(ui, tool, b"executable", tool)
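This simplification works because the companion configitems.py hunk registers merge-tools.*.regappend with an empty default, making the inline b"" fallback redundant. A typical use of the option on Windows (illustrative, following the documented kdiff3 pattern):

    [merge-tools]
    kdiff3.regkey = Software\KDiff3
    kdiff3.regappend = \kdiff3.exe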
--- a/mercurial/help.py	Thu Nov 24 10:34:34 2022 +0100
+++ b/mercurial/help.py	Fri Nov 25 15:14:40 2022 +0100
@@ -10,6 +10,18 @@
 import re
 import textwrap
 
+from typing import (
+    Callable,
+    Dict,
+    Iterable,
+    List,
+    Optional,
+    Set,
+    Tuple,
+    Union,
+    cast,
+)
+
 from .i18n import (
     _,
     gettext,
@@ -40,7 +52,16 @@
     stringutil,
 )
 
-_exclkeywords = {
+_DocLoader = Callable[[uimod.ui], bytes]
+# Old extensions may not register with a category
+_HelpEntry = Union["_HelpEntryNoCategory", "_HelpEntryWithCategory"]
+_HelpEntryNoCategory = Tuple[List[bytes], bytes, _DocLoader]
+_HelpEntryWithCategory = Tuple[List[bytes], bytes, _DocLoader, bytes]
+_SelectFn = Callable[[object], bool]
+_SynonymTable = Dict[bytes, List[bytes]]
+_TopicHook = Callable[[uimod.ui, bytes, bytes], bytes]
+
+_exclkeywords: Set[bytes] = {
     b"(ADVANCED)",
     b"(DEPRECATED)",
     b"(EXPERIMENTAL)",
@@ -56,7 +77,7 @@
 # Extensions with custom categories should insert them into this list
 # after/before the appropriate item, rather than replacing the list or
 # assuming absolute positions.
-CATEGORY_ORDER = [
+CATEGORY_ORDER: List[bytes] = [
     registrar.command.CATEGORY_REPO_CREATION,
     registrar.command.CATEGORY_REMOTE_REPO_MANAGEMENT,
     registrar.command.CATEGORY_COMMITTING,
@@ -74,7 +95,7 @@
 
 # Human-readable category names. These are translated.
 # Extensions with custom categories should add their names here.
-CATEGORY_NAMES = {
+CATEGORY_NAMES: Dict[bytes, bytes] = {
     registrar.command.CATEGORY_REPO_CREATION: b'Repository creation',
     registrar.command.CATEGORY_REMOTE_REPO_MANAGEMENT: b'Remote repository management',
     registrar.command.CATEGORY_COMMITTING: b'Change creation',
@@ -102,7 +123,7 @@
 # Extensions with custom categories should insert them into this list
 # after/before the appropriate item, rather than replacing the list or
 # assuming absolute positions.
-TOPIC_CATEGORY_ORDER = [
+TOPIC_CATEGORY_ORDER: List[bytes] = [
     TOPIC_CATEGORY_IDS,
     TOPIC_CATEGORY_OUTPUT,
     TOPIC_CATEGORY_CONFIG,
@@ -112,7 +133,7 @@
 ]
 
 # Human-readable topic category names. These are translated.
-TOPIC_CATEGORY_NAMES = {
+TOPIC_CATEGORY_NAMES: Dict[bytes, bytes] = {
     TOPIC_CATEGORY_IDS: b'Mercurial identifiers',
     TOPIC_CATEGORY_OUTPUT: b'Mercurial output',
     TOPIC_CATEGORY_CONFIG: b'Mercurial configuration',
@@ -122,7 +143,12 @@
 }
 
 
-def listexts(header, exts, indent=1, showdeprecated=False):
+def listexts(
+    header: bytes,
+    exts: Dict[bytes, bytes],
+    indent: int = 1,
+    showdeprecated: bool = False,
+) -> List[bytes]:
     '''return a text listing of the given extensions'''
     rst = []
     if exts:
@@ -135,7 +161,7 @@
     return rst
 
 
-def extshelp(ui):
+def extshelp(ui: uimod.ui) -> bytes:
     rst = loaddoc(b'extensions')(ui).splitlines(True)
     rst.extend(
         listexts(
@@ -153,7 +179,7 @@
     return doc
 
 
-def parsedefaultmarker(text):
+def parsedefaultmarker(text: bytes) -> Optional[Tuple[bytes, List[bytes]]]:
     """given a text 'abc (DEFAULT: def.ghi)',
     returns (b'abc', (b'def', b'ghi')). Otherwise return None"""
     if text[-1:] == b')':
@@ -164,7 +190,7 @@
             return text[:pos], item.split(b'.', 2)
 
 
-def optrst(header, options, verbose, ui):
+def optrst(header: bytes, options, verbose: bool, ui: uimod.ui) -> bytes:
     data = []
     multioccur = False
     for option in options:
@@ -220,13 +246,15 @@
     return b''.join(rst)
 
 
-def indicateomitted(rst, omitted, notomitted=None):
+def indicateomitted(
+    rst: List[bytes], omitted: bytes, notomitted: Optional[bytes] = None
+) -> None:
     rst.append(b'\n\n.. container:: omitted\n\n    %s\n\n' % omitted)
     if notomitted:
         rst.append(b'\n\n.. container:: notomitted\n\n    %s\n\n' % notomitted)
 
 
-def filtercmd(ui, cmd, func, kw, doc):
+def filtercmd(ui: uimod.ui, cmd: bytes, func, kw: bytes, doc: bytes) -> bool:
     if not ui.debugflag and cmd.startswith(b"debug") and kw != b"debug":
         # Debug command, and user is not looking for those.
         return True
@@ -249,11 +277,13 @@
     return False
 
 
-def filtertopic(ui, topic):
+def filtertopic(ui: uimod.ui, topic: bytes) -> bool:
     return ui.configbool(b'help', b'hidden-topic.%s' % topic, False)
 
 
-def topicmatch(ui, commands, kw):
+def topicmatch(
+    ui: uimod.ui, commands, kw: bytes
+) -> Dict[bytes, List[Tuple[bytes, bytes]]]:
     """Return help topics matching kw.
 
     Returns {'section': [(name, summary), ...], ...} where section is
@@ -326,10 +356,10 @@
     return results
 
 
-def loaddoc(topic, subdir=None):
+def loaddoc(topic: bytes, subdir: Optional[bytes] = None) -> _DocLoader:
     """Return a delayed loader for help/topic.txt."""
 
-    def loader(ui):
+    def loader(ui: uimod.ui) -> bytes:
         package = b'mercurial.helptext'
         if subdir:
             package += b'.' + subdir
@@ -342,7 +372,7 @@
     return loader
 
 
-internalstable = sorted(
+internalstable: List[_HelpEntryNoCategory] = sorted(
     [
         (
             [b'bid-merge'],
@@ -407,7 +437,7 @@
 )
 
 
-def internalshelp(ui):
+def internalshelp(ui: uimod.ui) -> bytes:
     """Generate the index for the "internals" topic."""
     lines = [
         b'To access a subtopic, use "hg help internals.{subtopic-name}"\n',
@@ -419,7 +449,7 @@
     return b''.join(lines)
 
 
-helptable = sorted(
+helptable: List[_HelpEntryWithCategory] = sorted(
     [
         (
             [b'bundlespec'],
@@ -581,20 +611,27 @@
 )
 
 # Maps topics with sub-topics to a list of their sub-topics.
-subtopics = {
+subtopics: Dict[bytes, List[_HelpEntryNoCategory]] = {
     b'internals': internalstable,
 }
 
 # Map topics to lists of callable taking the current topic help and
 # returning the updated version
-helphooks = {}
+helphooks: Dict[bytes, List[_TopicHook]] = {}
 
 
-def addtopichook(topic, rewriter):
+def addtopichook(topic: bytes, rewriter: _TopicHook) -> None:
     helphooks.setdefault(topic, []).append(rewriter)
 
 
-def makeitemsdoc(ui, topic, doc, marker, items, dedent=False):
+def makeitemsdoc(
+    ui: uimod.ui,
+    topic: bytes,
+    doc: bytes,
+    marker: bytes,
+    items: Dict[bytes, bytes],
+    dedent: bool = False,
+) -> bytes:
     """Extract docstring from the items key to function mapping, build a
     single documentation block and use it to overwrite the marker in doc.
     """
@@ -622,8 +659,10 @@
     return doc.replace(marker, entries)
 
 
-def addtopicsymbols(topic, marker, symbols, dedent=False):
-    def add(ui, topic, doc):
+def addtopicsymbols(
+    topic: bytes, marker: bytes, symbols, dedent: bool = False
+) -> None:
+    def add(ui: uimod.ui, topic: bytes, doc: bytes):
         return makeitemsdoc(ui, topic, doc, marker, symbols, dedent=dedent)
 
     addtopichook(topic, add)
@@ -647,7 +686,7 @@
 )
 
 
-def inserttweakrc(ui, topic, doc):
+def inserttweakrc(ui: uimod.ui, topic: bytes, doc: bytes) -> bytes:
     marker = b'.. tweakdefaultsmarker'
     repl = uimod.tweakrc
 
@@ -658,7 +697,9 @@
     return re.sub(br'( *)%s' % re.escape(marker), sub, doc)
 
 
-def _getcategorizedhelpcmds(ui, cmdtable, name, select=None):
+def _getcategorizedhelpcmds(
+    ui: uimod.ui, cmdtable, name: bytes, select: Optional[_SelectFn] = None
+) -> Tuple[Dict[bytes, List[bytes]], Dict[bytes, bytes], _SynonymTable]:
     # Category -> list of commands
     cats = {}
     # Command -> short description
@@ -687,16 +728,18 @@
     return cats, h, syns
 
 
-def _getcategorizedhelptopics(ui, topictable):
+def _getcategorizedhelptopics(
+    ui: uimod.ui, topictable: List[_HelpEntry]
+) -> Tuple[Dict[bytes, List[Tuple[bytes, bytes]]], Dict[bytes, List[bytes]]]:
     # Group commands by category.
     topiccats = {}
     syns = {}
     for topic in topictable:
         names, header, doc = topic[0:3]
         if len(topic) > 3 and topic[3]:
-            category = topic[3]
+            category: bytes = cast(bytes, topic[3])  # help pytype
         else:
-            category = TOPIC_CATEGORY_NONE
+            category: bytes = TOPIC_CATEGORY_NONE
 
         topicname = names[0]
         syns[topicname] = list(names)
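
The `cast` in the hunk above is there because indexing a mixed-type tuple
hands the checker a union type rather than `bytes`. A minimal standalone
sketch of the same situation (the `Entry` alias and `category_of` helper
are hypothetical, not part of help.py):

    from typing import Optional, Tuple, cast

    # hypothetical row shaped like a helptable entry:
    # (names, header, doc, category)
    Entry = Tuple[bytes, bytes, bytes, Optional[bytes]]

    def category_of(entry: Entry) -> bytes:
        if entry[3]:
            # cast() is a no-op at runtime; it only narrows the type the
            # checker infers for entry[3] from Optional[bytes] to bytes
            return cast(bytes, entry[3])
        return b'none'
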
@@ -709,15 +752,15 @@
 
 
 def help_(
-    ui,
+    ui: uimod.ui,
     commands,
-    name,
-    unknowncmd=False,
-    full=True,
-    subtopic=None,
-    fullname=None,
+    name: bytes,
+    unknowncmd: bool = False,
+    full: bool = True,
+    subtopic: Optional[bytes] = None,
+    fullname: Optional[bytes] = None,
     **opts
-):
+) -> bytes:
     """
     Generate the help for 'name' as unformatted restructured text. If
     'name' is None, describe the commands available.
@@ -725,7 +768,7 @@
 
     opts = pycompat.byteskwargs(opts)
 
-    def helpcmd(name, subtopic=None):
+    def helpcmd(name: bytes, subtopic: Optional[bytes]) -> List[bytes]:
         try:
             aliases, entry = cmdutil.findcmd(
                 name, commands.table, strict=unknowncmd
@@ -826,7 +869,7 @@
 
         return rst
 
-    def helplist(select=None, **opts):
+    def helplist(select: Optional[_SelectFn] = None, **opts) -> List[bytes]:
         cats, h, syns = _getcategorizedhelpcmds(
             ui, commands.table, name, select
         )
@@ -846,7 +889,7 @@
             else:
                 rst.append(_(b'list of commands:\n'))
 
-        def appendcmds(cmds):
+        def appendcmds(cmds: Iterable[bytes]) -> None:
             cmds = sorted(cmds)
             for c in cmds:
                 display_cmd = c
@@ -955,7 +998,7 @@
                 )
         return rst
 
-    def helptopic(name, subtopic=None):
+    def helptopic(name: bytes, subtopic: Optional[bytes] = None) -> List[bytes]:
         # Look for sub-topic entry first.
         header, doc = None, None
         if subtopic and name in subtopics:
@@ -998,7 +1041,7 @@
             pass
         return rst
 
-    def helpext(name, subtopic=None):
+    def helpext(name: bytes, subtopic: Optional[bytes] = None) -> List[bytes]:
         try:
             mod = extensions.find(name)
             doc = gettext(pycompat.getdoc(mod)) or _(b'no help text available')
@@ -1040,7 +1083,9 @@
             )
         return rst
 
-    def helpextcmd(name, subtopic=None):
+    def helpextcmd(
+        name: bytes, subtopic: Optional[bytes] = None
+    ) -> List[bytes]:
         cmd, ext, doc = extensions.disabledcmd(
             ui, name, ui.configbool(b'ui', b'strict')
         )
@@ -1127,8 +1172,14 @@
 
 
 def formattedhelp(
-    ui, commands, fullname, keep=None, unknowncmd=False, full=True, **opts
-):
+    ui: uimod.ui,
+    commands,
+    fullname: Optional[bytes],
+    keep: Optional[Iterable[bytes]] = None,
+    unknowncmd: bool = False,
+    full: bool = True,
+    **opts
+) -> bytes:
     """get help for a given topic (as a dotted name) as rendered rst
 
     Either returns the rendered help text or raises an exception.
--- a/mercurial/helptext/config.txt	Thu Nov 24 10:34:34 2022 +0100
+++ b/mercurial/helptext/config.txt	Fri Nov 25 15:14:40 2022 +0100
@@ -2281,6 +2281,21 @@
     To fix affected revisions that already exist within the repository, one can
     use :hg:`debug-repair-issue-6528`.
 
+.. container:: verbose
+
+    ``revlog.delta-parent-search.candidate-group-chunk-size``
+        Tune the number of delta bases the storage will consider in the
+        same "round" of search. In some very rare cases, using a smaller value
+        might result in faster processing at the possible expense of storage
+        space, while using larger values might result in slower processing at the
+        possible benefit of storage space. A value of "0" means no limitation.
+
+        default: no limitation
+
+        It is unlikely that you'll need to tune this configuration. If you
+        think you do, consider talking with the Mercurial developer community
+        about your repositories.
+
 ``revlog.optimize-delta-parent-choice``
     When storing a merge revision, both parents will be equally considered as
     a possible delta base. This results in better delta selection and improved
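
For readers who want to experiment with this knob, a minimal hgrc entry
could look like the following; the value 16 is purely illustrative, and
the default of 0 leaves the search unlimited:

    [storage]
    revlog.delta-parent-search.candidate-group-chunk-size = 16
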
--- a/mercurial/helptext/rust.txt	Thu Nov 24 10:34:34 2022 +0100
+++ b/mercurial/helptext/rust.txt	Fri Nov 25 15:14:40 2022 +0100
@@ -76,8 +76,8 @@
 MSRV
 ====
 
-The minimum supported Rust version is currently 1.48.0. The project's policy is
-to follow the version from Debian stable, to make the distributions' job easier.
+The minimum supported Rust version is currently 1.61.0. The project's policy is
+to follow the version from Debian testing, to make the distributions' job easier.
 
 rhg
 ===
--- a/mercurial/localrepo.py	Thu Nov 24 10:34:34 2022 +0100
+++ b/mercurial/localrepo.py	Fri Nov 25 15:14:40 2022 +0100
@@ -15,6 +15,10 @@
 import weakref
 
 from concurrent import futures
+from typing import (
+    Optional,
+)
+
 from .i18n import _
 from .node import (
     bin,
@@ -526,7 +530,7 @@
     return set(read(b'requires').splitlines())
 
 
-def makelocalrepository(baseui, path, intents=None):
+def makelocalrepository(baseui, path: bytes, intents=None):
     """Create a local repository object.
 
     Given arguments needed to construct a local repository, this function
@@ -845,7 +849,13 @@
     )
 
 
-def loadhgrc(ui, wdirvfs, hgvfs, requirements, sharedvfs=None):
+def loadhgrc(
+    ui,
+    wdirvfs: vfsmod.vfs,
+    hgvfs: vfsmod.vfs,
+    requirements,
+    sharedvfs: Optional[vfsmod.vfs] = None,
+):
     """Load hgrc files/content into a ui instance.
 
     This is called during repository opening to load any additional
@@ -1058,6 +1068,8 @@
         options[b'revlogv2'] = True
     if requirementsmod.CHANGELOGV2_REQUIREMENT in requirements:
         options[b'changelogv2'] = True
+        cmp_rank = ui.configbool(b'experimental', b'changelog-v2.compute-rank')
+        options[b'changelogv2.compute-rank'] = cmp_rank
 
     if requirementsmod.GENERALDELTA_REQUIREMENT in requirements:
         options[b'generaldelta'] = True
@@ -1071,6 +1083,11 @@
         b'storage', b'revlog.optimize-delta-parent-choice'
     )
     options[b'deltabothparents'] = deltabothparents
+    dps_cgds = ui.configint(
+        b'storage',
+        b'revlog.delta-parent-search.candidate-group-chunk-size',
+    )
+    options[b'delta-parent-search.candidate-group-chunk-size'] = dps_cgds
     options[b'debug-delta'] = ui.configbool(b'debug', b'revlog.debug-delta')
 
     issue6528 = ui.configbool(b'storage', b'revlog.issue6528.fix-incoming')
@@ -1323,15 +1340,15 @@
         self,
         baseui,
         ui,
-        origroot,
-        wdirvfs,
-        hgvfs,
+        origroot: bytes,
+        wdirvfs: vfsmod.vfs,
+        hgvfs: vfsmod.vfs,
         requirements,
         supportedrequirements,
-        sharedpath,
+        sharedpath: bytes,
         store,
-        cachevfs,
-        wcachevfs,
+        cachevfs: vfsmod.vfs,
+        wcachevfs: vfsmod.vfs,
         features,
         intents=None,
     ):
@@ -1977,7 +1994,7 @@
     def __iter__(self):
         return iter(self.changelog)
 
-    def revs(self, expr, *args):
+    def revs(self, expr: bytes, *args):
         """Find revisions matching a revset.
 
         The revset is specified as a string ``expr`` that may contain
@@ -1993,7 +2010,7 @@
         tree = revsetlang.spectree(expr, *args)
         return revset.makematcher(tree)(self)
 
-    def set(self, expr, *args):
+    def set(self, expr: bytes, *args):
         """Find revisions matching a revset and emit changectx instances.
 
         This is a convenience wrapper around ``revs()`` that iterates the
@@ -2005,7 +2022,7 @@
         for r in self.revs(expr, *args):
             yield self[r]
 
-    def anyrevs(self, specs, user=False, localalias=None):
+    def anyrevs(self, specs: bytes, user=False, localalias=None):
         """Find revisions matching one of the given revsets.
 
         Revset aliases from the configuration are not expanded by default. To
@@ -2030,7 +2047,7 @@
             m = revset.matchany(None, specs, localalias=localalias)
         return m(self)
 
-    def url(self):
+    def url(self) -> bytes:
         return b'file:' + self.root
 
     def hook(self, name, throw=False, **args):
@@ -2229,7 +2246,7 @@
             return b'store'
         return None
 
-    def wjoin(self, f, *insidef):
+    def wjoin(self, f: bytes, *insidef: bytes) -> bytes:
         return self.vfs.reljoin(self.root, f, *insidef)
 
     def setparents(self, p1, p2=None):
@@ -2238,17 +2255,17 @@
         self[None].setparents(p1, p2)
         self._quick_access_changeid_invalidate()
 
-    def filectx(self, path, changeid=None, fileid=None, changectx=None):
+    def filectx(self, path: bytes, changeid=None, fileid=None, changectx=None):
         """changeid must be a changeset revision, if specified.
         fileid can be a file revision or node."""
         return context.filectx(
             self, path, changeid, fileid, changectx=changectx
         )
 
-    def getcwd(self):
+    def getcwd(self) -> bytes:
         return self.dirstate.getcwd()
 
-    def pathto(self, f, cwd=None):
+    def pathto(self, f: bytes, cwd: Optional[bytes] = None) -> bytes:
         return self.dirstate.pathto(f, cwd)
 
     def _loadfilter(self, filter):
@@ -2300,14 +2317,21 @@
     def adddatafilter(self, name, filter):
         self._datafilters[name] = filter
 
-    def wread(self, filename):
+    def wread(self, filename: bytes) -> bytes:
         if self.wvfs.islink(filename):
             data = self.wvfs.readlink(filename)
         else:
             data = self.wvfs.read(filename)
         return self._filter(self._encodefilterpats, filename, data)
 
-    def wwrite(self, filename, data, flags, backgroundclose=False, **kwargs):
+    def wwrite(
+        self,
+        filename: bytes,
+        data: bytes,
+        flags: bytes,
+        backgroundclose=False,
+        **kwargs
+    ) -> int:
         """write ``data`` into ``filename`` in the working directory
 
         This returns length of written (maybe decoded) data.
@@ -2325,7 +2349,7 @@
                 self.wvfs.setflags(filename, False, False)
         return len(data)
 
-    def wwritedata(self, filename, data):
+    def wwritedata(self, filename: bytes, data: bytes) -> bytes:
         return self._filter(self._decodefilterpats, filename, data)
 
     def currenttransaction(self):
@@ -3520,13 +3544,13 @@
     return a
 
 
-def undoname(fn):
+def undoname(fn: bytes) -> bytes:
     base, name = os.path.split(fn)
     assert name.startswith(b'journal')
     return os.path.join(base, name.replace(b'journal', b'undo', 1))
 
 
-def instance(ui, path, create, intents=None, createopts=None):
+def instance(ui, path: bytes, create, intents=None, createopts=None):
 
     # prevent cyclic import localrepo -> upgrade -> localrepo
     from . import upgrade
@@ -3543,7 +3567,7 @@
     return repo
 
 
-def islocal(path):
+def islocal(path: bytes) -> bool:
     return True
 
 
@@ -3803,7 +3827,7 @@
     return {k: v for k, v in createopts.items() if k not in known}
 
 
-def createrepository(ui, path, createopts=None, requirements=None):
+def createrepository(ui, path: bytes, createopts=None, requirements=None):
     """Create a new repository in a vfs.
 
     ``path`` path to the new repo's working directory.
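
A rough sketch of how the module-level entry points annotated above fit
together; real callers normally reach a repository through
mercurial.hg.repository(), and the path here is illustrative:

    from mercurial import localrepo
    from mercurial import ui as uimod

    ui = uimod.ui.load()
    # open an existing repository directly through the typed helper
    repo = localrepo.instance(ui, b'/tmp/some-repo', create=False)
    print(repo.url())                             # b'file:/tmp/some-repo'
    print(localrepo.islocal(b'/tmp/some-repo'))   # True
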
--- a/mercurial/manifest.py	Thu Nov 24 10:34:34 2022 +0100
+++ b/mercurial/manifest.py	Fri Nov 25 15:14:40 2022 +0100
@@ -1836,6 +1836,7 @@
         assumehaveparentrevisions=False,
         deltamode=repository.CG_DELTAMODE_STD,
         sidedata_helpers=None,
+        debug_info=None,
     ):
         return self._revlog.emitrevisions(
             nodes,
@@ -1844,6 +1845,7 @@
             assumehaveparentrevisions=assumehaveparentrevisions,
             deltamode=deltamode,
             sidedata_helpers=sidedata_helpers,
+            debug_info=debug_info,
         )
 
     def addgroup(
@@ -1854,6 +1856,7 @@
         alwayscache=False,
         addrevisioncb=None,
         duplicaterevisioncb=None,
+        debug_info=None,
     ):
         return self._revlog.addgroup(
             deltas,
@@ -1862,6 +1865,7 @@
             alwayscache=alwayscache,
             addrevisioncb=addrevisioncb,
             duplicaterevisioncb=duplicaterevisioncb,
+            debug_info=debug_info,
         )
 
     def rawsize(self, rev):
--- a/mercurial/pure/bdiff.py	Thu Nov 24 10:34:34 2022 +0100
+++ b/mercurial/pure/bdiff.py	Fri Nov 25 15:14:40 2022 +0100
@@ -10,8 +10,13 @@
 import re
 import struct
 
+from typing import (
+    List,
+    Tuple,
+)
 
-def splitnewlines(text):
+
+def splitnewlines(text: bytes) -> List[bytes]:
     '''like str.splitlines, but only split on newlines.'''
     lines = [l + b'\n' for l in text.split(b'\n')]
     if lines:
@@ -22,7 +27,9 @@
     return lines
 
 
-def _normalizeblocks(a, b, blocks):
+def _normalizeblocks(
+    a: List[bytes], b: List[bytes], blocks
+) -> List[Tuple[int, int, int]]:
     prev = None
     r = []
     for curr in blocks:
@@ -57,7 +64,7 @@
     return r
 
 
-def bdiff(a, b):
+def bdiff(a: bytes, b: bytes) -> bytes:
     a = bytes(a).splitlines(True)
     b = bytes(b).splitlines(True)
 
@@ -84,7 +91,7 @@
     return b"".join(bin)
 
 
-def blocks(a, b):
+def blocks(a: bytes, b: bytes) -> List[Tuple[int, int, int, int]]:
     an = splitnewlines(a)
     bn = splitnewlines(b)
     d = difflib.SequenceMatcher(None, an, bn).get_matching_blocks()
@@ -92,7 +99,7 @@
     return [(i, i + n, j, j + n) for (i, j, n) in d]
 
 
-def fixws(text, allws):
+def fixws(text: bytes, allws: bool) -> bytes:
     if allws:
         text = re.sub(b'[ \t\r]+', b'', text)
     else:
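
A quick illustration of the pure-Python bdiff API these annotations
describe; the inputs are arbitrary:

    from mercurial.pure import bdiff

    old = b"one\ntwo\nthree\n"
    new = b"one\nTWO\nthree\n"

    # matching regions as (a_start, a_end, b_start, b_end) line ranges,
    # including difflib's zero-length sentinel block at the end
    print(bdiff.blocks(old, new))  # [(0, 1, 0, 1), (2, 3, 2, 3), (3, 3, 3, 3)]
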
--- a/mercurial/pure/mpatch.py	Thu Nov 24 10:34:34 2022 +0100
+++ b/mercurial/pure/mpatch.py	Fri Nov 25 15:14:40 2022 +0100
@@ -9,6 +9,11 @@
 import io
 import struct
 
+from typing import (
+    List,
+    Tuple,
+)
+
 
 stringio = io.BytesIO
 
@@ -28,7 +33,9 @@
 # temporary string buffers.
 
 
-def _pull(dst, src, l):  # pull l bytes from src
+def _pull(
+    dst: List[Tuple[int, int]], src: List[Tuple[int, int]], l: int
+) -> None:  # pull l bytes from src
     while l:
         f = src.pop()
         if f[0] > l:  # do we need to split?
@@ -39,7 +46,7 @@
         l -= f[0]
 
 
-def _move(m, dest, src, count):
+def _move(m: stringio, dest: int, src: int, count: int) -> None:
     """move count bytes from src to dest
 
     The file pointer is left at the end of dest.
@@ -50,7 +57,9 @@
     m.write(buf)
 
 
-def _collect(m, buf, list):
+def _collect(
+    m: stringio, buf: int, list: List[Tuple[int, int]]
+) -> Tuple[int, int]:
     start = buf
     for l, p in reversed(list):
         _move(m, buf, p, l)
@@ -58,7 +67,7 @@
     return (buf - start, start)
 
 
-def patches(a, bins):
+def patches(a: bytes, bins: List[bytes]) -> bytes:
     if not bins:
         return a
 
@@ -111,7 +120,7 @@
     return m.read(t[0])
 
 
-def patchedsize(orig, delta):
+def patchedsize(orig: int, delta: bytes) -> int:
     outlen, last, bin = 0, 0, 0
     binend = len(delta)
     data = 12
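
A worked example of the (length, offset) fragment stacks that _pull
manipulates; the fragment values are made up:

    from mercurial.pure import mpatch

    src = [(4, 0), (4, 4)]     # (length, start) pairs, top of the stack last
    dst = []
    mpatch._pull(dst, src, 6)  # take 6 bytes: one whole fragment plus a split
    print(dst)                 # [(4, 4), (2, 2)]
    print(src)                 # [(2, 0)]
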
--- a/mercurial/revlog.py	Thu Nov 24 10:34:34 2022 +0100
+++ b/mercurial/revlog.py	Fri Nov 25 15:14:40 2022 +0100
@@ -44,6 +44,7 @@
     FLAG_INLINE_DATA,
     INDEX_HEADER,
     KIND_CHANGELOG,
+    KIND_FILELOG,
     RANK_UNKNOWN,
     REVLOGV0,
     REVLOGV1,
@@ -347,6 +348,7 @@
         self._chunkcachesize = 65536
         self._maxchainlen = None
         self._deltabothparents = True
+        self._candidate_group_chunk_size = 0
         self._debug_delta = False
         self.index = None
         self._docket = None
@@ -363,6 +365,11 @@
         self._srdensitythreshold = 0.50
         self._srmingapsize = 262144
 
+        # other optional features
+
+        # the rank configuration might be removed once computing it has no
+        # performance impact
+        self._compute_rank = False
+
         # Make copy of flag processors so each revlog instance can support
         # custom flags.
         self._flagprocessors = dict(flagutil.flagprocessors)
@@ -404,6 +411,7 @@
 
         if b'changelogv2' in opts and self.revlog_kind == KIND_CHANGELOG:
             new_header = CHANGELOGV2
+            self._compute_rank = opts.get(b'changelogv2.compute-rank', True)
         elif b'revlogv2' in opts:
             new_header = REVLOGV2
         elif b'revlogv1' in opts:
@@ -421,6 +429,9 @@
             self._maxchainlen = opts[b'maxchainlen']
         if b'deltabothparents' in opts:
             self._deltabothparents = opts[b'deltabothparents']
+        dps_cgds = opts.get(b'delta-parent-search.candidate-group-chunk-size')
+        if dps_cgds:
+            self._candidate_group_chunk_size = dps_cgds
         self._lazydelta = bool(opts.get(b'lazydelta', True))
         self._lazydeltabase = False
         if self._lazydelta:
@@ -505,7 +516,6 @@
             self._docket = docket
             self._docket_file = entry_point
         else:
-            entry_data = b''
             self._initempty = True
             entry_data = self._get_data(entry_point, mmapindexthreshold)
             if len(entry_data) > 0:
@@ -653,9 +663,12 @@
     @util.propertycache
     def display_id(self):
         """The public facing "ID" of the revlog that we use in message"""
-        # Maybe we should build a user facing representation of
-        # revlog.target instead of using `self.radix`
-        return self.radix
+        if self.revlog_kind == KIND_FILELOG:
+            # Reference the file without the "data/" prefix, so it is familiar
+            # to the user.
+            return self.target[1]
+        else:
+            return self.radix
 
     def _get_decompressor(self, t):
         try:
@@ -2492,7 +2505,7 @@
             sidedata_offset = 0
 
         rank = RANK_UNKNOWN
-        if self._format_version == CHANGELOGV2:
+        if self._compute_rank:
             if (p1r, p2r) == (nullrev, nullrev):
                 rank = 1
             elif p1r != nullrev and p2r == nullrev:
@@ -2637,6 +2650,7 @@
         alwayscache=False,
         addrevisioncb=None,
         duplicaterevisioncb=None,
+        debug_info=None,
     ):
         """
         add a delta group
@@ -2662,6 +2676,7 @@
                 deltacomputer = deltautil.deltacomputer(
                     self,
                     write_debug=write_debug,
+                    debug_info=debug_info,
                 )
                 # loop through our set of deltas
                 for data in deltas:
@@ -2886,6 +2901,7 @@
         assumehaveparentrevisions=False,
         deltamode=repository.CG_DELTAMODE_STD,
         sidedata_helpers=None,
+        debug_info=None,
     ):
         if nodesorder not in (b'nodes', b'storage', b'linear', None):
             raise error.ProgrammingError(
@@ -2915,6 +2931,7 @@
             revisiondata=revisiondata,
             assumehaveparentrevisions=assumehaveparentrevisions,
             sidedata_helpers=sidedata_helpers,
+            debug_info=debug_info,
         )
 
     DELTAREUSEALWAYS = b'always'
--- a/mercurial/revlogutils/debug.py	Thu Nov 24 10:34:34 2022 +0100
+++ b/mercurial/revlogutils/debug.py	Fri Nov 25 15:14:40 2022 +0100
@@ -6,8 +6,12 @@
 # This software may be used and distributed according to the terms of the
 # GNU General Public License version 2 or any later version.
 
+import collections
+import string
+
 from .. import (
     node as nodemod,
+    util,
 )
 
 from . import (
@@ -216,3 +220,402 @@
         fm.plain(b'\n')
 
     fm.end()
+
+
+def dump(ui, revlog):
+    """perform the work for `hg debugrevlog --dump"""
+    # XXX seems redundant with debug index ?
+    r = revlog
+    numrevs = len(r)
+    ui.write(
+        (
+            b"# rev p1rev p2rev start   end deltastart base   p1   p2"
+            b" rawsize totalsize compression heads chainlen\n"
+        )
+    )
+    ts = 0
+    heads = set()
+
+    for rev in range(numrevs):
+        dbase = r.deltaparent(rev)
+        if dbase == -1:
+            dbase = rev
+        cbase = r.chainbase(rev)
+        clen = r.chainlen(rev)
+        p1, p2 = r.parentrevs(rev)
+        rs = r.rawsize(rev)
+        ts = ts + rs
+        heads -= set(r.parentrevs(rev))
+        heads.add(rev)
+        try:
+            compression = ts / r.end(rev)
+        except ZeroDivisionError:
+            compression = 0
+        ui.write(
+            b"%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
+            b"%11d %5d %8d\n"
+            % (
+                rev,
+                p1,
+                p2,
+                r.start(rev),
+                r.end(rev),
+                r.start(dbase),
+                r.start(cbase),
+                r.start(p1),
+                r.start(p2),
+                rs,
+                ts,
+                compression,
+                len(heads),
+                clen,
+            )
+        )
+
+
+def debug_revlog(ui, revlog):
+    """code for `hg debugrevlog`"""
+    r = revlog
+    format = r._format_version
+    v = r._format_flags
+    flags = []
+    gdelta = False
+    if v & constants.FLAG_INLINE_DATA:
+        flags.append(b'inline')
+    if v & constants.FLAG_GENERALDELTA:
+        gdelta = True
+        flags.append(b'generaldelta')
+    if not flags:
+        flags = [b'(none)']
+
+    ### the total size of stored content if uncompressed.
+    full_text_total_size = 0
+    ### tracks merge vs single parent
+    nummerges = 0
+
+    ### tracks the ways the deltas are built
+    # nodelta
+    numempty = 0
+    numemptytext = 0
+    numemptydelta = 0
+    # full file content
+    numfull = 0
+    # intermediate snapshot against a prior snapshot
+    numsemi = 0
+    # snapshot count per depth
+    numsnapdepth = collections.defaultdict(lambda: 0)
+    # number of snapshots with a non-ancestor delta
+    numsnapdepth_nad = collections.defaultdict(lambda: 0)
+    # delta against previous revision
+    numprev = 0
+    # delta against prev, where prev is a non-ancestor
+    numprev_nad = 0
+    # delta against first or second parent (not prev)
+    nump1 = 0
+    nump2 = 0
+    # delta against neither prev nor parents
+    numother = 0
+    # delta against other that is a non-ancestor
+    numother_nad = 0
+    # delta against prev that are also first or second parent
+    # (details of `numprev`)
+    nump1prev = 0
+    nump2prev = 0
+
+    # data about delta chain of each revs
+    chainlengths = []
+    chainbases = []
+    chainspans = []
+
+    # data about each revision
+    datasize = [None, 0, 0]
+    fullsize = [None, 0, 0]
+    semisize = [None, 0, 0]
+    # snapshot count per depth
+    snapsizedepth = collections.defaultdict(lambda: [None, 0, 0])
+    deltasize = [None, 0, 0]
+    chunktypecounts = {}
+    chunktypesizes = {}
+
+    def addsize(size, l):
+        if l[0] is None or size < l[0]:
+            l[0] = size
+        if size > l[1]:
+            l[1] = size
+        l[2] += size
+
+    numrevs = len(r)
+    for rev in range(numrevs):
+        p1, p2 = r.parentrevs(rev)
+        delta = r.deltaparent(rev)
+        if format > 0:
+            s = r.rawsize(rev)
+            full_text_total_size += s
+            addsize(s, datasize)
+        if p2 != nodemod.nullrev:
+            nummerges += 1
+        size = r.length(rev)
+        if delta == nodemod.nullrev:
+            chainlengths.append(0)
+            chainbases.append(r.start(rev))
+            chainspans.append(size)
+            if size == 0:
+                numempty += 1
+                numemptytext += 1
+            else:
+                numfull += 1
+                numsnapdepth[0] += 1
+                addsize(size, fullsize)
+                addsize(size, snapsizedepth[0])
+        else:
+            nad = (
+                delta != p1 and delta != p2 and not r.isancestorrev(delta, rev)
+            )
+            chainlengths.append(chainlengths[delta] + 1)
+            baseaddr = chainbases[delta]
+            revaddr = r.start(rev)
+            chainbases.append(baseaddr)
+            chainspans.append((revaddr - baseaddr) + size)
+            if size == 0:
+                numempty += 1
+                numemptydelta += 1
+            elif r.issnapshot(rev):
+                addsize(size, semisize)
+                numsemi += 1
+                depth = r.snapshotdepth(rev)
+                numsnapdepth[depth] += 1
+                if nad:
+                    numsnapdepth_nad[depth] += 1
+                addsize(size, snapsizedepth[depth])
+            else:
+                addsize(size, deltasize)
+                if delta == rev - 1:
+                    numprev += 1
+                    if delta == p1:
+                        nump1prev += 1
+                    elif delta == p2:
+                        nump2prev += 1
+                    elif nad:
+                        numprev_nad += 1
+                elif delta == p1:
+                    nump1 += 1
+                elif delta == p2:
+                    nump2 += 1
+                elif delta != nodemod.nullrev:
+                    numother += 1
+                    numother_nad += 1
+
+        # Obtain data on the raw chunks in the revlog.
+        if util.safehasattr(r, '_getsegmentforrevs'):
+            segment = r._getsegmentforrevs(rev, rev)[1]
+        else:
+            segment = r._revlog._getsegmentforrevs(rev, rev)[1]
+        if segment:
+            chunktype = bytes(segment[0:1])
+        else:
+            chunktype = b'empty'
+
+        if chunktype not in chunktypecounts:
+            chunktypecounts[chunktype] = 0
+            chunktypesizes[chunktype] = 0
+
+        chunktypecounts[chunktype] += 1
+        chunktypesizes[chunktype] += size
+
+    # Adjust size min value for empty cases
+    for size in (datasize, fullsize, semisize, deltasize):
+        if size[0] is None:
+            size[0] = 0
+
+    numdeltas = numrevs - numfull - numempty - numsemi
+    numoprev = numprev - nump1prev - nump2prev - numprev_nad
+    num_other_ancestors = numother - numother_nad
+    totalrawsize = datasize[2]
+    datasize[2] /= numrevs
+    fulltotal = fullsize[2]
+    if numfull == 0:
+        fullsize[2] = 0
+    else:
+        fullsize[2] /= numfull
+    semitotal = semisize[2]
+    snaptotal = {}
+    if numsemi > 0:
+        semisize[2] /= numsemi
+    for depth in snapsizedepth:
+        snaptotal[depth] = snapsizedepth[depth][2]
+        snapsizedepth[depth][2] /= numsnapdepth[depth]
+
+    deltatotal = deltasize[2]
+    if numdeltas > 0:
+        deltasize[2] /= numdeltas
+    totalsize = fulltotal + semitotal + deltatotal
+    avgchainlen = sum(chainlengths) / numrevs
+    maxchainlen = max(chainlengths)
+    maxchainspan = max(chainspans)
+    compratio = 1
+    if totalsize:
+        compratio = totalrawsize / totalsize
+
+    basedfmtstr = b'%%%dd\n'
+    basepcfmtstr = b'%%%dd %s(%%5.2f%%%%)\n'
+
+    def dfmtstr(max):
+        return basedfmtstr % len(str(max))
+
+    def pcfmtstr(max, padding=0):
+        return basepcfmtstr % (len(str(max)), b' ' * padding)
+
+    def pcfmt(value, total):
+        if total:
+            return (value, 100 * float(value) / total)
+        else:
+            return value, 100.0
+
+    ui.writenoi18n(b'format : %d\n' % format)
+    ui.writenoi18n(b'flags  : %s\n' % b', '.join(flags))
+
+    ui.write(b'\n')
+    fmt = pcfmtstr(totalsize)
+    fmt2 = dfmtstr(totalsize)
+    ui.writenoi18n(b'revisions     : ' + fmt2 % numrevs)
+    ui.writenoi18n(b'    merges    : ' + fmt % pcfmt(nummerges, numrevs))
+    ui.writenoi18n(
+        b'    normal    : ' + fmt % pcfmt(numrevs - nummerges, numrevs)
+    )
+    ui.writenoi18n(b'revisions     : ' + fmt2 % numrevs)
+    ui.writenoi18n(b'    empty     : ' + fmt % pcfmt(numempty, numrevs))
+    ui.writenoi18n(
+        b'                   text  : '
+        + fmt % pcfmt(numemptytext, numemptytext + numemptydelta)
+    )
+    ui.writenoi18n(
+        b'                   delta : '
+        + fmt % pcfmt(numemptydelta, numemptytext + numemptydelta)
+    )
+    ui.writenoi18n(
+        b'    snapshot  : ' + fmt % pcfmt(numfull + numsemi, numrevs)
+    )
+    for depth in sorted(numsnapdepth):
+        base = b'      lvl-%-3d :       ' % depth
+        count = fmt % pcfmt(numsnapdepth[depth], numrevs)
+        pieces = [base, count]
+        if numsnapdepth_nad[depth]:
+            pieces[-1] = count = count[:-1]  # drop the final '\n'
+            more = b'  non-ancestor-bases: '
+            anc_count = fmt
+            anc_count %= pcfmt(numsnapdepth_nad[depth], numsnapdepth[depth])
+            pieces.append(more)
+            pieces.append(anc_count)
+        ui.write(b''.join(pieces))
+    ui.writenoi18n(b'    deltas    : ' + fmt % pcfmt(numdeltas, numrevs))
+    ui.writenoi18n(b'revision size : ' + fmt2 % totalsize)
+    ui.writenoi18n(
+        b'    snapshot  : ' + fmt % pcfmt(fulltotal + semitotal, totalsize)
+    )
+    for depth in sorted(numsnapdepth):
+        ui.write(
+            (b'      lvl-%-3d :       ' % depth)
+            + fmt % pcfmt(snaptotal[depth], totalsize)
+        )
+    ui.writenoi18n(b'    deltas    : ' + fmt % pcfmt(deltatotal, totalsize))
+
+    letters = string.ascii_letters.encode('ascii')
+
+    def fmtchunktype(chunktype):
+        if chunktype == b'empty':
+            return b'    %s     : ' % chunktype
+        elif chunktype in letters:
+            return b'    0x%s (%s)  : ' % (nodemod.hex(chunktype), chunktype)
+        else:
+            return b'    0x%s      : ' % nodemod.hex(chunktype)
+
+    ui.write(b'\n')
+    ui.writenoi18n(b'chunks        : ' + fmt2 % numrevs)
+    for chunktype in sorted(chunktypecounts):
+        ui.write(fmtchunktype(chunktype))
+        ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
+    ui.writenoi18n(b'chunks size   : ' + fmt2 % totalsize)
+    for chunktype in sorted(chunktypecounts):
+        ui.write(fmtchunktype(chunktype))
+        ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))
+
+    ui.write(b'\n')
+    b_total = b"%d" % full_text_total_size
+    p_total = []
+    while len(b_total) > 3:
+        p_total.append(b_total[-3:])
+        b_total = b_total[:-3]
+    p_total.append(b_total)
+    p_total.reverse()
+    b_total = b' '.join(p_total)
+
+    ui.write(b'\n')
+    ui.writenoi18n(b'total-stored-content: %s bytes\n' % b_total)
+    ui.write(b'\n')
+    fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
+    ui.writenoi18n(b'avg chain length  : ' + fmt % avgchainlen)
+    ui.writenoi18n(b'max chain length  : ' + fmt % maxchainlen)
+    ui.writenoi18n(b'max chain reach   : ' + fmt % maxchainspan)
+    ui.writenoi18n(b'compression ratio : ' + fmt % compratio)
+
+    if format > 0:
+        ui.write(b'\n')
+        ui.writenoi18n(
+            b'uncompressed data size (min/max/avg) : %d / %d / %d\n'
+            % tuple(datasize)
+        )
+    ui.writenoi18n(
+        b'full revision size (min/max/avg)     : %d / %d / %d\n'
+        % tuple(fullsize)
+    )
+    ui.writenoi18n(
+        b'inter-snapshot size (min/max/avg)    : %d / %d / %d\n'
+        % tuple(semisize)
+    )
+    for depth in sorted(snapsizedepth):
+        if depth == 0:
+            continue
+        ui.writenoi18n(
+            b'    level-%-3d (min/max/avg)          : %d / %d / %d\n'
+            % ((depth,) + tuple(snapsizedepth[depth]))
+        )
+    ui.writenoi18n(
+        b'delta size (min/max/avg)             : %d / %d / %d\n'
+        % tuple(deltasize)
+    )
+
+    if numdeltas > 0:
+        ui.write(b'\n')
+        fmt = pcfmtstr(numdeltas)
+        fmt2 = pcfmtstr(numdeltas, 4)
+        ui.writenoi18n(
+            b'deltas against prev  : ' + fmt % pcfmt(numprev, numdeltas)
+        )
+        if numprev > 0:
+            ui.writenoi18n(
+                b'    where prev = p1  : ' + fmt2 % pcfmt(nump1prev, numprev)
+            )
+            ui.writenoi18n(
+                b'    where prev = p2  : ' + fmt2 % pcfmt(nump2prev, numprev)
+            )
+            ui.writenoi18n(
+                b'    other-ancestor   : ' + fmt2 % pcfmt(numoprev, numprev)
+            )
+            ui.writenoi18n(
+                b'    unrelated        : ' + fmt2 % pcfmt(numprev_nad, numprev)
+            )
+        if gdelta:
+            ui.writenoi18n(
+                b'deltas against p1    : ' + fmt % pcfmt(nump1, numdeltas)
+            )
+            ui.writenoi18n(
+                b'deltas against p2    : ' + fmt % pcfmt(nump2, numdeltas)
+            )
+            ui.writenoi18n(
+                b'deltas against ancs  : '
+                + fmt % pcfmt(num_other_ancestors, numdeltas)
+            )
+            ui.writenoi18n(
+                b'deltas against other : '
+                + fmt % pcfmt(numother_nad, numdeltas)
+            )
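
The thousands-grouping loop near the end of debug_revlog can be checked in
isolation; this standalone copy of its logic (with a made-up value) shows
the expected spacing:

    def group_thousands(n: int) -> bytes:
        # same digit-grouping logic as the total-stored-content block above
        b_total = b"%d" % n
        parts = []
        while len(b_total) > 3:
            parts.append(b_total[-3:])
            b_total = b_total[:-3]
        parts.append(b_total)
        parts.reverse()
        return b' '.join(parts)

    assert group_thousands(1234567) == b'1 234 567'
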
--- a/mercurial/revlogutils/deltas.py	Thu Nov 24 10:34:34 2022 +0100
+++ b/mercurial/revlogutils/deltas.py	Fri Nov 25 15:14:40 2022 +0100
@@ -655,7 +655,15 @@
 LIMIT_BASE2TEXT = 500
 
 
-def _candidategroups(revlog, textlen, p1, p2, cachedelta):
+def _candidategroups(
+    revlog,
+    textlen,
+    p1,
+    p2,
+    cachedelta,
+    excluded_bases=None,
+    target_rev=None,
+):
     """Provides group of revision to be tested as delta base
 
     This top level function focus on emitting groups with unique and worthwhile
@@ -672,9 +680,15 @@
     good = None
 
     deltas_limit = textlen * LIMIT_DELTA2TEXT
+    group_chunk_size = revlog._candidate_group_chunk_size
 
     tested = {nullrev}
-    candidates = _refinedgroups(revlog, p1, p2, cachedelta)
+    candidates = _refinedgroups(
+        revlog,
+        p1,
+        p2,
+        cachedelta,
+    )
     while True:
         temptative = candidates.send(good)
         if temptative is None:
@@ -694,15 +708,27 @@
             # filter out revision we tested already
             if rev in tested:
                 continue
-            tested.add(rev)
+            # a higher authority deemed the base unworthy (e.g. censored)
+            if excluded_bases is not None and rev in excluded_bases:
+                tested.add(rev)
+                continue
+            # in some recomputation cases, that rev is too high in the revlog
+            # to be used as a delta base
+            if target_rev is not None and rev >= target_rev:
+                tested.add(rev)
+                continue
             # filter out delta base that will never produce good delta
             if deltas_limit < revlog.length(rev):
+                tested.add(rev)
                 continue
             if sparse and revlog.rawsize(rev) < (textlen // LIMIT_BASE2TEXT):
+                tested.add(rev)
                 continue
             # no delta for rawtext-changing revs (see "candelta" for why)
             if revlog.flags(rev) & REVIDX_RAWTEXT_CHANGING_FLAGS:
+                tested.add(rev)
                 continue
+
             # If we reach here, we are about to build and test a delta.
             # The delta building process will compute the chaininfo in all
             # case, since that computation is cached, it is fine to access it
@@ -710,9 +736,11 @@
             chainlen, chainsize = revlog._chaininfo(rev)
             # if chain will be too long, skip base
             if revlog._maxchainlen and chainlen >= revlog._maxchainlen:
+                tested.add(rev)
                 continue
             # if chain already have too much data, skip base
             if deltas_limit < chainsize:
+                tested.add(rev)
                 continue
             if sparse and revlog.upperboundcomp is not None:
                 maxcomp = revlog.upperboundcomp
@@ -731,20 +759,42 @@
                     snapshotlimit = textlen >> snapshotdepth
                     if snapshotlimit < lowestrealisticdeltalen:
                         # delta lower bound is larger than accepted upper bound
+                        tested.add(rev)
                         continue
 
                     # check the relative constraint on the delta size
                     revlength = revlog.length(rev)
                     if revlength < lowestrealisticdeltalen:
                         # delta probable lower bound is larger than target base
+                        tested.add(rev)
                         continue
 
             group.append(rev)
         if group:
-            # XXX: in the sparse revlog case, group can become large,
-            #      impacting performances. Some bounding or slicing mecanism
-            #      would help to reduce this impact.
-            good = yield tuple(group)
+            # When the size of the candidate group is big, it can result in a
+            # quite significant performance impact. To reduce this, we can send
+            # them in smaller batches until the new batch does not provide any
+            # improvements.
+            #
+            # This might reduce the overall efficiency of the compression in
+            # some corner cases, but that should also prevent very pathological
+            # cases from being an issue (e.g. 20,000 candidates).
+            #
+            # XXX note that the ordering of the group becomes important as it
+            # now impacts the final result. The current order is arbitrary
+            # and can be improved.
+            if group_chunk_size == 0:
+                tested.update(group)
+                good = yield tuple(group)
+            else:
+                prev_good = good
+                for start in range(0, len(group), group_chunk_size):
+                    sub_group = group[start : start + group_chunk_size]
+                    tested.update(sub_group)
+                    good = yield tuple(sub_group)
+                    if prev_good == good:
+                        break
+
     yield None
 
 
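A simplified model of the batching protocol above, with all names
illustrative: the generator yields fixed-size chunks, the caller send()s
back its best answer so far, and a batch that leaves the pre-batching
answer unchanged ends the search:

    def chunked(group, chunk_size, good):
        # mirror of the group_chunk_size branch above, minus revlog specifics
        prev_good = good
        for start in range(0, len(group), chunk_size):
            sub_group = group[start : start + chunk_size]
            good = yield tuple(sub_group)
            if prev_good == good:  # this batch brought no improvement
                break
        yield None

    gen = chunked(list(range(6)), 2, None)
    print(next(gen))       # (0, 1)
    print(gen.send(None))  # nothing improved, so the generator yields None
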
@@ -768,15 +818,28 @@
     # This logic only applies to general delta repositories and can be disabled
     # through configuration. Disabling reuse source delta is useful when
     # we want to make sure we recomputed "optimal" deltas.
+    debug_info = None
     if cachedelta and revlog._generaldelta and revlog._lazydeltabase:
         # Assume what we received from the server is a good choice
         # build delta will reuse the cache
+        if debug_info is not None:
+            debug_info['cached-delta.tested'] += 1
         good = yield (cachedelta[0],)
         if good is not None:
+            if debug_info is not None:
+                debug_info['cached-delta.accepted'] += 1
             yield None
             return
+    # XXX cache me higher
     snapshots = collections.defaultdict(list)
-    for candidates in _rawgroups(revlog, p1, p2, cachedelta, snapshots):
+    groups = _rawgroups(
+        revlog,
+        p1,
+        p2,
+        cachedelta,
+        snapshots,
+    )
+    for candidates in groups:
         good = yield candidates
         if good is not None:
             break
@@ -805,7 +868,10 @@
             children = tuple(sorted(c for c in snapshots[good]))
             good = yield children
 
-    # we have found nothing
+    if debug_info is not None:
+        if good is None:
+            debug_info['no-solution'] += 1
+
     yield None
 
 
@@ -841,7 +907,7 @@
 
     if sparse and parents:
         if snapshots is None:
-            # map: base-rev: snapshot-rev
+            # map: base-rev: [snapshot-revs]
             snapshots = collections.defaultdict(list)
         # See if we can use an existing snapshot in the parent chains to use as
         # a base for a new intermediate-snapshot
@@ -879,14 +945,14 @@
             # chain.
             max_depth = max(parents_snaps.keys())
             chain = deltachain(other)
-            for idx, s in enumerate(chain):
+            for depth, s in enumerate(chain):
                 if s < snapfloor:
                     continue
-                if max_depth < idx:
+                if max_depth < depth:
                     break
                 if not revlog.issnapshot(s):
                     break
-                parents_snaps[idx].add(s)
+                parents_snaps[depth].add(s)
         # Test them as possible intermediate snapshot base
         # We test them from highest to lowest level. High level one are more
         # likely to result in small delta
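
The snapshots mapping documented above is a plain multimap from a
delta-base revision to the snapshot revisions built on top of it; a toy
illustration with invented revision numbers:

    import collections

    snapshots = collections.defaultdict(list)
    snapshots[10].append(12)  # rev 12 is a snapshot whose delta base is rev 10
    snapshots[10].append(17)  # rev 17 shares the same base
    print(snapshots[10])      # [12, 17]
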
@@ -931,10 +997,17 @@
 
 
 class deltacomputer:
-    def __init__(self, revlog, write_debug=None, debug_search=False):
+    def __init__(
+        self,
+        revlog,
+        write_debug=None,
+        debug_search=False,
+        debug_info=None,
+    ):
         self.revlog = revlog
         self._write_debug = write_debug
         self._debug_search = debug_search
+        self._debug_info = debug_info
 
     def buildtext(self, revinfo, fh):
         """Builds a fulltext version of a revision
@@ -1103,11 +1176,14 @@
         if revinfo.flags & REVIDX_RAWTEXT_CHANGING_FLAGS:
             return self._fullsnapshotinfo(fh, revinfo, target_rev)
 
-        if self._write_debug is not None:
+        gather_debug = (
+            self._write_debug is not None or self._debug_info is not None
+        )
+        debug_search = self._write_debug is not None and self._debug_search
+
+        if gather_debug:
             start = util.timer()
 
-        debug_search = self._write_debug is not None and self._debug_search
-
         # count the number of different delta we tried (for debug purpose)
         dbg_try_count = 0
         # count the number of "search round" we did. (for debug purpose)
@@ -1122,7 +1198,7 @@
         deltainfo = None
         p1r, p2r = revlog.rev(p1), revlog.rev(p2)
 
-        if self._write_debug is not None:
+        if gather_debug:
             if p1r != nullrev:
                 p1_chain_len = revlog._chaininfo(p1r)[0]
             else:
@@ -1137,7 +1213,13 @@
             self._write_debug(msg)
 
         groups = _candidategroups(
-            self.revlog, revinfo.textlen, p1r, p2r, cachedelta
+            self.revlog,
+            revinfo.textlen,
+            p1r,
+            p2r,
+            cachedelta,
+            excluded_bases,
+            target_rev,
         )
         candidaterevs = next(groups)
         while candidaterevs is not None:
@@ -1147,7 +1229,13 @@
                 if deltainfo is not None:
                     prev = deltainfo.base
 
-                if p1 in candidaterevs or p2 in candidaterevs:
+                if (
+                    cachedelta is not None
+                    and len(candidaterevs) == 1
+                    and cachedelta[0] in candidaterevs
+                ):
+                    round_type = b"cached-delta"
+                elif p1 in candidaterevs or p2 in candidaterevs:
                     round_type = b"parents"
                 elif prev is not None and all(c < prev for c in candidaterevs):
                     round_type = b"refine-down"
@@ -1195,16 +1283,7 @@
                     msg = b"DBG-DELTAS-SEARCH:     base=%d\n"
                     msg %= self.revlog.deltaparent(candidaterev)
                     self._write_debug(msg)
-                if candidaterev in excluded_bases:
-                    if debug_search:
-                        msg = b"DBG-DELTAS-SEARCH:     EXCLUDED\n"
-                        self._write_debug(msg)
-                    continue
-                if candidaterev >= target_rev:
-                    if debug_search:
-                        msg = b"DBG-DELTAS-SEARCH:     TOO-HIGH\n"
-                        self._write_debug(msg)
-                    continue
+
                 dbg_try_count += 1
 
                 if debug_search:
@@ -1244,12 +1323,20 @@
         else:
             dbg_type = b"delta"
 
-        if self._write_debug is not None:
+        if gather_debug:
             end = util.timer()
+            used_cached = (
+                cachedelta is not None
+                and dbg_try_rounds == 1
+                and dbg_try_count == 1
+                and deltainfo.base == cachedelta[0]
+            )
             dbg = {
                 'duration': end - start,
                 'revision': target_rev,
+                'delta-base': deltainfo.base,  # pytype: disable=attribute-error
                 'search_round_count': dbg_try_rounds,
+                'using-cached-base': used_cached,
                 'delta_try_count': dbg_try_count,
                 'type': dbg_type,
                 'p1-chain-len': p1_chain_len,
@@ -1279,31 +1366,39 @@
                     target_revlog += b'%s:' % target_key
             dbg['target-revlog'] = target_revlog
 
-            msg = (
-                b"DBG-DELTAS:"
-                b" %-12s"
-                b" rev=%d:"
-                b" search-rounds=%d"
-                b" try-count=%d"
-                b" - delta-type=%-6s"
-                b" snap-depth=%d"
-                b" - p1-chain-length=%d"
-                b" p2-chain-length=%d"
-                b" - duration=%f"
-                b"\n"
-            )
-            msg %= (
-                dbg["target-revlog"],
-                dbg["revision"],
-                dbg["search_round_count"],
-                dbg["delta_try_count"],
-                dbg["type"],
-                dbg["snapshot-depth"],
-                dbg["p1-chain-len"],
-                dbg["p2-chain-len"],
-                dbg["duration"],
-            )
-            self._write_debug(msg)
+            if self._debug_info is not None:
+                self._debug_info.append(dbg)
+
+            if self._write_debug is not None:
+                msg = (
+                    b"DBG-DELTAS:"
+                    b" %-12s"
+                    b" rev=%d:"
+                    b" delta-base=%d"
+                    b" is-cached=%d"
+                    b" - search-rounds=%d"
+                    b" try-count=%d"
+                    b" - delta-type=%-6s"
+                    b" snap-depth=%d"
+                    b" - p1-chain-length=%d"
+                    b" p2-chain-length=%d"
+                    b" - duration=%f"
+                    b"\n"
+                )
+                msg %= (
+                    dbg["target-revlog"],
+                    dbg["revision"],
+                    dbg["delta-base"],
+                    dbg["using-cached-base"],
+                    dbg["search_round_count"],
+                    dbg["delta_try_count"],
+                    dbg["type"],
+                    dbg["snapshot-depth"],
+                    dbg["p1-chain-len"],
+                    dbg["p2-chain-len"],
+                    dbg["duration"],
+                )
+                self._write_debug(msg)
         return deltainfo
 
 
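To make the widened debug line concrete, here is a toy rendering that
reuses the format string from the hunk above; every field value is
invented:

    fields = (
        b'FILELOG:f',  # target-revlog (invented)
        1234,          # revision
        1230,          # delta-base
        1,             # is-cached
        1,             # search-rounds
        1,             # try-count
        b'delta',      # delta-type
        -1,            # snap-depth
        42,            # p1-chain-length
        -1,            # p2-chain-length
        0.000123,      # duration
    )
    msg = (
        b"DBG-DELTAS: %-12s rev=%d: delta-base=%d is-cached=%d"
        b" - search-rounds=%d try-count=%d - delta-type=%-6s snap-depth=%d"
        b" - p1-chain-length=%d p2-chain-length=%d - duration=%f\n"
    )
    print((msg % fields).decode())
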
--- a/mercurial/shelve.py	Thu Nov 24 10:34:34 2022 +0100
+++ b/mercurial/shelve.py	Fri Nov 25 15:14:40 2022 +0100
@@ -247,6 +247,14 @@
         for ext in shelvefileextensions:
             self.vfs.tryunlink(self.name + b'.' + ext)
 
+    def changed_files(self, ui, repo):
+        try:
+            ctx = repo.unfiltered()[self.readinfo()[b'node']]
+            return ctx.files()
+        except (FileNotFoundError, error.RepoLookupError):
+            filename = self.vfs.join(self.name + b'.patch')
+            return patch.changedfiles(ui, repo, filename)
+
 
 def _optimized_match(repo, node):
     """
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/mercurial/thirdparty/attr/LICENSE	Fri Nov 25 15:14:40 2022 +0100
@@ -0,0 +1,21 @@
+The MIT License (MIT)
+
+Copyright (c) 2015 Hynek Schlawack and the attrs contributors
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
--- a/mercurial/thirdparty/attr/LICENSE.txt	Thu Nov 24 10:34:34 2022 +0100
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,21 +0,0 @@
-The MIT License (MIT)
-
-Copyright (c) 2015 Hynek Schlawack
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in all
-copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
-SOFTWARE.
--- a/mercurial/thirdparty/attr/__init__.py	Thu Nov 24 10:34:34 2022 +0100
+++ b/mercurial/thirdparty/attr/__init__.py	Fri Nov 25 15:14:40 2022 +0100
@@ -1,37 +1,35 @@
-from __future__ import absolute_import, division, print_function
+# SPDX-License-Identifier: MIT
+
+
+import sys
+
+from functools import partial
 
-from ._funcs import (
-    asdict,
-    assoc,
-    astuple,
-    evolve,
-    has,
-)
+from . import converters, exceptions, filters, setters, validators
+from ._cmp import cmp_using
+from ._config import get_run_validators, set_run_validators
+from ._funcs import asdict, assoc, astuple, evolve, has, resolve_types
 from ._make import (
+    NOTHING,
     Attribute,
     Factory,
-    NOTHING,
-    attr,
-    attributes,
+    attrib,
+    attrs,
     fields,
+    fields_dict,
     make_class,
     validate,
 )
-from ._config import (
-    get_run_validators,
-    set_run_validators,
-)
-from . import exceptions
-from . import filters
-from . import converters
-from . import validators
+from ._version_info import VersionInfo
 
 
-__version__ = "17.2.0"
+__version__ = "22.1.0"
+__version_info__ = VersionInfo._from_version_string(__version__)
 
 __title__ = "attrs"
 __description__ = "Classes Without Boilerplate"
-__uri__ = "http://www.attrs.org/"
+__url__ = "https://www.attrs.org/"
+__uri__ = __url__
 __doc__ = __description__ + " <" + __uri__ + ">"
 
 __author__ = "Hynek Schlawack"
@@ -41,8 +39,9 @@
 __copyright__ = "Copyright (c) 2015 Hynek Schlawack"
 
 
-s = attrs = attributes
-ib = attrib = attr
+s = attributes = attrs
+ib = attr = attrib
+dataclass = partial(attrs, auto_attribs=True)  # happy Easter ;)
 
 __all__ = [
     "Attribute",
@@ -55,17 +54,26 @@
     "attrib",
     "attributes",
     "attrs",
+    "cmp_using",
     "converters",
     "evolve",
     "exceptions",
     "fields",
+    "fields_dict",
     "filters",
     "get_run_validators",
     "has",
     "ib",
     "make_class",
+    "resolve_types",
     "s",
     "set_run_validators",
+    "setters",
     "validate",
     "validators",
 ]
+
+if sys.version_info[:2] >= (3, 6):
+    from ._next_gen import define, field, frozen, mutable  # noqa: F401
+
+    __all__.extend(("define", "field", "frozen", "mutable"))
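
As a quick sanity check of the re-exported names, typical usage of the
vendored package looks like this (outside Mercurial the same API ships as
the standalone attrs distribution):

    from mercurial.thirdparty import attr

    @attr.s
    class Point:
        x = attr.ib(default=0)
        y = attr.ib(default=0)

    p = Point(1, 2)
    print(attr.asdict(p))  # {'x': 1, 'y': 2}
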
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/mercurial/thirdparty/attr/__init__.pyi	Fri Nov 25 15:14:40 2022 +0100
@@ -0,0 +1,486 @@
+import sys
+
+from typing import (
+    Any,
+    Callable,
+    ClassVar,
+    Dict,
+    Generic,
+    List,
+    Mapping,
+    Optional,
+    Protocol,
+    Sequence,
+    Tuple,
+    Type,
+    TypeVar,
+    Union,
+    overload,
+)
+
+# `import X as X` is required to make these public
+from . import converters as converters
+from . import exceptions as exceptions
+from . import filters as filters
+from . import setters as setters
+from . import validators as validators
+from ._cmp import cmp_using as cmp_using
+from ._version_info import VersionInfo
+
+__version__: str
+__version_info__: VersionInfo
+__title__: str
+__description__: str
+__url__: str
+__uri__: str
+__author__: str
+__email__: str
+__license__: str
+__copyright__: str
+
+_T = TypeVar("_T")
+_C = TypeVar("_C", bound=type)
+
+_EqOrderType = Union[bool, Callable[[Any], Any]]
+_ValidatorType = Callable[[Any, Attribute[_T], _T], Any]
+_ConverterType = Callable[[Any], Any]
+_FilterType = Callable[[Attribute[_T], _T], bool]
+_ReprType = Callable[[Any], str]
+_ReprArgType = Union[bool, _ReprType]
+_OnSetAttrType = Callable[[Any, Attribute[Any], Any], Any]
+_OnSetAttrArgType = Union[
+    _OnSetAttrType, List[_OnSetAttrType], setters._NoOpType
+]
+_FieldTransformer = Callable[
+    [type, List[Attribute[Any]]], List[Attribute[Any]]
+]
+# FIXME: in reality, if multiple validators are passed they must be in a list
+# or tuple, but those are invariant and so would prevent subtypes of
+# _ValidatorType from working when passed in a list or tuple.
+_ValidatorArgType = Union[_ValidatorType[_T], Sequence[_ValidatorType[_T]]]
+
+# A protocol to be able to statically accept an attrs class.
+class AttrsInstance(Protocol):
+    __attrs_attrs__: ClassVar[Any]
+
+# _make --
+
+NOTHING: object
+
+# NOTE: Factory lies about its return type to make this possible:
+# `x: List[int] # = Factory(list)`
+# Work around mypy issue #4554 in the common case by using an overload.
+if sys.version_info >= (3, 8):
+    from typing import Literal
+    @overload
+    def Factory(factory: Callable[[], _T]) -> _T: ...
+    @overload
+    def Factory(
+        factory: Callable[[Any], _T],
+        takes_self: Literal[True],
+    ) -> _T: ...
+    @overload
+    def Factory(
+        factory: Callable[[], _T],
+        takes_self: Literal[False],
+    ) -> _T: ...
+
+else:
+    @overload
+    def Factory(factory: Callable[[], _T]) -> _T: ...
+    @overload
+    def Factory(
+        factory: Union[Callable[[Any], _T], Callable[[], _T]],
+        takes_self: bool = ...,
+    ) -> _T: ...
+
+# Static type inference support via __dataclass_transform__ implemented as per:
+# https://github.com/microsoft/pyright/blob/1.1.135/specs/dataclass_transforms.md
+# This annotation must be applied to all overloads of "define" and "attrs"
+#
+# NOTE: This is a typing construct and does not exist at runtime.  Extensions
+# wrapping attrs decorators should declare a separate __dataclass_transform__
+# signature in the extension module using the specification linked above to
+# provide pyright support.
+def __dataclass_transform__(
+    *,
+    eq_default: bool = True,
+    order_default: bool = False,
+    kw_only_default: bool = False,
+    field_descriptors: Tuple[Union[type, Callable[..., Any]], ...] = (()),
+) -> Callable[[_T], _T]: ...
+
+class Attribute(Generic[_T]):
+    name: str
+    default: Optional[_T]
+    validator: Optional[_ValidatorType[_T]]
+    repr: _ReprArgType
+    cmp: _EqOrderType
+    eq: _EqOrderType
+    order: _EqOrderType
+    hash: Optional[bool]
+    init: bool
+    converter: Optional[_ConverterType]
+    metadata: Dict[Any, Any]
+    type: Optional[Type[_T]]
+    kw_only: bool
+    on_setattr: _OnSetAttrType
+    def evolve(self, **changes: Any) -> "Attribute[Any]": ...
+
+# NOTE: We had several choices for the annotation to use for type arg:
+# 1) Type[_T]
+#   - Pros: Handles simple cases correctly
+#   - Cons: Might produce less informative errors in the case of conflicting
+#     TypeVars e.g. `attr.ib(default='bad', type=int)`
+# 2) Callable[..., _T]
+#   - Pros: Better error messages than #1 for conflicting TypeVars
+#   - Cons: Terrible error messages for validator checks.
+#   e.g. attr.ib(type=int, validator=validate_str)
+#        -> error: Cannot infer function type argument
+# 3) type (and do all of the work in the mypy plugin)
+#   - Pros: Simple here, and we could customize the plugin with our own errors.
+#   - Cons: Would need to write mypy plugin code to handle all the cases.
+# We chose option #1.
+
+# `attr` lies about its return type to make the following possible:
+#     attr()    -> Any
+#     attr(8)   -> int
+#     attr(validator=<some callable>)  -> Whatever the callable expects.
+# This makes this type of assignment possible:
+#     x: int = attr(8)
+#
+# This form catches explicit None or no default but with no other arguments
+# returns Any.
+@overload
+def attrib(
+    default: None = ...,
+    validator: None = ...,
+    repr: _ReprArgType = ...,
+    cmp: Optional[_EqOrderType] = ...,
+    hash: Optional[bool] = ...,
+    init: bool = ...,
+    metadata: Optional[Mapping[Any, Any]] = ...,
+    type: None = ...,
+    converter: None = ...,
+    factory: None = ...,
+    kw_only: bool = ...,
+    eq: Optional[_EqOrderType] = ...,
+    order: Optional[_EqOrderType] = ...,
+    on_setattr: Optional[_OnSetAttrArgType] = ...,
+) -> Any: ...
+
+# This form catches an explicit None or no default and infers the type from the
+# other arguments.
+@overload
+def attrib(
+    default: None = ...,
+    validator: Optional[_ValidatorArgType[_T]] = ...,
+    repr: _ReprArgType = ...,
+    cmp: Optional[_EqOrderType] = ...,
+    hash: Optional[bool] = ...,
+    init: bool = ...,
+    metadata: Optional[Mapping[Any, Any]] = ...,
+    type: Optional[Type[_T]] = ...,
+    converter: Optional[_ConverterType] = ...,
+    factory: Optional[Callable[[], _T]] = ...,
+    kw_only: bool = ...,
+    eq: Optional[_EqOrderType] = ...,
+    order: Optional[_EqOrderType] = ...,
+    on_setattr: Optional[_OnSetAttrArgType] = ...,
+) -> _T: ...
+
+# This form catches an explicit default argument.
+@overload
+def attrib(
+    default: _T,
+    validator: Optional[_ValidatorArgType[_T]] = ...,
+    repr: _ReprArgType = ...,
+    cmp: Optional[_EqOrderType] = ...,
+    hash: Optional[bool] = ...,
+    init: bool = ...,
+    metadata: Optional[Mapping[Any, Any]] = ...,
+    type: Optional[Type[_T]] = ...,
+    converter: Optional[_ConverterType] = ...,
+    factory: Optional[Callable[[], _T]] = ...,
+    kw_only: bool = ...,
+    eq: Optional[_EqOrderType] = ...,
+    order: Optional[_EqOrderType] = ...,
+    on_setattr: Optional[_OnSetAttrArgType] = ...,
+) -> _T: ...
+
+# This form covers type=non-Type: e.g. forward references (str), Any
+@overload
+def attrib(
+    default: Optional[_T] = ...,
+    validator: Optional[_ValidatorArgType[_T]] = ...,
+    repr: _ReprArgType = ...,
+    cmp: Optional[_EqOrderType] = ...,
+    hash: Optional[bool] = ...,
+    init: bool = ...,
+    metadata: Optional[Mapping[Any, Any]] = ...,
+    type: object = ...,
+    converter: Optional[_ConverterType] = ...,
+    factory: Optional[Callable[[], _T]] = ...,
+    kw_only: bool = ...,
+    eq: Optional[_EqOrderType] = ...,
+    order: Optional[_EqOrderType] = ...,
+    on_setattr: Optional[_OnSetAttrArgType] = ...,
+) -> Any: ...
+@overload
+def field(
+    *,
+    default: None = ...,
+    validator: None = ...,
+    repr: _ReprArgType = ...,
+    hash: Optional[bool] = ...,
+    init: bool = ...,
+    metadata: Optional[Mapping[Any, Any]] = ...,
+    converter: None = ...,
+    factory: None = ...,
+    kw_only: bool = ...,
+    eq: Optional[bool] = ...,
+    order: Optional[bool] = ...,
+    on_setattr: Optional[_OnSetAttrArgType] = ...,
+) -> Any: ...
+
+# This form catches an explicit None or no default and infers the type from the
+# other arguments.
+@overload
+def field(
+    *,
+    default: None = ...,
+    validator: Optional[_ValidatorArgType[_T]] = ...,
+    repr: _ReprArgType = ...,
+    hash: Optional[bool] = ...,
+    init: bool = ...,
+    metadata: Optional[Mapping[Any, Any]] = ...,
+    converter: Optional[_ConverterType] = ...,
+    factory: Optional[Callable[[], _T]] = ...,
+    kw_only: bool = ...,
+    eq: Optional[_EqOrderType] = ...,
+    order: Optional[_EqOrderType] = ...,
+    on_setattr: Optional[_OnSetAttrArgType] = ...,
+) -> _T: ...
+
+# This form catches an explicit default argument.
+@overload
+def field(
+    *,
+    default: _T,
+    validator: Optional[_ValidatorArgType[_T]] = ...,
+    repr: _ReprArgType = ...,
+    hash: Optional[bool] = ...,
+    init: bool = ...,
+    metadata: Optional[Mapping[Any, Any]] = ...,
+    converter: Optional[_ConverterType] = ...,
+    factory: Optional[Callable[[], _T]] = ...,
+    kw_only: bool = ...,
+    eq: Optional[_EqOrderType] = ...,
+    order: Optional[_EqOrderType] = ...,
+    on_setattr: Optional[_OnSetAttrArgType] = ...,
+) -> _T: ...
+
+# This form covers type=non-Type: e.g. forward references (str), Any
+@overload
+def field(
+    *,
+    default: Optional[_T] = ...,
+    validator: Optional[_ValidatorArgType[_T]] = ...,
+    repr: _ReprArgType = ...,
+    hash: Optional[bool] = ...,
+    init: bool = ...,
+    metadata: Optional[Mapping[Any, Any]] = ...,
+    converter: Optional[_ConverterType] = ...,
+    factory: Optional[Callable[[], _T]] = ...,
+    kw_only: bool = ...,
+    eq: Optional[_EqOrderType] = ...,
+    order: Optional[_EqOrderType] = ...,
+    on_setattr: Optional[_OnSetAttrArgType] = ...,
+) -> Any: ...
+@overload
+@__dataclass_transform__(order_default=True, field_descriptors=(attrib, field))
+def attrs(
+    maybe_cls: _C,
+    these: Optional[Dict[str, Any]] = ...,
+    repr_ns: Optional[str] = ...,
+    repr: bool = ...,
+    cmp: Optional[_EqOrderType] = ...,
+    hash: Optional[bool] = ...,
+    init: bool = ...,
+    slots: bool = ...,
+    frozen: bool = ...,
+    weakref_slot: bool = ...,
+    str: bool = ...,
+    auto_attribs: bool = ...,
+    kw_only: bool = ...,
+    cache_hash: bool = ...,
+    auto_exc: bool = ...,
+    eq: Optional[_EqOrderType] = ...,
+    order: Optional[_EqOrderType] = ...,
+    auto_detect: bool = ...,
+    collect_by_mro: bool = ...,
+    getstate_setstate: Optional[bool] = ...,
+    on_setattr: Optional[_OnSetAttrArgType] = ...,
+    field_transformer: Optional[_FieldTransformer] = ...,
+    match_args: bool = ...,
+) -> _C: ...
+@overload
+@__dataclass_transform__(order_default=True, field_descriptors=(attrib, field))
+def attrs(
+    maybe_cls: None = ...,
+    these: Optional[Dict[str, Any]] = ...,
+    repr_ns: Optional[str] = ...,
+    repr: bool = ...,
+    cmp: Optional[_EqOrderType] = ...,
+    hash: Optional[bool] = ...,
+    init: bool = ...,
+    slots: bool = ...,
+    frozen: bool = ...,
+    weakref_slot: bool = ...,
+    str: bool = ...,
+    auto_attribs: bool = ...,
+    kw_only: bool = ...,
+    cache_hash: bool = ...,
+    auto_exc: bool = ...,
+    eq: Optional[_EqOrderType] = ...,
+    order: Optional[_EqOrderType] = ...,
+    auto_detect: bool = ...,
+    collect_by_mro: bool = ...,
+    getstate_setstate: Optional[bool] = ...,
+    on_setattr: Optional[_OnSetAttrArgType] = ...,
+    field_transformer: Optional[_FieldTransformer] = ...,
+    match_args: bool = ...,
+) -> Callable[[_C], _C]: ...
+@overload
+@__dataclass_transform__(field_descriptors=(attrib, field))
+def define(
+    maybe_cls: _C,
+    *,
+    these: Optional[Dict[str, Any]] = ...,
+    repr: bool = ...,
+    hash: Optional[bool] = ...,
+    init: bool = ...,
+    slots: bool = ...,
+    frozen: bool = ...,
+    weakref_slot: bool = ...,
+    str: bool = ...,
+    auto_attribs: bool = ...,
+    kw_only: bool = ...,
+    cache_hash: bool = ...,
+    auto_exc: bool = ...,
+    eq: Optional[bool] = ...,
+    order: Optional[bool] = ...,
+    auto_detect: bool = ...,
+    getstate_setstate: Optional[bool] = ...,
+    on_setattr: Optional[_OnSetAttrArgType] = ...,
+    field_transformer: Optional[_FieldTransformer] = ...,
+    match_args: bool = ...,
+) -> _C: ...
+@overload
+@__dataclass_transform__(field_descriptors=(attrib, field))
+def define(
+    maybe_cls: None = ...,
+    *,
+    these: Optional[Dict[str, Any]] = ...,
+    repr: bool = ...,
+    hash: Optional[bool] = ...,
+    init: bool = ...,
+    slots: bool = ...,
+    frozen: bool = ...,
+    weakref_slot: bool = ...,
+    str: bool = ...,
+    auto_attribs: bool = ...,
+    kw_only: bool = ...,
+    cache_hash: bool = ...,
+    auto_exc: bool = ...,
+    eq: Optional[bool] = ...,
+    order: Optional[bool] = ...,
+    auto_detect: bool = ...,
+    getstate_setstate: Optional[bool] = ...,
+    on_setattr: Optional[_OnSetAttrArgType] = ...,
+    field_transformer: Optional[_FieldTransformer] = ...,
+    match_args: bool = ...,
+) -> Callable[[_C], _C]: ...
+
+mutable = define
+frozen = define  # they differ only in their defaults
+
+def fields(cls: Type[AttrsInstance]) -> Any: ...
+def fields_dict(cls: Type[AttrsInstance]) -> Dict[str, Attribute[Any]]: ...
+def validate(inst: AttrsInstance) -> None: ...
+def resolve_types(
+    cls: _C,
+    globalns: Optional[Dict[str, Any]] = ...,
+    localns: Optional[Dict[str, Any]] = ...,
+    attribs: Optional[List[Attribute[Any]]] = ...,
+) -> _C: ...
+
+# TODO: add support for returning a proper attrs class from the mypy plugin
+# we use Any instead of _CountingAttr so that e.g. `make_class('Foo',
+# [attr.ib()])` is valid
+def make_class(
+    name: str,
+    attrs: Union[List[str], Tuple[str, ...], Dict[str, Any]],
+    bases: Tuple[type, ...] = ...,
+    repr_ns: Optional[str] = ...,
+    repr: bool = ...,
+    cmp: Optional[_EqOrderType] = ...,
+    hash: Optional[bool] = ...,
+    init: bool = ...,
+    slots: bool = ...,
+    frozen: bool = ...,
+    weakref_slot: bool = ...,
+    str: bool = ...,
+    auto_attribs: bool = ...,
+    kw_only: bool = ...,
+    cache_hash: bool = ...,
+    auto_exc: bool = ...,
+    eq: Optional[_EqOrderType] = ...,
+    order: Optional[_EqOrderType] = ...,
+    collect_by_mro: bool = ...,
+    on_setattr: Optional[_OnSetAttrArgType] = ...,
+    field_transformer: Optional[_FieldTransformer] = ...,
+) -> type: ...
+
+# _funcs --
+
+# TODO: add support for returning TypedDict from the mypy plugin
+# FIXME: asdict/astuple do not honor their factory args. Waiting on one of
+# these:
+# https://github.com/python/mypy/issues/4236
+# https://github.com/python/typing/issues/253
+# XXX: remember to fix attrs.asdict/astuple too!
+def asdict(
+    inst: AttrsInstance,
+    recurse: bool = ...,
+    filter: Optional[_FilterType[Any]] = ...,
+    dict_factory: Type[Mapping[Any, Any]] = ...,
+    retain_collection_types: bool = ...,
+    value_serializer: Optional[
+        Callable[[type, Attribute[Any], Any], Any]
+    ] = ...,
+    tuple_keys: Optional[bool] = ...,
+) -> Dict[str, Any]: ...
+
+# TODO: add support for returning NamedTuple from the mypy plugin
+def astuple(
+    inst: AttrsInstance,
+    recurse: bool = ...,
+    filter: Optional[_FilterType[Any]] = ...,
+    tuple_factory: Type[Sequence[Any]] = ...,
+    retain_collection_types: bool = ...,
+) -> Tuple[Any, ...]: ...
+def has(cls: type) -> bool: ...
+def assoc(inst: _T, **changes: Any) -> _T: ...
+def evolve(inst: _T, **changes: Any) -> _T: ...
+
+# _config --
+
+def set_run_validators(run: bool) -> None: ...
+def get_run_validators() -> bool: ...
+
+# aliases --
+
+s = attributes = attrs
+ib = attr = attrib
+dataclass = attrs  # Technically, partial(attrs, auto_attribs=True) ;)
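To make the overload gymnastics above concrete, this is roughly what a type checker infers from these stubs; the class is hypothetical and none of this is part of the stub file itself:

    import attr

    @attr.s
    class Config(object):
        retries = attr.ib(default=3)    # explicit-default overload -> int
        name = attr.ib(default=None)    # bare None default, no other args -> Any
        items = attr.ib(factory=list)   # _T inferred from the factory callable

    # The annotated spelling leans on Factory's deliberate lie about its
    # return type:
    #     paths: List[str] = attr.ib(default=attr.Factory(list))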
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/mercurial/thirdparty/attr/_cmp.py	Fri Nov 25 15:14:40 2022 +0100
@@ -0,0 +1,155 @@
+# SPDX-License-Identifier: MIT
+
+
+import functools
+import types
+
+from ._make import _make_ne
+
+
+_operation_names = {"eq": "==", "lt": "<", "le": "<=", "gt": ">", "ge": ">="}
+
+
+def cmp_using(
+    eq=None,
+    lt=None,
+    le=None,
+    gt=None,
+    ge=None,
+    require_same_type=True,
+    class_name="Comparable",
+):
+    """
+    Create a class that can be passed into `attr.ib`'s ``eq``, ``order``, and
+    ``cmp`` arguments to customize field comparison.
+
+    The resulting class will have a full set of ordering methods if
+    at least one of ``{lt, le, gt, ge}`` and ``eq`` are provided.
+
+    :param Optional[callable] eq: `callable` used to evaluate equality
+        of two objects.
+    :param Optional[callable] lt: `callable` used to evaluate whether
+        one object is less than another object.
+    :param Optional[callable] le: `callable` used to evaluate whether
+        one object is less than or equal to another object.
+    :param Optional[callable] gt: `callable` used to evaluate whether
+        one object is greater than another object.
+    :param Optional[callable] ge: `callable` used to evaluate whether
+        one object is greater than or equal to another object.
+
+    :param bool require_same_type: When `True`, equality and ordering methods
+        will return `NotImplemented` if objects are not of the same type.
+
+    :param Optional[str] class_name: Name of class. Defaults to 'Comparable'.
+
+    See `comparison` for more details.
+
+    .. versionadded:: 21.1.0
+    """
+
+    body = {
+        "__slots__": ["value"],
+        "__init__": _make_init(),
+        "_requirements": [],
+        "_is_comparable_to": _is_comparable_to,
+    }
+
+    # Add operations.
+    num_order_functions = 0
+    has_eq_function = False
+
+    if eq is not None:
+        has_eq_function = True
+        body["__eq__"] = _make_operator("eq", eq)
+        body["__ne__"] = _make_ne()
+
+    if lt is not None:
+        num_order_functions += 1
+        body["__lt__"] = _make_operator("lt", lt)
+
+    if le is not None:
+        num_order_functions += 1
+        body["__le__"] = _make_operator("le", le)
+
+    if gt is not None:
+        num_order_functions += 1
+        body["__gt__"] = _make_operator("gt", gt)
+
+    if ge is not None:
+        num_order_functions += 1
+        body["__ge__"] = _make_operator("ge", ge)
+
+    type_ = types.new_class(
+        class_name, (object,), {}, lambda ns: ns.update(body)
+    )
+
+    # Add same type requirement.
+    if require_same_type:
+        type_._requirements.append(_check_same_type)
+
+    # Add total ordering if at least one operation was defined.
+    if 0 < num_order_functions < 4:
+        if not has_eq_function:
+            # functools.total_ordering requires __eq__ to be defined,
+            # so raise early error here to keep a nice stack.
+            raise ValueError(
+                "eq must be define is order to complete ordering from "
+                "lt, le, gt, ge."
+            )
+        type_ = functools.total_ordering(type_)
+
+    return type_
+
+
+def _make_init():
+    """
+    Create __init__ method.
+    """
+
+    def __init__(self, value):
+        """
+        Initialize object with *value*.
+        """
+        self.value = value
+
+    return __init__
+
+
+def _make_operator(name, func):
+    """
+    Create operator method.
+    """
+
+    def method(self, other):
+        if not self._is_comparable_to(other):
+            return NotImplemented
+
+        result = func(self.value, other.value)
+        if result is NotImplemented:
+            return NotImplemented
+
+        return result
+
+    method.__name__ = "__%s__" % (name,)
+    method.__doc__ = "Return a %s b.  Computed by attrs." % (
+        _operation_names[name],
+    )
+
+    return method
+
+
+def _is_comparable_to(self, other):
+    """
+    Check whether `other` is comparable to `self`.
+    """
+    for func in self._requirements:
+        if not func(self, other):
+            return False
+    return True
+
+
+def _check_same_type(self, other):
+    """
+    Return True if *self* and *other* are of the same type, False otherwise.
+    """
+    return other.value.__class__ is self.value.__class__
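As a quick sanity check of the behavior implemented above, a small self-contained sketch (the comparator and values are invented; the import uses the vendored path):

    from mercurial.thirdparty.attr._cmp import cmp_using

    # eq plus a single ordering function suffices; functools.total_ordering
    # derives the remaining methods.
    CaseFold = cmp_using(
        eq=lambda a, b: a.lower() == b.lower(),
        lt=lambda a, b: a.lower() < b.lower(),
        class_name="CaseFold",
    )

    assert CaseFold("Foo") == CaseFold("foo")
    assert CaseFold("abc") <= CaseFold("ABD")
    # require_same_type=True (the default) yields NotImplemented for
    # differently-typed wrapped values:
    assert CaseFold("x").__eq__(CaseFold(1)) is NotImplemented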
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/mercurial/thirdparty/attr/_cmp.pyi	Fri Nov 25 15:14:40 2022 +0100
@@ -0,0 +1,13 @@
+from typing import Any, Callable, Optional, Type
+
+_CompareWithType = Callable[[Any, Any], bool]
+
+def cmp_using(
+    eq: Optional[_CompareWithType],
+    lt: Optional[_CompareWithType],
+    le: Optional[_CompareWithType],
+    gt: Optional[_CompareWithType],
+    ge: Optional[_CompareWithType],
+    require_same_type: bool,
+    class_name: str,
+) -> Type: ...
--- a/mercurial/thirdparty/attr/_compat.py	Thu Nov 24 10:34:34 2022 +0100
+++ b/mercurial/thirdparty/attr/_compat.py	Fri Nov 25 15:14:40 2022 +0100
@@ -1,90 +1,185 @@
-from __future__ import absolute_import, division, print_function
+# SPDX-License-Identifier: MIT
+
+
+import inspect
+import platform
+import sys
+import threading
+import types
+import warnings
+
+from collections.abc import Mapping, Sequence  # noqa
+
+
+PYPY = platform.python_implementation() == "PyPy"
+PY36 = sys.version_info[:2] >= (3, 6)
+HAS_F_STRINGS = PY36
+PY310 = sys.version_info[:2] >= (3, 10)
 
-import sys
-import types
+
+if PYPY or PY36:
+    ordered_dict = dict
+else:
+    from collections import OrderedDict
+
+    ordered_dict = OrderedDict
+
+
+def just_warn(*args, **kw):
+    warnings.warn(
+        "Running interpreter doesn't sufficiently support code object "
+        "introspection.  Some features like bare super() or accessing "
+        "__class__ will not work with slotted classes.",
+        RuntimeWarning,
+        stacklevel=2,
+    )
 
 
-PY2 = sys.version_info[0] == 2
+class _AnnotationExtractor:
+    """
+    Extract type annotations from a callable, returning None whenever there
+    is none.
+    """
+
+    __slots__ = ["sig"]
+
+    def __init__(self, callable):
+        try:
+            self.sig = inspect.signature(callable)
+        except (ValueError, TypeError):  # inspect failed
+            self.sig = None
+
+    def get_first_param_type(self):
+        """
+        Return the type annotation of the first argument if it's not empty.
+        """
+        if not self.sig:
+            return None
+
+        params = list(self.sig.parameters.values())
+        if params and params[0].annotation is not inspect.Parameter.empty:
+            return params[0].annotation
+
+        return None
+
+    def get_return_type(self):
+        """
+        Return the return type if it's not empty.
+        """
+        if (
+            self.sig
+            and self.sig.return_annotation is not inspect.Signature.empty
+        ):
+            return self.sig.return_annotation
+
+        return None
 
 
-if PY2:
-    from UserDict import IterableUserDict
-
-    # We 'bundle' isclass instead of using inspect as importing inspect is
-    # fairly expensive (order of 10-15 ms for a modern machine in 2016)
-    def isclass(klass):
-        return isinstance(klass, (type, types.ClassType))
+def make_set_closure_cell():
+    """Return a function of two arguments (cell, value) which sets
+    the value stored in the closure cell `cell` to `value`.
+    """
+    # pypy makes this easy. (It also supports the logic below, but
+    # why not do the easy/fast thing?)
+    if PYPY:
 
-    # TYPE is used in exceptions, repr(int) is different on Python 2 and 3.
-    TYPE = "type"
+        def set_closure_cell(cell, value):
+            cell.__setstate__((value,))
+
+        return set_closure_cell
 
-    def iteritems(d):
-        return d.iteritems()
+    # Otherwise gotta do it the hard way.
 
-    def iterkeys(d):
-        return d.iterkeys()
+    # Create a function that will set its first cellvar to `value`.
+    def set_first_cellvar_to(value):
+        x = value
+        return
 
-    # Python 2 is bereft of a read-only dict proxy, so we make one!
-    class ReadOnlyDict(IterableUserDict):
-        """
-        Best-effort read-only dict wrapper.
-        """
+        # This function will be eliminated as dead code, but
+        # not before its reference to `x` forces `x` to be
+        # represented as a closure cell rather than a local.
+        def force_x_to_be_a_cell():  # pragma: no cover
+            return x
 
-        def __setitem__(self, key, val):
-            # We gently pretend we're a Python 3 mappingproxy.
-            raise TypeError("'mappingproxy' object does not support item "
-                            "assignment")
+    try:
+        # Extract the code object and make sure our assumptions about
+        # the closure behavior are correct.
+        co = set_first_cellvar_to.__code__
+        if co.co_cellvars != ("x",) or co.co_freevars != ():
+            raise AssertionError  # pragma: no cover
 
-        def update(self, _):
-            # We gently pretend we're a Python 3 mappingproxy.
-            raise AttributeError("'mappingproxy' object has no attribute "
-                                 "'update'")
+        # Convert this code object to a code object that sets the
+        # function's first _freevar_ (not cellvar) to the argument.
+        if sys.version_info >= (3, 8):
 
-        def __delitem__(self, _):
-            # We gently pretend we're a Python 3 mappingproxy.
-            raise TypeError("'mappingproxy' object does not support item "
-                            "deletion")
+            def set_closure_cell(cell, value):
+                cell.cell_contents = value
 
-        def clear(self):
-            # We gently pretend we're a Python 3 mappingproxy.
-            raise AttributeError("'mappingproxy' object has no attribute "
-                                 "'clear'")
-
-        def pop(self, key, default=None):
-            # We gently pretend we're a Python 3 mappingproxy.
-            raise AttributeError("'mappingproxy' object has no attribute "
-                                 "'pop'")
+        else:
+            args = [co.co_argcount]
+            args.append(co.co_kwonlyargcount)
+            args.extend(
+                [
+                    co.co_nlocals,
+                    co.co_stacksize,
+                    co.co_flags,
+                    co.co_code,
+                    co.co_consts,
+                    co.co_names,
+                    co.co_varnames,
+                    co.co_filename,
+                    co.co_name,
+                    co.co_firstlineno,
+                    co.co_lnotab,
+                    # These two arguments are reversed:
+                    co.co_cellvars,
+                    co.co_freevars,
+                ]
+            )
+            set_first_freevar_code = types.CodeType(*args)
 
-        def popitem(self):
-            # We gently pretend we're a Python 3 mappingproxy.
-            raise AttributeError("'mappingproxy' object has no attribute "
-                                 "'popitem'")
-
-        def setdefault(self, key, default=None):
-            # We gently pretend we're a Python 3 mappingproxy.
-            raise AttributeError("'mappingproxy' object has no attribute "
-                                 "'setdefault'")
+            def set_closure_cell(cell, value):
+                # Create a function using the set_first_freevar_code,
+                # whose first closure cell is `cell`. Calling it will
+                # change the value of that cell.
+                setter = types.FunctionType(
+                    set_first_freevar_code, {}, "setter", (), (cell,)
+                )
+                # And call it to set the cell.
+                setter(value)
 
-        def __repr__(self):
-            # Override to be identical to the Python 3 version.
-            return "mappingproxy(" + repr(self.data) + ")"
+        # Make sure it works on this interpreter:
+        def make_func_with_cell():
+            x = None
+
+            def func():
+                return x  # pragma: no cover
 
-    def metadata_proxy(d):
-        res = ReadOnlyDict()
-        res.data.update(d)  # We blocked update, so we have to do it like this.
-        return res
+            return func
+
+        cell = make_func_with_cell().__closure__[0]
+        set_closure_cell(cell, 100)
+        if cell.cell_contents != 100:
+            raise AssertionError  # pragma: no cover
 
-else:
-    def isclass(klass):
-        return isinstance(klass, type)
+    except Exception:
+        return just_warn
+    else:
+        return set_closure_cell
 
-    TYPE = "class"
+
+set_closure_cell = make_set_closure_cell()
 
-    def iteritems(d):
-        return d.items()
-
-    def iterkeys(d):
-        return d.keys()
-
-    def metadata_proxy(d):
-        return types.MappingProxyType(dict(d))
+# Thread-local global to track attrs instances which are already being repr'd.
+# This is needed because there is no other (thread-safe) way to pass info
+# about the instances that are already being repr'd through the call stack
+# in order to ensure we don't perform infinite recursion.
+#
+# For instance, if an instance contains a dict which contains that instance,
+# we need to know that we're already repr'ing the outside instance from within
+# the dict's repr() call.
+#
+# This lives here rather than in _make.py so that the functions in _make.py
+# don't have a direct reference to the thread-local in their globals dict.
+# If they have such a reference, it breaks cloudpickle.
+repr_context = threading.local()
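The cell-rewriting machinery above mirrors its own self-test; here is a tiny usage sketch (the function names are invented):

    from mercurial.thirdparty.attr._compat import set_closure_cell

    def outer():
        x = "before"

        def inner():
            return x

        return inner

    f = outer()
    # Rewrite the closure cell that `inner` captured; the closure now
    # observes the new value.
    set_closure_cell(f.__closure__[0], "after")
    assert f() == "after"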
--- a/mercurial/thirdparty/attr/_config.py	Thu Nov 24 10:34:34 2022 +0100
+++ b/mercurial/thirdparty/attr/_config.py	Fri Nov 25 15:14:40 2022 +0100
@@ -1,4 +1,4 @@
-from __future__ import absolute_import, division, print_function
+# SPDX-License-Identifier: MIT
 
 
 __all__ = ["set_run_validators", "get_run_validators"]
@@ -9,6 +9,10 @@
 def set_run_validators(run):
     """
     Set whether or not validators are run.  By default, they are run.
+
+    .. deprecated:: 21.3.0 It will not be removed, but it also will not be
+        moved to new ``attrs`` namespace. Use `attrs.validators.set_disabled()`
+        instead.
     """
     if not isinstance(run, bool):
         raise TypeError("'run' must be bool.")
@@ -19,5 +23,9 @@
 def get_run_validators():
     """
     Return whether or not validators are run.
+
+    .. deprecated:: 21.3.0 It will not be removed, but it also will not be
+        moved to new ``attrs`` namespace. Use `attrs.validators.get_disabled()`
+        instead.
     """
     return _run_validators
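A short sketch of the global toggle these helpers expose (both are deprecated in favour of `attrs.validators.set_disabled()` per the docstrings above); the save-and-restore pattern here is just one sensible way to use them:

    import attr

    previous = attr.get_run_validators()
    attr.set_run_validators(False)   # validators are skipped from here on
    try:
        pass  # e.g. bulk-construct trusted instances without validation cost
    finally:
        attr.set_run_validators(previous)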
--- a/mercurial/thirdparty/attr/_funcs.py	Thu Nov 24 10:34:34 2022 +0100
+++ b/mercurial/thirdparty/attr/_funcs.py	Fri Nov 25 15:14:40 2022 +0100
@@ -1,14 +1,20 @@
-from __future__ import absolute_import, division, print_function
+# SPDX-License-Identifier: MIT
+
 
 import copy
 
-from ._compat import iteritems
-from ._make import NOTHING, fields, _obj_setattr
+from ._make import NOTHING, _obj_setattr, fields
 from .exceptions import AttrsAttributeNotFoundError
 
 
-def asdict(inst, recurse=True, filter=None, dict_factory=dict,
-           retain_collection_types=False):
+def asdict(
+    inst,
+    recurse=True,
+    filter=None,
+    dict_factory=dict,
+    retain_collection_types=False,
+    value_serializer=None,
+):
     """
     Return the ``attrs`` attribute values of *inst* as a dict.
 
@@ -17,9 +23,9 @@
     :param inst: Instance of an ``attrs``-decorated class.
     :param bool recurse: Recurse into classes that are also
         ``attrs``-decorated.
-    :param callable filter: A callable whose return code deteremines whether an
+    :param callable filter: A callable whose return code determines whether an
         attribute or element is included (``True``) or dropped (``False``).  Is
-        called with the :class:`attr.Attribute` as the first argument and the
+        called with the `attrs.Attribute` as the first argument and the
         value as the second argument.
     :param callable dict_factory: A callable to produce dictionaries from.  For
         example, to produce ordered dictionaries instead of normal Python
@@ -27,6 +33,10 @@
     :param bool retain_collection_types: Do not convert to ``list`` when
         encountering an attribute whose type is ``tuple`` or ``set``.  Only
         meaningful if ``recurse`` is ``True``.
+    :param Optional[callable] value_serializer: A hook that is called for every
+        attribute or dict key/value.  It receives the current instance, field
+        and value and must return the (updated) value.  The hook is run *after*
+        the optional *filter* has been applied.
 
     :rtype: return type of *dict_factory*
 
@@ -35,6 +45,9 @@
 
     ..  versionadded:: 16.0.0 *dict_factory*
     ..  versionadded:: 16.1.0 *retain_collection_types*
+    ..  versionadded:: 20.3.0 *value_serializer*
+    ..  versionadded:: 21.3.0 If a dict has a collection for a key, it is
+        serialized as a tuple.
     """
     attrs = fields(inst.__class__)
     rv = dict_factory()
@@ -42,24 +55,58 @@
         v = getattr(inst, a.name)
         if filter is not None and not filter(a, v):
             continue
+
+        if value_serializer is not None:
+            v = value_serializer(inst, a, v)
+
         if recurse is True:
             if has(v.__class__):
-                rv[a.name] = asdict(v, recurse=True, filter=filter,
-                                    dict_factory=dict_factory)
-            elif isinstance(v, (tuple, list, set)):
+                rv[a.name] = asdict(
+                    v,
+                    recurse=True,
+                    filter=filter,
+                    dict_factory=dict_factory,
+                    retain_collection_types=retain_collection_types,
+                    value_serializer=value_serializer,
+                )
+            elif isinstance(v, (tuple, list, set, frozenset)):
                 cf = v.__class__ if retain_collection_types is True else list
-                rv[a.name] = cf([
-                    asdict(i, recurse=True, filter=filter,
-                           dict_factory=dict_factory)
-                    if has(i.__class__) else i
-                    for i in v
-                ])
+                rv[a.name] = cf(
+                    [
+                        _asdict_anything(
+                            i,
+                            is_key=False,
+                            filter=filter,
+                            dict_factory=dict_factory,
+                            retain_collection_types=retain_collection_types,
+                            value_serializer=value_serializer,
+                        )
+                        for i in v
+                    ]
+                )
             elif isinstance(v, dict):
                 df = dict_factory
-                rv[a.name] = df((
-                    asdict(kk, dict_factory=df) if has(kk.__class__) else kk,
-                    asdict(vv, dict_factory=df) if has(vv.__class__) else vv)
-                    for kk, vv in iteritems(v))
+                rv[a.name] = df(
+                    (
+                        _asdict_anything(
+                            kk,
+                            is_key=True,
+                            filter=filter,
+                            dict_factory=df,
+                            retain_collection_types=retain_collection_types,
+                            value_serializer=value_serializer,
+                        ),
+                        _asdict_anything(
+                            vv,
+                            is_key=False,
+                            filter=filter,
+                            dict_factory=df,
+                            retain_collection_types=retain_collection_types,
+                            value_serializer=value_serializer,
+                        ),
+                    )
+                    for kk, vv in v.items()
+                )
             else:
                 rv[a.name] = v
         else:
@@ -67,8 +114,86 @@
     return rv
 
 
-def astuple(inst, recurse=True, filter=None, tuple_factory=tuple,
-            retain_collection_types=False):
+def _asdict_anything(
+    val,
+    is_key,
+    filter,
+    dict_factory,
+    retain_collection_types,
+    value_serializer,
+):
+    """
+    ``asdict`` only works on attrs instances; this works on anything.
+    """
+    if getattr(val.__class__, "__attrs_attrs__", None) is not None:
+        # Attrs class.
+        rv = asdict(
+            val,
+            recurse=True,
+            filter=filter,
+            dict_factory=dict_factory,
+            retain_collection_types=retain_collection_types,
+            value_serializer=value_serializer,
+        )
+    elif isinstance(val, (tuple, list, set, frozenset)):
+        if retain_collection_types is True:
+            cf = val.__class__
+        elif is_key:
+            cf = tuple
+        else:
+            cf = list
+
+        rv = cf(
+            [
+                _asdict_anything(
+                    i,
+                    is_key=False,
+                    filter=filter,
+                    dict_factory=dict_factory,
+                    retain_collection_types=retain_collection_types,
+                    value_serializer=value_serializer,
+                )
+                for i in val
+            ]
+        )
+    elif isinstance(val, dict):
+        df = dict_factory
+        rv = df(
+            (
+                _asdict_anything(
+                    kk,
+                    is_key=True,
+                    filter=filter,
+                    dict_factory=df,
+                    retain_collection_types=retain_collection_types,
+                    value_serializer=value_serializer,
+                ),
+                _asdict_anything(
+                    vv,
+                    is_key=False,
+                    filter=filter,
+                    dict_factory=df,
+                    retain_collection_types=retain_collection_types,
+                    value_serializer=value_serializer,
+                ),
+            )
+            for kk, vv in val.items()
+        )
+    else:
+        rv = val
+        if value_serializer is not None:
+            rv = value_serializer(None, None, rv)
+
+    return rv
+
+
+def astuple(
+    inst,
+    recurse=True,
+    filter=None,
+    tuple_factory=tuple,
+    retain_collection_types=False,
+):
     """
     Return the ``attrs`` attribute values of *inst* as a tuple.
 
@@ -79,7 +204,7 @@
         ``attrs``-decorated.
     :param callable filter: A callable whose return code determines whether an
         attribute or element is included (``True``) or dropped (``False``).  Is
-        called with the :class:`attr.Attribute` as the first argument and the
+        called with the `attrs.Attribute` as the first argument and the
         value as the second argument.
     :param callable tuple_factory: A callable to produce tuples from.  For
         example, to produce lists instead of tuples.
@@ -104,38 +229,61 @@
             continue
         if recurse is True:
             if has(v.__class__):
-                rv.append(astuple(v, recurse=True, filter=filter,
-                                  tuple_factory=tuple_factory,
-                                  retain_collection_types=retain))
-            elif isinstance(v, (tuple, list, set)):
+                rv.append(
+                    astuple(
+                        v,
+                        recurse=True,
+                        filter=filter,
+                        tuple_factory=tuple_factory,
+                        retain_collection_types=retain,
+                    )
+                )
+            elif isinstance(v, (tuple, list, set, frozenset)):
                 cf = v.__class__ if retain is True else list
-                rv.append(cf([
-                    astuple(j, recurse=True, filter=filter,
-                            tuple_factory=tuple_factory,
-                            retain_collection_types=retain)
-                    if has(j.__class__) else j
-                    for j in v
-                ]))
+                rv.append(
+                    cf(
+                        [
+                            astuple(
+                                j,
+                                recurse=True,
+                                filter=filter,
+                                tuple_factory=tuple_factory,
+                                retain_collection_types=retain,
+                            )
+                            if has(j.__class__)
+                            else j
+                            for j in v
+                        ]
+                    )
+                )
             elif isinstance(v, dict):
                 df = v.__class__ if retain is True else dict
-                rv.append(df(
+                rv.append(
+                    df(
                         (
                             astuple(
                                 kk,
                                 tuple_factory=tuple_factory,
-                                retain_collection_types=retain
-                            ) if has(kk.__class__) else kk,
+                                retain_collection_types=retain,
+                            )
+                            if has(kk.__class__)
+                            else kk,
                             astuple(
                                 vv,
                                 tuple_factory=tuple_factory,
-                                retain_collection_types=retain
-                            ) if has(vv.__class__) else vv
+                                retain_collection_types=retain,
+                            )
+                            if has(vv.__class__)
+                            else vv,
                         )
-                        for kk, vv in iteritems(v)))
+                        for kk, vv in v.items()
+                    )
+                )
             else:
                 rv.append(v)
         else:
             rv.append(v)
+
     return rv if tuple_factory is list else tuple_factory(rv)
 
 
@@ -146,7 +294,7 @@
     :param type cls: Class to introspect.
     :raise TypeError: If *cls* is not a class.
 
-    :rtype: :class:`bool`
+    :rtype: bool
     """
     return getattr(cls, "__attrs_attrs__", None) is not None
 
@@ -166,19 +314,26 @@
         class.
 
     ..  deprecated:: 17.1.0
-        Use :func:`evolve` instead.
+        Use `attrs.evolve` instead if you can.
+        This function will not be removed due to the slightly different approach
+        compared to `attrs.evolve`.
     """
     import warnings
-    warnings.warn("assoc is deprecated and will be removed after 2018/01.",
-                  DeprecationWarning)
+
+    warnings.warn(
+        "assoc is deprecated and will be removed after 2018/01.",
+        DeprecationWarning,
+        stacklevel=2,
+    )
     new = copy.copy(inst)
     attrs = fields(inst.__class__)
-    for k, v in iteritems(changes):
+    for k, v in changes.items():
         a = getattr(attrs, k, NOTHING)
         if a is NOTHING:
             raise AttrsAttributeNotFoundError(
-                "{k} is not an attrs attribute on {cl}."
-                .format(k=k, cl=new.__class__)
+                "{k} is not an attrs attribute on {cl}.".format(
+                    k=k, cl=new.__class__
+                )
             )
         _obj_setattr(new, k, v)
     return new
@@ -209,4 +364,57 @@
         init_name = attr_name if attr_name[0] != "_" else attr_name[1:]
         if init_name not in changes:
             changes[init_name] = getattr(inst, attr_name)
+
     return cls(**changes)
+
+
+def resolve_types(cls, globalns=None, localns=None, attribs=None):
+    """
+    Resolve any strings and forward annotations in type annotations.
+
+    This is only required if you need concrete types in `Attribute`'s *type*
+    field. In other words, you don't need to resolve your types if you only
+    use them for static type checking.
+
+    With no arguments, names will be looked up in the module in which the class
+    was created. If this is not what you want, e.g. if the name only exists
+    inside a method, you may pass *globalns* or *localns* to specify other
+    dictionaries in which to look up these names. See the docs of
+    `typing.get_type_hints` for more details.
+
+    :param type cls: Class to resolve.
+    :param Optional[dict] globalns: Dictionary containing global variables.
+    :param Optional[dict] localns: Dictionary containing local variables.
+    :param Optional[list] attribs: List of attribs for the given class.
+        This is necessary when calling from inside a ``field_transformer``
+        since *cls* is not an ``attrs`` class yet.
+
+    :raise TypeError: If *cls* is not a class.
+    :raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs``
+        class and you didn't pass any attribs.
+    :raise NameError: If types cannot be resolved because of missing variables.
+
+    :returns: *cls* so you can use this function also as a class decorator.
+        Please note that you have to apply it **after** `attrs.define`. That
+        means the decorator has to come in the line **before** `attrs.define`.
+
+    ..  versionadded:: 20.1.0
+    ..  versionadded:: 21.1.0 *attribs*
+
+    """
+    # Since calling get_type_hints is expensive we cache whether we've
+    # done it already.
+    if getattr(cls, "__attrs_types_resolved__", None) != cls:
+        import typing
+
+        hints = typing.get_type_hints(cls, globalns=globalns, localns=localns)
+        for field in fields(cls) if attribs is None else attribs:
+            if field.name in hints:
+                # Since fields have been frozen we must work around it.
+                _obj_setattr(field, "type", hints[field.name])
+        # We store the class we resolved so that subclasses know they haven't
+        # been resolved.
+        cls.__attrs_types_resolved__ = cls
+
+    # Return the class so you can use it as a decorator too.
+    return cls
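Because `resolve_types` only matters once string annotations must become concrete types, a minimal sketch follows; `Node` is hypothetical, and the explicit `localns` works around the class name not yet being bound while its own decorators run:

    import typing
    import attr

    @attr.s(auto_attribs=True)
    class Node(object):
        value: int
        children: "typing.List[Node]" = attr.Factory(list)

    # Before resolution the stored type is still the raw string.
    assert attr.fields(Node).children.type == "typing.List[Node]"

    attr.resolve_types(Node, globalns=globals(), localns={"Node": Node})
    assert attr.fields(Node).children.type == typing.List[Node]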
--- a/mercurial/thirdparty/attr/_make.py	Thu Nov 24 10:34:34 2022 +0100
+++ b/mercurial/thirdparty/attr/_make.py	Fri Nov 25 15:14:40 2022 +0100
@@ -1,50 +1,79 @@
-from __future__ import absolute_import, division, print_function
-
-import hashlib
+# SPDX-License-Identifier: MIT
+
+import copy
 import linecache
+import sys
+import types
+import typing
 
 from operator import itemgetter
 
-from . import _config
-from ._compat import PY2, iteritems, isclass, iterkeys, metadata_proxy
+# We need to import _compat itself in addition to the _compat members to avoid
+# having the thread-local in the globals here.
+from . import _compat, _config, setters
+from ._compat import (
+    HAS_F_STRINGS,
+    PY310,
+    PYPY,
+    _AnnotationExtractor,
+    ordered_dict,
+    set_closure_cell,
+)
 from .exceptions import (
     DefaultAlreadySetError,
     FrozenInstanceError,
     NotAnAttrsClassError,
+    UnannotatedAttributeError,
 )
 
 
 # This is used at least twice, so cache it here.
 _obj_setattr = object.__setattr__
-_init_convert_pat = "__attr_convert_{}"
+_init_converter_pat = "__attr_converter_%s"
 _init_factory_pat = "__attr_factory_{}"
-_tuple_property_pat = "    {attr_name} = property(itemgetter({index}))"
-_empty_metadata_singleton = metadata_proxy({})
-
-
-class _Nothing(object):
+_tuple_property_pat = (
+    "    {attr_name} = _attrs_property(_attrs_itemgetter({index}))"
+)
+_classvar_prefixes = (
+    "typing.ClassVar",
+    "t.ClassVar",
+    "ClassVar",
+    "typing_extensions.ClassVar",
+)
+# we don't use a double-underscore prefix because that triggers
+# name mangling when trying to create a slot for the field
+# (when slots=True)
+_hash_cache_field = "_attrs_cached_hash"
+
+_empty_metadata_singleton = types.MappingProxyType({})
+
+# Unique object for unequivocal getattr() defaults.
+_sentinel = object()
+
+_ng_default_on_setattr = setters.pipe(setters.convert, setters.validate)
+
+
+class _Nothing:
     """
     Sentinel class to indicate the lack of a value when ``None`` is ambiguous.
 
-    All instances of `_Nothing` are equal.
+    ``_Nothing`` is a singleton. There is only ever one of it.
+
+    .. versionchanged:: 21.1.0 ``bool(NOTHING)`` is now False.
     """
-    def __copy__(self):
-        return self
-
-    def __deepcopy__(self, _):
-        return self
-
-    def __eq__(self, other):
-        return other.__class__ == _Nothing
-
-    def __ne__(self, other):
-        return not self == other
+
+    _singleton = None
+
+    def __new__(cls):
+        if _Nothing._singleton is None:
+            _Nothing._singleton = super().__new__(cls)
+        return _Nothing._singleton
 
     def __repr__(self):
         return "NOTHING"
 
-    def __hash__(self):
-        return 0xdeadbeef
+    def __bool__(self):
+        return False
 
 
 NOTHING = _Nothing()
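The singleton contract documented above, condensed into a sketch (importing from the vendored path):

    from mercurial.thirdparty.attr._make import NOTHING, _Nothing

    assert _Nothing() is NOTHING      # __new__ always returns the one instance
    assert not NOTHING                # falsy as of 21.1.0
    assert repr(NOTHING) == "NOTHING"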
@@ -53,92 +82,255 @@
 """
 
 
-def attr(default=NOTHING, validator=None,
-         repr=True, cmp=True, hash=None, init=True,
-         convert=None, metadata={}):
-    r"""
+class _CacheHashWrapper(int):
+    """
+    An integer subclass that pickles / copies as None
+
+    This is used for non-slots classes with ``cache_hash=True``, to avoid
+    serializing a potentially (even likely) invalid hash value. Since ``None``
+    is the default value for uncalculated hashes, whenever this is copied,
+    the copy's value for the hash should automatically reset.
+
+    See GH #613 for more details.
+    """
+
+    def __reduce__(self, _none_constructor=type(None), _args=()):
+        return _none_constructor, _args
+
+
+def attrib(
+    default=NOTHING,
+    validator=None,
+    repr=True,
+    cmp=None,
+    hash=None,
+    init=True,
+    metadata=None,
+    type=None,
+    converter=None,
+    factory=None,
+    kw_only=False,
+    eq=None,
+    order=None,
+    on_setattr=None,
+):
+    """
     Create a new attribute on a class.
 
     ..  warning::
 
         Does *not* do anything unless the class is also decorated with
-        :func:`attr.s`!
+        `attr.s`!
 
     :param default: A value that is used if an ``attrs``-generated ``__init__``
         is used and no value is passed while instantiating or the attribute is
         excluded using ``init=False``.
 
-        If the value is an instance of :class:`Factory`, its callable will be
-        used to construct a new value (useful for mutable datatypes like lists
+        If the value is an instance of `attrs.Factory`, its callable will be
+        used to construct a new value (useful for mutable data types like lists
         or dicts).
 
-        If a default is not set (or set manually to ``attr.NOTHING``), a value
-        *must* be supplied when instantiating; otherwise a :exc:`TypeError`
+        If a default is not set (or set manually to `attrs.NOTHING`), a value
+        *must* be supplied when instantiating; otherwise a `TypeError`
         will be raised.
 
         The default can also be set using decorator notation as shown below.
 
-    :type default: Any value.
-
-    :param validator: :func:`callable` that is called by ``attrs``-generated
+    :type default: Any value
+
+    :param callable factory: Syntactic sugar for
+        ``default=attr.Factory(factory)``.
+
+    :param validator: `callable` that is called by ``attrs``-generated
         ``__init__`` methods after the instance has been initialized.  They
-        receive the initialized instance, the :class:`Attribute`, and the
+        receive the initialized instance, the :func:`~attrs.Attribute`, and the
         passed value.
 
         The return value is *not* inspected so the validator has to throw an
         exception itself.
 
-        If a ``list`` is passed, its items are treated as validators and must
+        If a `list` is passed, its items are treated as validators and must
         all pass.
 
         Validators can be globally disabled and re-enabled using
-        :func:`get_run_validators`.
+        `get_run_validators`.
 
         The validator can also be set using decorator notation as shown below.
 
-    :type validator: ``callable`` or a ``list`` of ``callable``\ s.
-
-    :param bool repr: Include this attribute in the generated ``__repr__``
-        method.
-    :param bool cmp: Include this attribute in the generated comparison methods
-        (``__eq__`` et al).
-    :param hash: Include this attribute in the generated ``__hash__``
-        method.  If ``None`` (default), mirror *cmp*'s value.  This is the
-        correct behavior according the Python spec.  Setting this value to
-        anything else than ``None`` is *discouraged*.
-    :type hash: ``bool`` or ``None``
+    :type validator: `callable` or a `list` of `callable`\\ s.
+
+    :param repr: Include this attribute in the generated ``__repr__``
+        method. If ``True``, include the attribute; if ``False``, omit it. By
+        default, the built-in ``repr()`` function is used. To override how the
+        attribute value is formatted, pass a ``callable`` that takes a single
+        value and returns a string. Note that the resulting string is used
+        as-is, i.e. it will be used directly *instead* of calling ``repr()``
+        (the default).
+    :type repr: a `bool` or a `callable` to use a custom function.
+
+    :param eq: If ``True`` (default), include this attribute in the
+        generated ``__eq__`` and ``__ne__`` methods that check two instances
+        for equality. To override how the attribute value is compared,
+        pass a ``callable`` that takes a single value and returns the value
+        to be compared.
+    :type eq: a `bool` or a `callable`.
+
+    :param order: If ``True`` (default), include this attribute in the
+        generated ``__lt__``, ``__le__``, ``__gt__`` and ``__ge__`` methods.
+        To override how the attribute value is ordered,
+        pass a ``callable`` that takes a single value and returns the value
+        to be ordered.
+    :type order: a `bool` or a `callable`.
+
+    :param cmp: Setting *cmp* is equivalent to setting *eq* and *order* to the
+        same value. Must not be mixed with *eq* or *order*.
+    :type cmp: a `bool` or a `callable`.
+
+    :param Optional[bool] hash: Include this attribute in the generated
+        ``__hash__`` method.  If ``None`` (default), mirror *eq*'s value.  This
+        is the correct behavior according to the Python spec.  Setting this
+        value to anything other than ``None`` is *discouraged*.
     :param bool init: Include this attribute in the generated ``__init__``
         method.  It is possible to set this to ``False`` and set a default
         value.  In that case this attribute is unconditionally initialized
         with the specified default value or factory.
-    :param callable convert: :func:`callable` that is called by
+    :param callable converter: `callable` that is called by
         ``attrs``-generated ``__init__`` methods to convert attribute's value
         to the desired format.  It is given the passed-in value, and the
         returned value will be used as the new value of the attribute.  The
         value is converted before being passed to the validator, if any.
     :param metadata: An arbitrary mapping, to be used by third-party
-        components.  See :ref:`extending_metadata`.
-
-    ..  versionchanged:: 17.1.0 *validator* can be a ``list`` now.
-    ..  versionchanged:: 17.1.0
-        *hash* is ``None`` and therefore mirrors *cmp* by default .
+        components.  See `extending_metadata`.
+    :param type: The type of the attribute.  In Python 3.6 or greater, the
+        preferred method to specify the type is using a variable annotation
+        (see :pep:`526`).
+        This argument is provided for backward compatibility.
+        Regardless of the approach used, the type will be stored on
+        ``Attribute.type``.
+
+        Please note that ``attrs`` doesn't do anything with this metadata by
+        itself. You can use it as part of your own code or for
+        `static type checking <types>`.
+    :param kw_only: Make this attribute keyword-only (Python 3+)
+        in the generated ``__init__`` (if ``init`` is ``False``, this
+        parameter is ignored).
+    :param on_setattr: Allows overwriting the *on_setattr* setting from
+        `attr.s`. If left `None`, the *on_setattr* value from `attr.s` is used.
+        Set to `attrs.setters.NO_OP` to run **no** `setattr` hooks for this
+        attribute -- regardless of the setting in `attr.s`.
+    :type on_setattr: `callable`, or a list of callables, or `None`, or
+        `attrs.setters.NO_OP`
+
+    .. versionadded:: 15.2.0 *convert*
+    .. versionadded:: 16.3.0 *metadata*
+    .. versionchanged:: 17.1.0 *validator* can be a ``list`` now.
+    .. versionchanged:: 17.1.0
+       *hash* is ``None`` and therefore mirrors *eq* by default.
+    .. versionadded:: 17.3.0 *type*
+    .. deprecated:: 17.4.0 *convert*
+    .. versionadded:: 17.4.0 *converter* as a replacement for the deprecated
+       *convert* to achieve consistency with other noun-based arguments.
+    .. versionadded:: 18.1.0
+       ``factory=f`` is syntactic sugar for ``default=attr.Factory(f)``.
+    .. versionadded:: 18.2.0 *kw_only*
+    .. versionchanged:: 19.2.0 *convert* keyword argument removed.
+    .. versionchanged:: 19.2.0 *repr* also accepts a custom callable.
+    .. deprecated:: 19.2.0 *cmp* Removal on or after 2021-06-01.
+    .. versionadded:: 19.2.0 *eq* and *order*
+    .. versionadded:: 20.1.0 *on_setattr*
+    .. versionchanged:: 20.3.0 *kw_only* backported to Python 2
+    .. versionchanged:: 21.1.0
+       *eq*, *order*, and *cmp* also accept a custom callable
+    .. versionchanged:: 21.1.0 *cmp* undeprecated
     """
+    eq, eq_key, order, order_key = _determine_attrib_eq_order(
+        cmp, eq, order, True
+    )
+
     if hash is not None and hash is not True and hash is not False:
         raise TypeError(
             "Invalid value for hash.  Must be True, False, or None."
         )
+
+    if factory is not None:
+        if default is not NOTHING:
+            raise ValueError(
+                "The `default` and `factory` arguments are mutually "
+                "exclusive."
+            )
+        if not callable(factory):
+            raise ValueError("The `factory` argument must be a callable.")
+        default = Factory(factory)
+
+    if metadata is None:
+        metadata = {}
+
+    # Apply syntactic sugar by auto-wrapping.
+    if isinstance(on_setattr, (list, tuple)):
+        on_setattr = setters.pipe(*on_setattr)
+
+    if validator and isinstance(validator, (list, tuple)):
+        validator = and_(*validator)
+
+    if converter and isinstance(converter, (list, tuple)):
+        converter = pipe(*converter)
+
     return _CountingAttr(
         default=default,
         validator=validator,
         repr=repr,
-        cmp=cmp,
+        cmp=None,
         hash=hash,
         init=init,
-        convert=convert,
+        converter=converter,
         metadata=metadata,
+        type=type,
+        kw_only=kw_only,
+        eq=eq,
+        eq_key=eq_key,
+        order=order,
+        order_key=order_key,
+        on_setattr=on_setattr,
     )
 
 
+def _compile_and_eval(script, globs, locs=None, filename=""):
+    """
+    "Exec" the script with the given global (globs) and local (locs) variables.
+    """
+    bytecode = compile(script, filename, "exec")
+    eval(bytecode, globs, locs)
+
+
+def _make_method(name, script, filename, globs):
+    """
+    Create the method with the script given and return the method object.
+    """
+    locs = {}
+
+    # In order for debuggers like PDB to be able to step through the code,
+    # we add a fake linecache entry.
+    count = 1
+    base_filename = filename
+    while True:
+        linecache_tuple = (
+            len(script),
+            None,
+            script.splitlines(True),
+            filename,
+        )
+        old_val = linecache.cache.setdefault(filename, linecache_tuple)
+        if old_val == linecache_tuple:
+            break
+        else:
+            filename = "{}-{}>".format(base_filename[:-1], count)
+            count += 1
+
+    _compile_and_eval(script, globs, locs, filename)
+
+    return locs[name]
+
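+# Illustrative sketch (not part of this patch) of what _make_method produces;
+# the script and filename below are made-up values:
+#
+#     >>> f = _make_method("f", "def f(self):\n    return 42", "<demo>", {})
+#     >>> f(None)
+#     42
+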
+
 def _make_attr_tuple_class(cls_name, attr_names):
     """
     Create a tuple subclass to hold `Attribute`s for an `attrs` class.
@@ -156,75 +348,273 @@
     ]
     if attr_names:
         for i, attr_name in enumerate(attr_names):
-            attr_class_template.append(_tuple_property_pat.format(
-                index=i,
-                attr_name=attr_name,
-            ))
+            attr_class_template.append(
+                _tuple_property_pat.format(index=i, attr_name=attr_name)
+            )
     else:
         attr_class_template.append("    pass")
-    globs = {"itemgetter": itemgetter}
-    eval(compile("\n".join(attr_class_template), "", "exec"), globs)
+    globs = {"_attrs_itemgetter": itemgetter, "_attrs_property": property}
+    _compile_and_eval("\n".join(attr_class_template), globs)
     return globs[attr_class_name]
 
 
-def _transform_attrs(cls, these):
+# Tuple class for extracted attributes from a class definition.
+# `base_attrs` is a subset of `attrs`.
+_Attributes = _make_attr_tuple_class(
+    "_Attributes",
+    [
+        # all attributes to build dunder methods for
+        "attrs",
+        # attributes that have been inherited
+        "base_attrs",
+        # map inherited attributes to their originating classes
+        "base_attrs_map",
+    ],
+)
+
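+# Illustrative sketch (not part of this patch): the generated tuple subclass
+# exposes every name as an index-backed property, so _Attributes unpacks like
+# a tuple while also supporting attribute access:
+#
+#     >>> Pair = _make_attr_tuple_class("Pair", ["a", "b"])
+#     >>> p = Pair((1, 2))
+#     >>> p.a, p[1]
+#     (1, 2)
+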
+
+def _is_class_var(annot):
+    """
+    Check whether *annot* is a typing.ClassVar.
+
+    The string comparison hack is used to avoid evaluating all string
+    annotations which would put attrs-based classes at a performance
+    disadvantage compared to plain old classes.
+    """
+    annot = str(annot)
+
+    # Annotation can be quoted.
+    if annot.startswith(("'", '"')) and annot.endswith(("'", '"')):
+        annot = annot[1:-1]
+
+    return annot.startswith(_classvar_prefixes)
+
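+# Illustrative sketch (not part of this patch); assumes _classvar_prefixes
+# covers the usual spellings such as "typing.ClassVar" and "ClassVar":
+#
+#     >>> import typing
+#     >>> _is_class_var(typing.ClassVar[int]), _is_class_var("ClassVar[int]")
+#     (True, True)
+#     >>> _is_class_var(int)
+#     False
+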
+
+def _has_own_attribute(cls, attrib_name):
+    """
+    Check whether *cls* defines *attrib_name* (and doesn't just inherit it).
+
+    Requires Python 3.
+    """
+    attr = getattr(cls, attrib_name, _sentinel)
+    if attr is _sentinel:
+        return False
+
+    for base_cls in cls.__mro__[1:]:
+        a = getattr(base_cls, attrib_name, None)
+        if attr is a:
+            return False
+
+    return True
+
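+# Illustrative sketch (not part of this patch): inherited attributes do not
+# count as the class's own:
+#
+#     >>> class A: x = 1
+#     >>> class B(A): pass
+#     >>> _has_own_attribute(A, "x"), _has_own_attribute(B, "x")
+#     (True, False)
+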
+
+def _get_annotations(cls):
+    """
+    Get annotations for *cls*.
+    """
+    if _has_own_attribute(cls, "__annotations__"):
+        return cls.__annotations__
+
+    return {}
+
+
+def _counter_getter(e):
+    """
+    Key function for sorting to avoid re-creating a lambda for every class.
     """
-    Transforms all `_CountingAttr`s on a class into `Attribute`s and saves the
-    list in `__attrs_attrs__`.
+    return e[1].counter
+
+
+def _collect_base_attrs(cls, taken_attr_names):
+    """
+    Collect attr.ibs from base classes of *cls*, except *taken_attr_names*.
+    """
+    base_attrs = []
+    base_attr_map = {}  # A dictionary of base attrs to their classes.
+
+    # Traverse the MRO and collect attributes.
+    for base_cls in reversed(cls.__mro__[1:-1]):
+        for a in getattr(base_cls, "__attrs_attrs__", []):
+            if a.inherited or a.name in taken_attr_names:
+                continue
+
+            a = a.evolve(inherited=True)
+            base_attrs.append(a)
+            base_attr_map[a.name] = base_cls
+
+    # For each name, only keep the freshest definition, i.e. the one furthest
+    # toward the back.  base_attr_map is fine because it gets overwritten
+    # with every new instance.
+    filtered = []
+    seen = set()
+    for a in reversed(base_attrs):
+        if a.name in seen:
+            continue
+        filtered.insert(0, a)
+        seen.add(a.name)
+
+    return filtered, base_attr_map
+
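+# Illustrative sketch (not part of this patch) of the resulting field order:
+# inherited attributes come before the subclass's own:
+#
+#     >>> import attr
+#     >>> @attr.s
+#     ... class Base(object):
+#     ...     x = attr.ib()
+#     >>> @attr.s
+#     ... class Child(Base):
+#     ...     y = attr.ib()
+#     >>> [a.name for a in attr.fields(Child)]
+#     ['x', 'y']
+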
+
+def _collect_base_attrs_broken(cls, taken_attr_names):
+    """
+    Collect attr.ibs from base classes of *cls*, except *taken_attr_names*.
+
+    N.B. *taken_attr_names* will be mutated.
+
+    Adhere to the old incorrect behavior.
+
+    Notably, it collects from the front of the MRO and considers inherited
+    attributes, which leads to the buggy behavior reported in #428.
+    """
+    base_attrs = []
+    base_attr_map = {}  # A dictionary of base attrs to their classes.
+
+    # Traverse the MRO and collect attributes.
+    for base_cls in cls.__mro__[1:-1]:
+        for a in getattr(base_cls, "__attrs_attrs__", []):
+            if a.name in taken_attr_names:
+                continue
+
+            a = a.evolve(inherited=True)
+            taken_attr_names.add(a.name)
+            base_attrs.append(a)
+            base_attr_map[a.name] = base_cls
+
+    return base_attrs, base_attr_map
+
+
+def _transform_attrs(
+    cls, these, auto_attribs, kw_only, collect_by_mro, field_transformer
+):
+    """
+    Transform all `_CountingAttr`s on a class into `Attribute`s.
 
     If *these* is passed, use that and don't look for them on the class.
+
+    If *collect_by_mro* is True, collect them in the correct MRO order; otherwise
+    use the old -- incorrect -- order.  See #428.
+
+    Return an `_Attributes`.
     """
-    super_cls = []
-    for c in reversed(cls.__mro__[1:-1]):
-        sub_attrs = getattr(c, "__attrs_attrs__", None)
-        if sub_attrs is not None:
-            super_cls.extend(a for a in sub_attrs if a not in super_cls)
-    if these is None:
-        ca_list = [(name, attr)
-                   for name, attr
-                   in cls.__dict__.items()
-                   if isinstance(attr, _CountingAttr)]
+    cd = cls.__dict__
+    anns = _get_annotations(cls)
+
+    if these is not None:
+        ca_list = [(name, ca) for name, ca in these.items()]
+
+        if not isinstance(these, ordered_dict):
+            ca_list.sort(key=_counter_getter)
+    elif auto_attribs is True:
+        ca_names = {
+            name
+            for name, attr in cd.items()
+            if isinstance(attr, _CountingAttr)
+        }
+        ca_list = []
+        annot_names = set()
+        for attr_name, type in anns.items():
+            if _is_class_var(type):
+                continue
+            annot_names.add(attr_name)
+            a = cd.get(attr_name, NOTHING)
+
+            if not isinstance(a, _CountingAttr):
+                if a is NOTHING:
+                    a = attrib()
+                else:
+                    a = attrib(default=a)
+            ca_list.append((attr_name, a))
+
+        unannotated = ca_names - annot_names
+        if len(unannotated) > 0:
+            raise UnannotatedAttributeError(
+                "The following `attr.ib`s lack a type annotation: "
+                + ", ".join(
+                    sorted(unannotated, key=lambda n: cd.get(n).counter)
+                )
+                + "."
+            )
     else:
-        ca_list = [(name, ca)
-                   for name, ca
-                   in iteritems(these)]
-
-    non_super_attrs = [
-        Attribute.from_counting_attr(name=attr_name, ca=ca)
-        for attr_name, ca
-        in sorted(ca_list, key=lambda e: e[1].counter)
+        ca_list = sorted(
+            (
+                (name, attr)
+                for name, attr in cd.items()
+                if isinstance(attr, _CountingAttr)
+            ),
+            key=lambda e: e[1].counter,
+        )
+
+    own_attrs = [
+        Attribute.from_counting_attr(
+            name=attr_name, ca=ca, type=anns.get(attr_name)
+        )
+        for attr_name, ca in ca_list
     ]
-    attr_names = [a.name for a in super_cls + non_super_attrs]
-
-    AttrsClass = _make_attr_tuple_class(cls.__name__, attr_names)
-
-    cls.__attrs_attrs__ = AttrsClass(super_cls + [
-        Attribute.from_counting_attr(name=attr_name, ca=ca)
-        for attr_name, ca
-        in sorted(ca_list, key=lambda e: e[1].counter)
-    ])
-
+
+    if collect_by_mro:
+        base_attrs, base_attr_map = _collect_base_attrs(
+            cls, {a.name for a in own_attrs}
+        )
+    else:
+        base_attrs, base_attr_map = _collect_base_attrs_broken(
+            cls, {a.name for a in own_attrs}
+        )
+
+    if kw_only:
+        own_attrs = [a.evolve(kw_only=True) for a in own_attrs]
+        base_attrs = [a.evolve(kw_only=True) for a in base_attrs]
+
+    attrs = base_attrs + own_attrs
+
+    # Mandatory vs non-mandatory attr order only matters when they are part of
+    # the __init__ signature and when they aren't kw_only (which are moved to
+    # the end and can be mandatory or non-mandatory in any order, as they will
+    # be specified as keyword args anyway). Check the order of those attrs:
     had_default = False
-    for a in cls.__attrs_attrs__:
-        if these is None and a not in super_cls:
-            setattr(cls, a.name, a)
-        if had_default is True and a.default is NOTHING and a.init is True:
+    for a in (a for a in attrs if a.init is not False and a.kw_only is False):
+        if had_default is True and a.default is NOTHING:
             raise ValueError(
                 "No mandatory attributes allowed after an attribute with a "
-                "default value or factory.  Attribute in question: {a!r}"
-                .format(a=a)
+                "default value or factory.  Attribute in question: %r" % (a,)
             )
-        elif had_default is False and \
-                a.default is not NOTHING and \
-                a.init is not False:
+
+        if had_default is False and a.default is not NOTHING:
             had_default = True
 
-
-def _frozen_setattrs(self, name, value):
-    """
-    Attached to frozen classes as __setattr__.
-    """
-    raise FrozenInstanceError()
+    if field_transformer is not None:
+        attrs = field_transformer(cls, attrs)
+
+    # Create AttrsClass *after* applying the field_transformer since it may
+    # add or remove attributes!
+    attr_names = [a.name for a in attrs]
+    AttrsClass = _make_attr_tuple_class(cls.__name__, attr_names)
+
+    return _Attributes((AttrsClass(attrs), base_attrs, base_attr_map))
+
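+# Illustrative consequence (not part of this patch) of the ordering check
+# above: a mandatory attribute after one with a default fails fast:
+#
+#     >>> import attr
+#     >>> @attr.s
+#     ... class C(object):
+#     ...     x = attr.ib(default=1)
+#     ...     y = attr.ib()
+#     Traceback (most recent call last):
+#       ...
+#     ValueError: No mandatory attributes allowed after an attribute with ...
+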
+
+if PYPY:
+
+    def _frozen_setattrs(self, name, value):
+        """
+        Attached to frozen classes as __setattr__.
+        """
+        if isinstance(self, BaseException) and name in (
+            "__cause__",
+            "__context__",
+        ):
+            BaseException.__setattr__(self, name, value)
+            return
+
+        raise FrozenInstanceError()
+
+else:
+
+    def _frozen_setattrs(self, name, value):
+        """
+        Attached to frozen classes as __setattr__.
+        """
+        raise FrozenInstanceError()
 
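+# Illustrative sketch (not part of this patch): once installed as __setattr__,
+# either variant turns attribute assignment into FrozenInstanceError:
+#
+#     >>> import attr
+#     >>> @attr.s(frozen=True)
+#     ... class C(object):
+#     ...     x = attr.ib()
+#     >>> C(1).x = 2
+#     Traceback (most recent call last):
+#       ...
+#     attr.exceptions.FrozenInstanceError
+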
 
 def _frozen_delattrs(self, name):
@@ -234,44 +624,661 @@
     raise FrozenInstanceError()
 
 
-def attributes(maybe_cls=None, these=None, repr_ns=None,
-               repr=True, cmp=True, hash=None, init=True,
-               slots=False, frozen=False, str=False):
+class _ClassBuilder:
+    """
+    Iteratively build *one* class.
+    """
+
+    __slots__ = (
+        "_attr_names",
+        "_attrs",
+        "_base_attr_map",
+        "_base_names",
+        "_cache_hash",
+        "_cls",
+        "_cls_dict",
+        "_delete_attribs",
+        "_frozen",
+        "_has_pre_init",
+        "_has_post_init",
+        "_is_exc",
+        "_on_setattr",
+        "_slots",
+        "_weakref_slot",
+        "_wrote_own_setattr",
+        "_has_custom_setattr",
+    )
+
+    def __init__(
+        self,
+        cls,
+        these,
+        slots,
+        frozen,
+        weakref_slot,
+        getstate_setstate,
+        auto_attribs,
+        kw_only,
+        cache_hash,
+        is_exc,
+        collect_by_mro,
+        on_setattr,
+        has_custom_setattr,
+        field_transformer,
+    ):
+        attrs, base_attrs, base_map = _transform_attrs(
+            cls,
+            these,
+            auto_attribs,
+            kw_only,
+            collect_by_mro,
+            field_transformer,
+        )
+
+        self._cls = cls
+        self._cls_dict = dict(cls.__dict__) if slots else {}
+        self._attrs = attrs
+        self._base_names = {a.name for a in base_attrs}
+        self._base_attr_map = base_map
+        self._attr_names = tuple(a.name for a in attrs)
+        self._slots = slots
+        self._frozen = frozen
+        self._weakref_slot = weakref_slot
+        self._cache_hash = cache_hash
+        self._has_pre_init = bool(getattr(cls, "__attrs_pre_init__", False))
+        self._has_post_init = bool(getattr(cls, "__attrs_post_init__", False))
+        self._delete_attribs = not bool(these)
+        self._is_exc = is_exc
+        self._on_setattr = on_setattr
+
+        self._has_custom_setattr = has_custom_setattr
+        self._wrote_own_setattr = False
+
+        self._cls_dict["__attrs_attrs__"] = self._attrs
+
+        if frozen:
+            self._cls_dict["__setattr__"] = _frozen_setattrs
+            self._cls_dict["__delattr__"] = _frozen_delattrs
+
+            self._wrote_own_setattr = True
+        elif on_setattr in (
+            _ng_default_on_setattr,
+            setters.validate,
+            setters.convert,
+        ):
+            has_validator = has_converter = False
+            for a in attrs:
+                if a.validator is not None:
+                    has_validator = True
+                if a.converter is not None:
+                    has_converter = True
+
+                if has_validator and has_converter:
+                    break
+            if (
+                (
+                    on_setattr == _ng_default_on_setattr
+                    and not (has_validator or has_converter)
+                )
+                or (on_setattr == setters.validate and not has_validator)
+                or (on_setattr == setters.convert and not has_converter)
+            ):
+                # If class-level on_setattr is set to convert + validate, but
+                # there's no field to convert or validate, pretend like there's
+                # no on_setattr.
+                self._on_setattr = None
+
+        if getstate_setstate:
+            (
+                self._cls_dict["__getstate__"],
+                self._cls_dict["__setstate__"],
+            ) = self._make_getstate_setstate()
+
+    def __repr__(self):
+        return "<_ClassBuilder(cls={cls})>".format(cls=self._cls.__name__)
+
+    def build_class(self):
+        """
+        Finalize class based on the accumulated configuration.
+
+        Builder cannot be used after calling this method.
+        """
+        if self._slots is True:
+            return self._create_slots_class()
+        else:
+            return self._patch_original_class()
+
+    def _patch_original_class(self):
+        """
+        Apply accumulated methods and return the class.
+        """
+        cls = self._cls
+        base_names = self._base_names
+
+        # Clean class of attribute definitions (`attr.ib()`s).
+        if self._delete_attribs:
+            for name in self._attr_names:
+                if (
+                    name not in base_names
+                    and getattr(cls, name, _sentinel) is not _sentinel
+                ):
+                    try:
+                        delattr(cls, name)
+                    except AttributeError:
+                        # This can happen if a base class defines a class
+                        # variable and we want to set an attribute with the
+                        # same name by using only a type annotation.
+                        pass
+
+        # Attach our dunder methods.
+        for name, value in self._cls_dict.items():
+            setattr(cls, name, value)
+
+        # If we've inherited an attrs __setattr__ and don't write our own,
+        # reset it to object's.
+        if not self._wrote_own_setattr and getattr(
+            cls, "__attrs_own_setattr__", False
+        ):
+            cls.__attrs_own_setattr__ = False
+
+            if not self._has_custom_setattr:
+                cls.__setattr__ = _obj_setattr
+
+        return cls
+
+    def _create_slots_class(self):
+        """
+        Build and return a new class with a `__slots__` attribute.
+        """
+        cd = {
+            k: v
+            for k, v in self._cls_dict.items()
+            if k not in tuple(self._attr_names) + ("__dict__", "__weakref__")
+        }
+
+        # If our class doesn't have its own implementation of __setattr__
+        # (either from the user or by us), check the bases: if one of them
+        # has an attrs-made __setattr__, it needs to be reset.  We don't walk
+        # the MRO because we only care about our immediate base classes.
+        # XXX: This can be confused by subclassing a slotted attrs class with
+        # XXX: a non-attrs class and subclass the resulting class with an attrs
+        # XXX: class.  See `test_slotted_confused` for details.  For now that's
+        # XXX: OK with us.
+        if not self._wrote_own_setattr:
+            cd["__attrs_own_setattr__"] = False
+
+            if not self._has_custom_setattr:
+                for base_cls in self._cls.__bases__:
+                    if base_cls.__dict__.get("__attrs_own_setattr__", False):
+                        cd["__setattr__"] = _obj_setattr
+                        break
+
+        # Traverse the MRO to collect existing slots
+        # and check for an existing __weakref__.
+        existing_slots = dict()
+        weakref_inherited = False
+        for base_cls in self._cls.__mro__[1:-1]:
+            if base_cls.__dict__.get("__weakref__", None) is not None:
+                weakref_inherited = True
+            existing_slots.update(
+                {
+                    name: getattr(base_cls, name)
+                    for name in getattr(base_cls, "__slots__", [])
+                }
+            )
+
+        base_names = set(self._base_names)
+
+        names = self._attr_names
+        if (
+            self._weakref_slot
+            and "__weakref__" not in getattr(self._cls, "__slots__", ())
+            and "__weakref__" not in names
+            and not weakref_inherited
+        ):
+            names += ("__weakref__",)
+
+        # We only add the names of attributes that aren't inherited.
+        # Setting __slots__ to inherited attributes wastes memory.
+        slot_names = [name for name in names if name not in base_names]
+        # There may be slots for attributes of the current class that are
+        # already defined in parent classes.  As their descriptors may be
+        # overridden by a child class, we collect them here and update the
+        # class dict.
+        reused_slots = {
+            slot: slot_descriptor
+            for slot, slot_descriptor in existing_slots.items()
+            if slot in slot_names
+        }
+        slot_names = [name for name in slot_names if name not in reused_slots]
+        cd.update(reused_slots)
+        if self._cache_hash:
+            slot_names.append(_hash_cache_field)
+        cd["__slots__"] = tuple(slot_names)
+
+        cd["__qualname__"] = self._cls.__qualname__
+
+        # Create new class based on old class and our methods.
+        cls = type(self._cls)(self._cls.__name__, self._cls.__bases__, cd)
+
+        # The following is a fix for
+        # <https://github.com/python-attrs/attrs/issues/102>.  On Python 3,
+        # if a method mentions `__class__` or uses the no-arg super(), the
+        # compiler will bake a reference to the class in the method itself
+        # as `method.__closure__`.  Since we replace the class with a
+        # clone, we rewrite these references so it keeps working.
+        for item in cls.__dict__.values():
+            if isinstance(item, (classmethod, staticmethod)):
+                # Class- and staticmethods hide their functions inside.
+                # These might need to be rewritten as well.
+                closure_cells = getattr(item.__func__, "__closure__", None)
+            elif isinstance(item, property):
+                # Workaround for property `super()` shortcut (PY3-only).
+                # There is no universal way for other descriptors.
+                closure_cells = getattr(item.fget, "__closure__", None)
+            else:
+                closure_cells = getattr(item, "__closure__", None)
+
+            if not closure_cells:  # Catch None or the empty list.
+                continue
+            for cell in closure_cells:
+                try:
+                    match = cell.cell_contents is self._cls
+                except ValueError:  # ValueError: Cell is empty
+                    pass
+                else:
+                    if match:
+                        set_closure_cell(cell, cls)
+
+        return cls
+
+    def add_repr(self, ns):
+        self._cls_dict["__repr__"] = self._add_method_dunders(
+            _make_repr(self._attrs, ns, self._cls)
+        )
+        return self
+
+    def add_str(self):
+        repr = self._cls_dict.get("__repr__")
+        if repr is None:
+            raise ValueError(
+                "__str__ can only be generated if a __repr__ exists."
+            )
+
+        def __str__(self):
+            return self.__repr__()
+
+        self._cls_dict["__str__"] = self._add_method_dunders(__str__)
+        return self
+
+    def _make_getstate_setstate(self):
+        """
+        Create custom __setstate__ and __getstate__ methods.
+        """
+        # __weakref__ is not writable.
+        state_attr_names = tuple(
+            an for an in self._attr_names if an != "__weakref__"
+        )
+
+        def slots_getstate(self):
+            """
+            Automatically created by attrs.
+            """
+            return tuple(getattr(self, name) for name in state_attr_names)
+
+        hash_caching_enabled = self._cache_hash
+
+        def slots_setstate(self, state):
+            """
+            Automatically created by attrs.
+            """
+            __bound_setattr = _obj_setattr.__get__(self, Attribute)
+            for name, value in zip(state_attr_names, state):
+                __bound_setattr(name, value)
+
+            # The hash code cache is not included when the object is
+            # serialized, but it still needs to be initialized to None to
+            # indicate that the first call to __hash__ should be a cache
+            # miss.
+            if hash_caching_enabled:
+                __bound_setattr(_hash_cache_field, None)
+
+        return slots_getstate, slots_setstate
+
+    def make_unhashable(self):
+        self._cls_dict["__hash__"] = None
+        return self
+
+    def add_hash(self):
+        self._cls_dict["__hash__"] = self._add_method_dunders(
+            _make_hash(
+                self._cls,
+                self._attrs,
+                frozen=self._frozen,
+                cache_hash=self._cache_hash,
+            )
+        )
+
+        return self
+
+    def add_init(self):
+        self._cls_dict["__init__"] = self._add_method_dunders(
+            _make_init(
+                self._cls,
+                self._attrs,
+                self._has_pre_init,
+                self._has_post_init,
+                self._frozen,
+                self._slots,
+                self._cache_hash,
+                self._base_attr_map,
+                self._is_exc,
+                self._on_setattr,
+                attrs_init=False,
+            )
+        )
+
+        return self
+
+    def add_match_args(self):
+        self._cls_dict["__match_args__"] = tuple(
+            field.name
+            for field in self._attrs
+            if field.init and not field.kw_only
+        )
+
+    def add_attrs_init(self):
+        self._cls_dict["__attrs_init__"] = self._add_method_dunders(
+            _make_init(
+                self._cls,
+                self._attrs,
+                self._has_pre_init,
+                self._has_post_init,
+                self._frozen,
+                self._slots,
+                self._cache_hash,
+                self._base_attr_map,
+                self._is_exc,
+                self._on_setattr,
+                attrs_init=True,
+            )
+        )
+
+        return self
+
+    def add_eq(self):
+        cd = self._cls_dict
+
+        cd["__eq__"] = self._add_method_dunders(
+            _make_eq(self._cls, self._attrs)
+        )
+        cd["__ne__"] = self._add_method_dunders(_make_ne())
+
+        return self
+
+    def add_order(self):
+        cd = self._cls_dict
+
+        cd["__lt__"], cd["__le__"], cd["__gt__"], cd["__ge__"] = (
+            self._add_method_dunders(meth)
+            for meth in _make_order(self._cls, self._attrs)
+        )
+
+        return self
+
+    def add_setattr(self):
+        if self._frozen:
+            return self
+
+        sa_attrs = {}
+        for a in self._attrs:
+            on_setattr = a.on_setattr or self._on_setattr
+            if on_setattr and on_setattr is not setters.NO_OP:
+                sa_attrs[a.name] = a, on_setattr
+
+        if not sa_attrs:
+            return self
+
+        if self._has_custom_setattr:
+            # We need to write a __setattr__ but there already is one!
+            raise ValueError(
+                "Can't combine custom __setattr__ with on_setattr hooks."
+            )
+
+        # docstring comes from _add_method_dunders
+        def __setattr__(self, name, val):
+            try:
+                a, hook = sa_attrs[name]
+            except KeyError:
+                nval = val
+            else:
+                nval = hook(self, a, val)
+
+            _obj_setattr(self, name, nval)
+
+        self._cls_dict["__attrs_own_setattr__"] = True
+        self._cls_dict["__setattr__"] = self._add_method_dunders(__setattr__)
+        self._wrote_own_setattr = True
+
+        return self
+
+    def _add_method_dunders(self, method):
+        """
+        Add __module__ and __qualname__ to a *method* if possible.
+        """
+        try:
+            method.__module__ = self._cls.__module__
+        except AttributeError:
+            pass
+
+        try:
+            method.__qualname__ = ".".join(
+                (self._cls.__qualname__, method.__name__)
+            )
+        except AttributeError:
+            pass
+
+        try:
+            method.__doc__ = "Method generated by attrs for class %s." % (
+                self._cls.__qualname__,
+            )
+        except AttributeError:
+            pass
+
+        return method
+
+
+def _determine_attrs_eq_order(cmp, eq, order, default_eq):
+    """
+    Validate the combination of *cmp*, *eq*, and *order*. Derive the effective
+    values of eq and order.  If *eq* is None, set it to *default_eq*.
+    """
+    if cmp is not None and any((eq is not None, order is not None)):
+        raise ValueError("Don't mix `cmp` with `eq' and `order`.")
+
+    # cmp takes precedence due to bw-compatibility.
+    if cmp is not None:
+        return cmp, cmp
+
+    # If left None, equality is set to the specified default and ordering
+    # mirrors equality.
+    if eq is None:
+        eq = default_eq
+
+    if order is None:
+        order = eq
+
+    if eq is False and order is True:
+        raise ValueError("`order` can only be True if `eq` is True too.")
+
+    return eq, order
+
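+# Illustrative sketch (not part of this patch): with everything left None,
+# *eq* falls back to the default and *order* mirrors it, while mixing *cmp*
+# with *eq*/*order* is rejected:
+#
+#     >>> _determine_attrs_eq_order(None, None, None, True)
+#     (True, True)
+#     >>> _determine_attrs_eq_order(True, True, None, True)
+#     Traceback (most recent call last):
+#       ...
+#     ValueError: Don't mix `cmp` with `eq` and `order`.
+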
+
+def _determine_attrib_eq_order(cmp, eq, order, default_eq):
+    """
+    Validate the combination of *cmp*, *eq*, and *order*. Derive the effective
+    values of eq and order.  If *eq* is None, set it to *default_eq*.
+    """
+    if cmp is not None and any((eq is not None, order is not None)):
+        raise ValueError("Don't mix `cmp` with `eq' and `order`.")
+
+    def decide_callable_or_boolean(value):
+        """
+        Decide whether a key function is used.
+        """
+        if callable(value):
+            value, key = True, value
+        else:
+            key = None
+        return value, key
+
+    # cmp takes precedence due to bw-compatibility.
+    if cmp is not None:
+        cmp, cmp_key = decide_callable_or_boolean(cmp)
+        return cmp, cmp_key, cmp, cmp_key
+
+    # If left None, equality is set to the specified default and ordering
+    # mirrors equality.
+    if eq is None:
+        eq, eq_key = default_eq, None
+    else:
+        eq, eq_key = decide_callable_or_boolean(eq)
+
+    if order is None:
+        order, order_key = eq, eq_key
+    else:
+        order, order_key = decide_callable_or_boolean(order)
+
+    if eq is False and order is True:
+        raise ValueError("`order` can only be True if `eq` is True too.")
+
+    return eq, eq_key, order, order_key
+
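+# Illustrative sketch (not part of this patch): passing a callable for *eq*
+# both enables the comparison and keeps the callable as its key function:
+#
+#     >>> eq, eq_key, order, order_key = _determine_attrib_eq_order(
+#     ...     None, str.lower, None, True
+#     ... )
+#     >>> eq, eq_key is str.lower, order, order_key is str.lower
+#     (True, True, True, True)
+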
+
+def _determine_whether_to_implement(
+    cls, flag, auto_detect, dunders, default=True
+):
+    """
+    Check whether we should implement a set of methods for *cls*.
+
+    *flag* is the argument passed into @attr.s (e.g. *init*), *auto_detect*
+    is the same as passed into @attr.s, and *dunders* is a tuple of attribute
+    names whose presence signals that the user has implemented the method
+    themselves.
+
+    Return *default* if no reason either for or against is found.
+    """
+    if flag is True or flag is False:
+        return flag
+
+    if flag is None and auto_detect is False:
+        return default
+
+    # Logically, flag is None and auto_detect is True here.
+    for dunder in dunders:
+        if _has_own_attribute(cls, dunder):
+            return False
+
+    return default
+
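+# Illustrative sketch (not part of this patch): with auto-detection enabled,
+# a dunder written in the class body itself vetoes generation; otherwise the
+# default wins:
+#
+#     >>> class C:
+#     ...     def __repr__(self):
+#     ...         return "C()"
+#     >>> _determine_whether_to_implement(C, None, True, ("__repr__",))
+#     False
+#     >>> _determine_whether_to_implement(C, None, False, ("__repr__",))
+#     True
+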
+
+def attrs(
+    maybe_cls=None,
+    these=None,
+    repr_ns=None,
+    repr=None,
+    cmp=None,
+    hash=None,
+    init=None,
+    slots=False,
+    frozen=False,
+    weakref_slot=True,
+    str=False,
+    auto_attribs=False,
+    kw_only=False,
+    cache_hash=False,
+    auto_exc=False,
+    eq=None,
+    order=None,
+    auto_detect=False,
+    collect_by_mro=False,
+    getstate_setstate=None,
+    on_setattr=None,
+    field_transformer=None,
+    match_args=True,
+):
     r"""
     A class decorator that adds `dunder
     <https://wiki.python.org/moin/DunderAlias>`_\ -methods according to the
-    specified attributes using :func:`attr.ib` or the *these* argument.
-
-    :param these: A dictionary of name to :func:`attr.ib` mappings.  This is
+    specified attributes using `attr.ib` or the *these* argument.
+
+    :param these: A dictionary of name to `attr.ib` mappings.  This is
         useful to avoid the definition of your attributes within the class body
         because you can't (e.g. if you want to add ``__repr__`` methods to
         Django models) or don't want to.
 
         If *these* is not ``None``, ``attrs`` will *not* search the class body
-        for attributes.
-
-    :type these: :class:`dict` of :class:`str` to :func:`attr.ib`
+        for attributes and will *not* remove any attributes from it.
+
+        If *these* is an ordered dict (`dict` on Python 3.6+,
+        `collections.OrderedDict` otherwise), the order is deduced from
+        the order of the attributes inside *these*.  Otherwise the order
+        of the definition of the attributes is used.
+
+    :type these: `dict` of `str` to `attr.ib`
 
     :param str repr_ns: When using nested classes, there's no way in Python 2
         to automatically detect that.  Therefore it's possible to set the
         namespace explicitly for a more meaningful ``repr`` output.
+    :param bool auto_detect: Instead of setting the *init*, *repr*, *eq*,
+        *order*, and *hash* arguments explicitly, assume they are set to
+        ``True`` **unless any** of the involved methods for one of the
+        arguments is implemented in the *current* class (i.e. it is *not*
+        inherited from some base class).
+
+        So for example by implementing ``__eq__`` on a class yourself,
+        ``attrs`` will deduce ``eq=False`` and will create *neither*
+        ``__eq__`` *nor* ``__ne__`` (but Python classes come with a sensible
+        ``__ne__`` by default, so it *should* be enough to only implement
+        ``__eq__`` in most cases).
+
+        .. warning::
+
+           If you prevent ``attrs`` from creating the ordering methods for you
+           (``order=False``, e.g. by implementing ``__le__``), it becomes
+           *your* responsibility to make sure its ordering is sound. The best
+           way is to use the `functools.total_ordering` decorator.
+
+
+        Passing ``True`` or ``False`` to *init*, *repr*, *eq*, *order*,
+        *cmp*, or *hash* overrides whatever *auto_detect* would determine.
+
+        *auto_detect* requires Python 3. Setting it ``True`` on Python 2 raises
+        an `attrs.exceptions.PythonTooOldError`.
+
     :param bool repr: Create a ``__repr__`` method with a human readable
-        represantation of ``attrs`` attributes..
+        representation of ``attrs`` attributes.
     :param bool str: Create a ``__str__`` method that is identical to
         ``__repr__``.  This is usually not necessary except for
-        :class:`Exception`\ s.
-    :param bool cmp: Create ``__eq__``, ``__ne__``, ``__lt__``, ``__le__``,
-        ``__gt__``, and ``__ge__`` methods that compare the class as if it were
-        a tuple of its ``attrs`` attributes.  But the attributes are *only*
-        compared, if the type of both classes is *identical*!
-    :param hash: If ``None`` (default), the ``__hash__`` method is generated
-        according how *cmp* and *frozen* are set.
+        `Exception`\ s.
+    :param Optional[bool] eq: If ``True`` or ``None`` (default), add ``__eq__``
+        and ``__ne__`` methods that check two instances for equality.
+
+        They compare the instances as if they were tuples of their ``attrs``
+        attributes if and only if the types of both classes are *identical*!
+    :param Optional[bool] order: If ``True``, add ``__lt__``, ``__le__``,
+        ``__gt__``, and ``__ge__`` methods that behave like *eq* above and
+        allow instances to be ordered. If ``None`` (default) mirror value of
+        *eq*.
+    :param Optional[bool] cmp: Setting *cmp* is equivalent to setting *eq*
+        and *order* to the same value. Must not be mixed with *eq* or *order*.
+    :param Optional[bool] hash: If ``None`` (default), the ``__hash__`` method
+        is generated according to how *eq* and *frozen* are set.
 
         1. If *both* are True, ``attrs`` will generate a ``__hash__`` for you.
-        2. If *cmp* is True and *frozen* is False, ``__hash__`` will be set to
+        2. If *eq* is True and *frozen* is False, ``__hash__`` will be set to
            None, marking it unhashable (which it is).
-        3. If *cmp* is False, ``__hash__`` will be left untouched meaning the
-           ``__hash__`` method of the superclass will be used (if superclass is
+        3. If *eq* is False, ``__hash__`` will be left untouched meaning the
+           ``__hash__`` method of the base class will be used (if base class is
            ``object``, this means it will fall back to id-based hashing).
 
         Although not recommended, you can decide for yourself and force
@@ -279,29 +1286,37 @@
         didn't freeze it programmatically) by passing ``True`` or not.  Both of
         these cases are rather special and should be used carefully.
 
-        See the `Python documentation \
-        <https://docs.python.org/3/reference/datamodel.html#object.__hash__>`_
-        and the `GitHub issue that led to the default behavior \
-        <https://github.com/python-attrs/attrs/issues/136>`_ for more details.
-    :type hash: ``bool`` or ``None``
-    :param bool init: Create a ``__init__`` method that initialiazes the
-        ``attrs`` attributes.  Leading underscores are stripped for the
-        argument name.  If a ``__attrs_post_init__`` method exists on the
-        class, it will be called after the class is fully initialized.
-    :param bool slots: Create a slots_-style class that's more
-        memory-efficient.  See :ref:`slots` for further ramifications.
+        See our documentation on `hashing`, Python's documentation on
+        `object.__hash__`, and the `GitHub issue that led to the default \
+        behavior <https://github.com/python-attrs/attrs/issues/136>`_ for more
+        details.
+    :param bool init: Create a ``__init__`` method that initializes the
+        ``attrs`` attributes. Leading underscores are stripped for the argument
+        name. If a ``__attrs_pre_init__`` method exists on the class, it will
+        be called before the class is initialized. If a ``__attrs_post_init__``
+        method exists on the class, it will be called after the class is fully
+        initialized.
+
+        If ``init`` is ``False``, an ``__attrs_init__`` method will be
+        injected instead. This allows you to define a custom ``__init__``
+        method that can do pre-init work such as ``super().__init__()``,
+        and then call ``__attrs_init__()`` and ``__attrs_post_init__()``.
+    :param bool slots: Create a `slotted class <slotted classes>` that's more
+        memory-efficient. Slotted classes are generally superior to the default
+        dict classes, but have some gotchas you should know about, so we
+        encourage you to read the `glossary entry <slotted classes>`.
     :param bool frozen: Make instances immutable after initialization.  If
         someone attempts to modify a frozen instance,
-        :exc:`attr.exceptions.FrozenInstanceError` is raised.
-
-        Please note:
+        `attr.exceptions.FrozenInstanceError` is raised.
+
+        .. note::
 
             1. This is achieved by installing a custom ``__setattr__`` method
-               on your class so you can't implement an own one.
+               on your class, so you can't implement your own.
 
             2. True immutability is impossible in Python.
 
-            3. This *does* have a minor a runtime performance :ref:`impact
+            3. This *does* have a minor runtime performance `impact
                <how-frozen>` when initializing new instances.  In other words:
                ``__init__`` is slightly slower with ``frozen=True``.
 
@@ -310,316 +1325,651 @@
                circumvent that limitation by using
                ``object.__setattr__(self, "attribute_name", value)``.
 
-        ..  _slots: https://docs.python.org/3.5/reference/datamodel.html#slots
-
-    ..  versionadded:: 16.0.0 *slots*
-    ..  versionadded:: 16.1.0 *frozen*
-    ..  versionadded:: 16.3.0 *str*, and support for ``__attrs_post_init__``.
-    ..  versionchanged::
-            17.1.0 *hash* supports ``None`` as value which is also the default
-            now.
+            5. Subclasses of a frozen class are frozen too.
+
+    :param bool weakref_slot: Make instances weak-referenceable.  This has no
+        effect unless ``slots`` is also enabled.
+    :param bool auto_attribs: If ``True``, collect :pep:`526`-annotated
+        attributes (Python 3.6 and later only) from the class body.
+
+        In this case, you **must** annotate every field.  If ``attrs``
+        encounters a field that is set to an `attr.ib` but lacks a type
+        annotation, an `attr.exceptions.UnannotatedAttributeError` is
+        raised.  Use ``field_name: typing.Any = attr.ib(...)`` if you don't
+        want to set a type.
+
+        If you assign a value to those attributes (e.g. ``x: int = 42``), that
+        value becomes the default value like if it were passed using
+        ``attr.ib(default=42)``.  Passing an instance of `attrs.Factory` also
+        works as expected in most cases (see warning below).
+
+        Attributes annotated as `typing.ClassVar`, and attributes that are
+        neither annotated nor set to an `attr.ib` are **ignored**.
+
+        .. warning::
+           For features that use the attribute name to create decorators (e.g.
+           `validators <validators>`), you still *must* assign `attr.ib` to
+           them. Otherwise Python will either not find the name or try to use
+           the default value to call e.g. ``validator`` on it.
+
+           These errors can be quite confusing and are probably the most
+           common bug report on our bug tracker.
+
+    :param bool kw_only: Make all attributes keyword-only (Python 3+)
+        in the generated ``__init__`` (if ``init`` is ``False``, this
+        parameter is ignored).
+    :param bool cache_hash: Ensure that the object's hash code is computed
+        only once and stored on the object.  If this is set to ``True``,
+        hashing must be either explicitly or implicitly enabled for this
+        class.  If the hash code is cached, avoid any reassignments of
+        fields involved in hash code computation or mutations of the objects
+        those fields point to after object creation.  If such changes occur,
+        the behavior of the object's hash code is undefined.
+    :param bool auto_exc: If the class subclasses `BaseException`
+        (which implicitly includes any subclass of any exception), the
+        following happens so that it behaves like a well-behaved Python
+        exception class:
+
+        - the values for *eq*, *order*, and *hash* are ignored and the
+          instances compare and hash by the instance's ids (N.B. ``attrs`` will
+          *not* remove existing implementations of ``__hash__`` or the equality
+          methods. It just won't add its own.),
+        - all attributes that are either passed into ``__init__`` or have a
+          default value are additionally available as a tuple in the ``args``
+          attribute,
+        - the value of *str* is ignored, leaving ``__str__`` to base classes.
+    :param bool collect_by_mro: Setting this to `True` fixes the way ``attrs``
+       collects attributes from base classes.  The default behavior is
+       incorrect in certain cases of multiple inheritance.  It should be on by
+       default but is kept off for backward-compatibility.
+
+       See issue `#428 <https://github.com/python-attrs/attrs/issues/428>`_ for
+       more details.
+
+    :param Optional[bool] getstate_setstate:
+       .. note::
+          This is usually only interesting for slotted classes and you should
+          probably just set *auto_detect* to `True`.
+
+       If `True`, ``__getstate__`` and
+       ``__setstate__`` are generated and attached to the class. This is
+       necessary for slotted classes to be pickleable. If left `None`, it's
+       `True` by default for slotted classes and ``False`` for dict classes.
+
+       If *auto_detect* is `True`, and *getstate_setstate* is left `None`,
+       and **either** ``__getstate__`` or ``__setstate__`` is detected directly
+       on the class (i.e. not inherited), it is set to `False` (this is usually
+       what you want).
+
+    :param on_setattr: A callable that is run whenever the user attempts to set
+        an attribute (either by assignment like ``i.x = 42`` or by using
+        `setattr` like ``setattr(i, "x", 42)``). It receives the same arguments
+        as validators: the instance, the attribute that is being modified, and
+        the new value.
+
+        If no exception is raised, the attribute is set to the return value of
+        the callable.
+
+        If a list of callables is passed, they're automatically wrapped in an
+        `attrs.setters.pipe`.
+    :type on_setattr: `callable`, or a list of callables, or `None`, or
+        `attrs.setters.NO_OP`
+
+    :param Optional[callable] field_transformer:
+        A function that is called with the original class object and all
+        fields right before ``attrs`` finalizes the class.  You can use
+        this, e.g., to automatically add converters or validators to
+        fields based on their types.  See `transform-fields` for more details.
+
+    :param bool match_args:
+        If `True` (default), set ``__match_args__`` on the class to support
+        :pep:`634` (Structural Pattern Matching). It is a tuple of all
+        non-keyword-only ``__init__`` parameter names on Python 3.10 and later.
+        Ignored on older Python versions.
+
+    .. versionadded:: 16.0.0 *slots*
+    .. versionadded:: 16.1.0 *frozen*
+    .. versionadded:: 16.3.0 *str*
+    .. versionadded:: 16.3.0 Support for ``__attrs_post_init__``.
+    .. versionchanged:: 17.1.0
+       *hash* supports ``None`` as value which is also the default now.
+    .. versionadded:: 17.3.0 *auto_attribs*
+    .. versionchanged:: 18.1.0
+       If *these* is passed, no attributes are deleted from the class body.
+    .. versionchanged:: 18.1.0 If *these* is ordered, the order is retained.
+    .. versionadded:: 18.2.0 *weakref_slot*
+    .. deprecated:: 18.2.0
+       ``__lt__``, ``__le__``, ``__gt__``, and ``__ge__`` now raise a
+       `DeprecationWarning` if the classes compared are subclasses of
+       each other. ``__eq__`` and ``__ne__`` never tried to compare subclasses
+       to each other.
+    .. versionchanged:: 19.2.0
+       ``__lt__``, ``__le__``, ``__gt__``, and ``__ge__`` now do not consider
+       subclasses comparable anymore.
+    .. versionadded:: 18.2.0 *kw_only*
+    .. versionadded:: 18.2.0 *cache_hash*
+    .. versionadded:: 19.1.0 *auto_exc*
+    .. deprecated:: 19.2.0 *cmp* Removal on or after 2021-06-01.
+    .. versionadded:: 19.2.0 *eq* and *order*
+    .. versionadded:: 20.1.0 *auto_detect*
+    .. versionadded:: 20.1.0 *collect_by_mro*
+    .. versionadded:: 20.1.0 *getstate_setstate*
+    .. versionadded:: 20.1.0 *on_setattr*
+    .. versionadded:: 20.3.0 *field_transformer*
+    .. versionchanged:: 21.1.0
+       ``init=False`` injects ``__attrs_init__``
+    .. versionchanged:: 21.1.0 Support for ``__attrs_pre_init__``
+    .. versionchanged:: 21.1.0 *cmp* undeprecated
+    .. versionadded:: 21.3.0 *match_args*
     """
+    eq_, order_ = _determine_attrs_eq_order(cmp, eq, order, None)
+    hash_ = hash  # work around the lack of nonlocal
+
+    if isinstance(on_setattr, (list, tuple)):
+        on_setattr = setters.pipe(*on_setattr)
+
     def wrap(cls):
-        if getattr(cls, "__class__", None) is None:
-            raise TypeError("attrs only works with new-style classes.")
-
-        if repr is False and str is True:
-            raise ValueError(
-                "__str__ can only be generated if a __repr__ exists."
-            )
-
-        if slots:
-            # Only need this later if we're using slots.
-            if these is None:
-                ca_list = [name
-                           for name, attr
-                           in cls.__dict__.items()
-                           if isinstance(attr, _CountingAttr)]
-            else:
-                ca_list = list(iterkeys(these))
-        _transform_attrs(cls, these)
-
-        # Can't just re-use frozen name because Python's scoping. :(
-        # Can't compare function objects because Python 2 is terrible. :(
-        effectively_frozen = _has_frozen_superclass(cls) or frozen
-        if repr is True:
-            cls = _add_repr(cls, ns=repr_ns)
+        is_frozen = frozen or _has_frozen_base_class(cls)
+        is_exc = auto_exc is True and issubclass(cls, BaseException)
+        has_own_setattr = auto_detect and _has_own_attribute(
+            cls, "__setattr__"
+        )
+
+        if has_own_setattr and is_frozen:
+            raise ValueError("Can't freeze a class with a custom __setattr__.")
+
+        builder = _ClassBuilder(
+            cls,
+            these,
+            slots,
+            is_frozen,
+            weakref_slot,
+            _determine_whether_to_implement(
+                cls,
+                getstate_setstate,
+                auto_detect,
+                ("__getstate__", "__setstate__"),
+                default=slots,
+            ),
+            auto_attribs,
+            kw_only,
+            cache_hash,
+            is_exc,
+            collect_by_mro,
+            on_setattr,
+            has_own_setattr,
+            field_transformer,
+        )
+        if _determine_whether_to_implement(
+            cls, repr, auto_detect, ("__repr__",)
+        ):
+            builder.add_repr(repr_ns)
         if str is True:
-            cls.__str__ = cls.__repr__
-        if cmp is True:
-            cls = _add_cmp(cls)
-
+            builder.add_str()
+
+        eq = _determine_whether_to_implement(
+            cls, eq_, auto_detect, ("__eq__", "__ne__")
+        )
+        if not is_exc and eq is True:
+            builder.add_eq()
+        if not is_exc and _determine_whether_to_implement(
+            cls, order_, auto_detect, ("__lt__", "__le__", "__gt__", "__ge__")
+        ):
+            builder.add_order()
+
+        builder.add_setattr()
+
+        if (
+            hash_ is None
+            and auto_detect is True
+            and _has_own_attribute(cls, "__hash__")
+        ):
+            hash = False
+        else:
+            hash = hash_
         if hash is not True and hash is not False and hash is not None:
+            # Can't use `hash in (True, False, None)` because 1 == True, for example.
             raise TypeError(
                 "Invalid value for hash.  Must be True, False, or None."
             )
-        elif hash is False or (hash is None and cmp is False):
-            pass
-        elif hash is True or (hash is None and cmp is True and frozen is True):
-            cls = _add_hash(cls)
+        elif hash is False or (hash is None and eq is False) or is_exc:
+            # Don't do anything. Should fall back to __object__'s __hash__
+            # which is by id.
+            if cache_hash:
+                raise TypeError(
+                    "Invalid value for cache_hash.  To use hash caching,"
+                    " hashing must be either explicitly or implicitly "
+                    "enabled."
+                )
+        elif hash is True or (
+            hash is None and eq is True and is_frozen is True
+        ):
+            # Build a __hash__ if told so, or if it's safe.
+            builder.add_hash()
         else:
-            cls.__hash__ = None
-
-        if init is True:
-            cls = _add_init(cls, effectively_frozen)
-        if effectively_frozen is True:
-            cls.__setattr__ = _frozen_setattrs
-            cls.__delattr__ = _frozen_delattrs
-            if slots is True:
-                # slots and frozen require __getstate__/__setstate__ to work
-                cls = _add_pickle(cls)
-        if slots is True:
-            cls_dict = dict(cls.__dict__)
-            cls_dict["__slots__"] = tuple(ca_list)
-            for ca_name in ca_list:
-                # It might not actually be in there, e.g. if using 'these'.
-                cls_dict.pop(ca_name, None)
-            cls_dict.pop("__dict__", None)
-
-            qualname = getattr(cls, "__qualname__", None)
-            cls = type(cls)(cls.__name__, cls.__bases__, cls_dict)
-            if qualname is not None:
-                cls.__qualname__ = qualname
-
-        return cls
-
-    # attrs_or class type depends on the usage of the decorator.  It's a class
-    # if it's used as `@attributes` but ``None`` if used # as `@attributes()`.
+            # Raise TypeError on attempts to hash.
+            if cache_hash:
+                raise TypeError(
+                    "Invalid value for cache_hash.  To use hash caching,"
+                    " hashing must be either explicitly or implicitly "
+                    "enabled."
+                )
+            builder.make_unhashable()
+
+        if _determine_whether_to_implement(
+            cls, init, auto_detect, ("__init__",)
+        ):
+            builder.add_init()
+        else:
+            builder.add_attrs_init()
+            if cache_hash:
+                raise TypeError(
+                    "Invalid value for cache_hash.  To use hash caching,"
+                    " init must be True."
+                )
+
+        if (
+            PY310
+            and match_args
+            and not _has_own_attribute(cls, "__match_args__")
+        ):
+            builder.add_match_args()
+
+        return builder.build_class()
+
+    # maybe_cls's type depends on the usage of the decorator.  It's a class
+    # if it's used as `@attrs` but ``None`` if used as `@attrs()`.
     if maybe_cls is None:
         return wrap
     else:
         return wrap(maybe_cls)
 
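+# Illustrative sketch (not part of this patch) of the two calling conventions
+# handled above (the decorator is exposed publicly as ``attr.s``):
+#
+#     >>> import attr
+#     >>> @attr.s                    # bare use: maybe_cls is the class
+#     ... class A(object):
+#     ...     x = attr.ib()
+#     >>> @attr.s(slots=True)        # with arguments: maybe_cls is None
+#     ... class B(object):
+#     ...     x = attr.ib()
+#     >>> A(1) == A(1), B(1).x
+#     (True, 1)
+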
 
-if PY2:
-    def _has_frozen_superclass(cls):
-        """
-        Check whether *cls* has a frozen ancestor by looking at its
-        __setattr__.
-        """
-        return (
-            getattr(
-                cls.__setattr__, "__module__", None
-            ) == _frozen_setattrs.__module__ and
-            cls.__setattr__.__name__ == _frozen_setattrs.__name__
+_attrs = attrs
+"""
+Internal alias so we can use it in functions that take an argument called
+*attrs*.
+"""
+
+
+def _has_frozen_base_class(cls):
+    """
+    Check whether *cls* has a frozen ancestor by looking at its
+    __setattr__.
+    """
+    return cls.__setattr__ is _frozen_setattrs
+
+
+def _generate_unique_filename(cls, func_name):
+    """
+    Create a "filename" suitable for a function being generated.
+    """
+    unique_filename = "<attrs generated {} {}.{}>".format(
+        func_name,
+        cls.__module__,
+        getattr(cls, "__qualname__", cls.__name__),
+    )
+    return unique_filename
+
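+# Illustrative sketch (not part of this patch); the module part depends on
+# where the class is defined:
+#
+#     >>> class C: pass
+#     >>> _generate_unique_filename(C, "repr")
+#     '<attrs generated repr __main__.C>'
+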
+
+def _make_hash(cls, attrs, frozen, cache_hash):
+    attrs = tuple(
+        a for a in attrs if a.hash is True or (a.hash is None and a.eq is True)
+    )
+
+    tab = "        "
+
+    unique_filename = _generate_unique_filename(cls, "hash")
+    type_hash = hash(unique_filename)
+    # If eq uses custom key functions, we need to include them in globs
+    globs = {}
+
+    hash_def = "def __hash__(self"
+    hash_func = "hash(("
+    closing_braces = "))"
+    if not cache_hash:
+        hash_def += "):"
+    else:
+        hash_def += ", *"
+
+        hash_def += (
+            ", _cache_wrapper="
+            + "__import__('attr._make')._make._CacheHashWrapper):"
         )
-else:
-    def _has_frozen_superclass(cls):
+        hash_func = "_cache_wrapper(" + hash_func
+        closing_braces += ")"
+
+    method_lines = [hash_def]
+
+    def append_hash_computation_lines(prefix, indent):
         """
-        Check whether *cls* has a frozen ancestor by looking at its
-        __setattr__.
+        Generate the code for actually computing the hash code.
+        Below this will either be returned directly or used to compute
+        a value which is then cached, depending on the value of cache_hash
         """
-        return cls.__setattr__ == _frozen_setattrs
-
-
-def _attrs_to_tuple(obj, attrs):
-    """
-    Create a tuple of all values of *obj*'s *attrs*.
-    """
-    return tuple(getattr(obj, a.name) for a in attrs)
-
-
-def _add_hash(cls, attrs=None):
+
+        method_lines.extend(
+            [
+                indent + prefix + hash_func,
+                indent + "        %d," % (type_hash,),
+            ]
+        )
+
+        for a in attrs:
+            if a.eq_key:
+                cmp_name = "_%s_key" % (a.name,)
+                globs[cmp_name] = a.eq_key
+                method_lines.append(
+                    indent + "        %s(self.%s)," % (cmp_name, a.name)
+                )
+            else:
+                method_lines.append(indent + "        self.%s," % a.name)
+
+        method_lines.append(indent + "    " + closing_braces)
+
+    if cache_hash:
+        method_lines.append(tab + "if self.%s is None:" % _hash_cache_field)
+        if frozen:
+            append_hash_computation_lines(
+                "object.__setattr__(self, '%s', " % _hash_cache_field, tab * 2
+            )
+            method_lines.append(tab * 2 + ")")  # close __setattr__
+        else:
+            append_hash_computation_lines(
+                "self.%s = " % _hash_cache_field, tab * 2
+            )
+        method_lines.append(tab + "return self.%s" % _hash_cache_field)
+    else:
+        append_hash_computation_lines("return ", tab)
+
+    script = "\n".join(method_lines)
+    return _make_method("__hash__", script, unique_filename, globs)
+
+
+def _add_hash(cls, attrs):
     """
     Add a hash method to *cls*.
     """
-    if attrs is None:
-        attrs = [a
-                 for a in cls.__attrs_attrs__
-                 if a.hash is True or (a.hash is None and a.cmp is True)]
-
-    def hash_(self):
+    cls.__hash__ = _make_hash(cls, attrs, frozen=False, cache_hash=False)
+    return cls
+
+
+def _make_ne():
+    """
+    Create __ne__ method.
+    """
+
+    def __ne__(self, other):
         """
-        Automatically created by attrs.
+        Check equality and either forward a NotImplemented or
+        return the result negated.
         """
-        return hash(_attrs_to_tuple(self, attrs))
-
-    cls.__hash__ = hash_
-    return cls
-
-
-def _add_cmp(cls, attrs=None):
+        result = self.__eq__(other)
+        if result is NotImplemented:
+            return NotImplemented
+
+        return not result
+
+    return __ne__
+
+
+def _make_eq(cls, attrs):
+    """
+    Create __eq__ method for *cls* with *attrs*.
     """
-    Add comparison methods to *cls*.
+    attrs = [a for a in attrs if a.eq]
+
+    unique_filename = _generate_unique_filename(cls, "eq")
+    lines = [
+        "def __eq__(self, other):",
+        "    if other.__class__ is not self.__class__:",
+        "        return NotImplemented",
+    ]
+
+    # We can't just do a big "self.x == other.x and ..." clause due to
+    # irregularities like nan == nan is false but (nan,) == (nan,) is true.
+    globs = {}
+    if attrs:
+        lines.append("    return  (")
+        others = ["    ) == ("]
+        for a in attrs:
+            if a.eq_key:
+                cmp_name = "_%s_key" % (a.name,)
+                # Add the key function to the global namespace
+                # of the evaluated function.
+                globs[cmp_name] = a.eq_key
+                lines.append(
+                    "        %s(self.%s),"
+                    % (
+                        cmp_name,
+                        a.name,
+                    )
+                )
+                others.append(
+                    "        %s(other.%s),"
+                    % (
+                        cmp_name,
+                        a.name,
+                    )
+                )
+            else:
+                lines.append("        self.%s," % (a.name,))
+                others.append("        other.%s," % (a.name,))
+
+        lines += others + ["    )"]
+    else:
+        lines.append("    return True")
+
+    script = "\n".join(lines)
+
+    return _make_method("__eq__", script, unique_filename, globs)
+
+
+def _make_order(cls, attrs):
     """
-    if attrs is None:
-        attrs = [a for a in cls.__attrs_attrs__ if a.cmp]
+    Create ordering methods for *cls* with *attrs*.
+    """
+    attrs = [a for a in attrs if a.order]
 
     def attrs_to_tuple(obj):
         """
         Save us some typing.
         """
-        return _attrs_to_tuple(obj, attrs)
-
-    def eq(self, other):
+        return tuple(
+            key(value) if key else value
+            for value, key in (
+                (getattr(obj, a.name), a.order_key) for a in attrs
+            )
+        )
+
+    def __lt__(self, other):
+        """
+        Automatically created by attrs.
+        """
+        if other.__class__ is self.__class__:
+            return attrs_to_tuple(self) < attrs_to_tuple(other)
+
+        return NotImplemented
+
+    def __le__(self, other):
+        """
+        Automatically created by attrs.
+        """
+        if other.__class__ is self.__class__:
+            return attrs_to_tuple(self) <= attrs_to_tuple(other)
+
+        return NotImplemented
+
+    def __gt__(self, other):
+        """
+        Automatically created by attrs.
+        """
+        if other.__class__ is self.__class__:
+            return attrs_to_tuple(self) > attrs_to_tuple(other)
+
+        return NotImplemented
+
+    def __ge__(self, other):
         """
         Automatically created by attrs.
         """
         if other.__class__ is self.__class__:
-            return attrs_to_tuple(self) == attrs_to_tuple(other)
-        else:
-            return NotImplemented
-
-    def ne(self, other):
-        """
-        Automatically created by attrs.
-        """
-        result = eq(self, other)
-        if result is NotImplemented:
-            return NotImplemented
-        else:
-            return not result
-
-    def lt(self, other):
-        """
-        Automatically created by attrs.
-        """
-        if isinstance(other, self.__class__):
-            return attrs_to_tuple(self) < attrs_to_tuple(other)
-        else:
-            return NotImplemented
-
-    def le(self, other):
-        """
-        Automatically created by attrs.
-        """
-        if isinstance(other, self.__class__):
-            return attrs_to_tuple(self) <= attrs_to_tuple(other)
-        else:
-            return NotImplemented
-
-    def gt(self, other):
-        """
-        Automatically created by attrs.
-        """
-        if isinstance(other, self.__class__):
-            return attrs_to_tuple(self) > attrs_to_tuple(other)
-        else:
-            return NotImplemented
-
-    def ge(self, other):
-        """
-        Automatically created by attrs.
-        """
-        if isinstance(other, self.__class__):
             return attrs_to_tuple(self) >= attrs_to_tuple(other)
-        else:
-            return NotImplemented
-
-    cls.__eq__ = eq
-    cls.__ne__ = ne
-    cls.__lt__ = lt
-    cls.__le__ = le
-    cls.__gt__ = gt
-    cls.__ge__ = ge
+
+        return NotImplemented
+
+    return __lt__, __le__, __gt__, __ge__
+
+
+def _add_eq(cls, attrs=None):
+    """
+    Add equality methods to *cls* with *attrs*.
+    """
+    if attrs is None:
+        attrs = cls.__attrs_attrs__
+
+    cls.__eq__ = _make_eq(cls, attrs)
+    cls.__ne__ = _make_ne()
 
     return cls
 
 
+if HAS_F_STRINGS:
+
+    def _make_repr(attrs, ns, cls):
+        unique_filename = _generate_unique_filename(cls, "repr")
+        # Figure out which attributes to include, and which function to use to
+        # format them. The a.repr value can be either bool or a custom
+        # callable.
+        attr_names_with_reprs = tuple(
+            (a.name, (repr if a.repr is True else a.repr), a.init)
+            for a in attrs
+            if a.repr is not False
+        )
+        globs = {
+            name + "_repr": r
+            for name, r, _ in attr_names_with_reprs
+            if r != repr
+        }
+        globs["_compat"] = _compat
+        globs["AttributeError"] = AttributeError
+        globs["NOTHING"] = NOTHING
+        attribute_fragments = []
+        for name, r, i in attr_names_with_reprs:
+            accessor = (
+                "self." + name
+                if i
+                else 'getattr(self, "' + name + '", NOTHING)'
+            )
+            fragment = (
+                "%s={%s!r}" % (name, accessor)
+                if r == repr
+                else "%s={%s_repr(%s)}" % (name, name, accessor)
+            )
+            attribute_fragments.append(fragment)
+        repr_fragment = ", ".join(attribute_fragments)
+
+        if ns is None:
+            cls_name_fragment = (
+                '{self.__class__.__qualname__.rsplit(">.", 1)[-1]}'
+            )
+        else:
+            cls_name_fragment = ns + ".{self.__class__.__name__}"
+
+        lines = [
+            "def __repr__(self):",
+            "  try:",
+            "    already_repring = _compat.repr_context.already_repring",
+            "  except AttributeError:",
+            "    already_repring = {id(self),}",
+            "    _compat.repr_context.already_repring = already_repring",
+            "  else:",
+            "    if id(self) in already_repring:",
+            "      return '...'",
+            "    else:",
+            "      already_repring.add(id(self))",
+            "  try:",
+            "    return f'%s(%s)'" % (cls_name_fragment, repr_fragment),
+            "  finally:",
+            "    already_repring.remove(id(self))",
+        ]
+
+        return _make_method(
+            "__repr__", "\n".join(lines), unique_filename, globs=globs
+        )
+
+else:
+
+    def _make_repr(attrs, ns, _):
+        """
+        Make a repr method that includes relevant *attrs*, adding *ns* to the
+        full name.
+        """
+
+        # Figure out which attributes to include, and which function to use to
+        # format them. The a.repr value can be either bool or a custom
+        # callable.
+        attr_names_with_reprs = tuple(
+            (a.name, repr if a.repr is True else a.repr)
+            for a in attrs
+            if a.repr is not False
+        )
+
+        def __repr__(self):
+            """
+            Automatically created by attrs.
+            """
+            try:
+                already_repring = _compat.repr_context.already_repring
+            except AttributeError:
+                already_repring = set()
+                _compat.repr_context.already_repring = already_repring
+
+            if id(self) in already_repring:
+                return "..."
+            real_cls = self.__class__
+            if ns is None:
+                class_name = real_cls.__qualname__.rsplit(">.", 1)[-1]
+            else:
+                class_name = ns + "." + real_cls.__name__
+
+            # Since 'self' remains on the stack (i.e.: strongly referenced)
+            # for the duration of this call, it's safe to depend on id(...)
+            # stability, and not need to track the instance and therefore
+            # worry about properties like weakref- or hash-ability.
+            already_repring.add(id(self))
+            try:
+                result = [class_name, "("]
+                first = True
+                for name, attr_repr in attr_names_with_reprs:
+                    if first:
+                        first = False
+                    else:
+                        result.append(", ")
+                    result.extend(
+                        (name, "=", attr_repr(getattr(self, name, NOTHING)))
+                    )
+                return "".join(result) + ")"
+            finally:
+                already_repring.remove(id(self))
+
+        return __repr__
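
Both _make_repr variants use the thread-local already_repring set to break
reference cycles. A short session showing the effect, assuming a recent
attrs release:

    >>> import attr
    >>> @attr.s
    ... class Node(object):
    ...     child = attr.ib(default=None)
    >>> n = Node()
    >>> n.child = n  # self-referential instance
    >>> repr(n)      # re-entry returns "..." instead of recursing forever
    'Node(child=...)'
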
+
+
 def _add_repr(cls, ns=None, attrs=None):
     """
     Add a repr method to *cls*.
     """
     if attrs is None:
-        attrs = [a for a in cls.__attrs_attrs__ if a.repr]
-
-    def repr_(self):
-        """
-        Automatically created by attrs.
-        """
-        real_cls = self.__class__
-        if ns is None:
-            qualname = getattr(real_cls, "__qualname__", None)
-            if qualname is not None:
-                class_name = qualname.rsplit(">.", 1)[-1]
-            else:
-                class_name = real_cls.__name__
-        else:
-            class_name = ns + "." + real_cls.__name__
-
-        return "{0}({1})".format(
-            class_name,
-            ", ".join(a.name + "=" + repr(getattr(self, a.name))
-                      for a in attrs)
-        )
-    cls.__repr__ = repr_
-    return cls
-
-
-def _add_init(cls, frozen):
-    """
-    Add a __init__ method to *cls*.  If *frozen* is True, make it immutable.
-    """
-    attrs = [a for a in cls.__attrs_attrs__
-             if a.init or a.default is not NOTHING]
-
-    # We cache the generated init methods for the same kinds of attributes.
-    sha1 = hashlib.sha1()
-    r = repr(attrs)
-    if not isinstance(r, bytes):
-        r = r.encode('utf-8')
-    sha1.update(r)
-    unique_filename = "<attrs generated init {0}>".format(
-        sha1.hexdigest()
-    )
-
-    script, globs = _attrs_to_script(
-        attrs,
-        frozen,
-        getattr(cls, "__attrs_post_init__", False),
-    )
-    locs = {}
-    bytecode = compile(script, unique_filename, "exec")
-    attr_dict = dict((a.name, a) for a in attrs)
-    globs.update({
-        "NOTHING": NOTHING,
-        "attr_dict": attr_dict,
-    })
-    if frozen is True:
-        # Save the lookup overhead in __init__ if we need to circumvent
-        # immutability.
-        globs["_cached_setattr"] = _obj_setattr
-    eval(bytecode, globs, locs)
-    init = locs["__init__"]
-
-    # In order of debuggers like PDB being able to step through the code,
-    # we add a fake linecache entry.
-    linecache.cache[unique_filename] = (
-        len(script),
-        None,
-        script.splitlines(True),
-        unique_filename
-    )
-    cls.__init__ = init
-    return cls
-
-
-def _add_pickle(cls):
-    """
-    Add pickle helpers, needed for frozen and slotted classes
-    """
-    def _slots_getstate__(obj):
-        """
-        Play nice with pickle.
-        """
-        return tuple(getattr(obj, a.name) for a in fields(obj.__class__))
-
-    def _slots_setstate__(obj, state):
-        """
-        Play nice with pickle.
-        """
-        __bound_setattr = _obj_setattr.__get__(obj, Attribute)
-        for a, value in zip(fields(obj.__class__), state):
-            __bound_setattr(a.name, value)
-
-    cls.__getstate__ = _slots_getstate__
-    cls.__setstate__ = _slots_setstate__
+        attrs = cls.__attrs_attrs__
+
+    cls.__repr__ = _make_repr(attrs, ns, cls)
     return cls
 
 
 def fields(cls):
     """
-    Returns the tuple of ``attrs`` attributes for a class.
+    Return the tuple of ``attrs`` attributes for a class.
 
     The tuple also allows accessing the fields by their names (see below for
     examples).
@@ -630,12 +1980,12 @@
     :raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs``
         class.
 
-    :rtype: tuple (with name accesors) of :class:`attr.Attribute`
+    :rtype: tuple (with name accessors) of `attrs.Attribute`
 
     ..  versionchanged:: 16.2.0 Returned tuple allows accessing the fields
         by name.
     """
-    if not isclass(cls):
+    if not isinstance(cls, type):
         raise TypeError("Passed object must be a class.")
     attrs = getattr(cls, "__attrs_attrs__", None)
     if attrs is None:
@@ -645,6 +1995,34 @@
     return attrs
 
 
+def fields_dict(cls):
+    """
+    Return an ordered dictionary of ``attrs`` attributes for a class, whose
+    keys are the attribute names.
+
+    :param type cls: Class to introspect.
+
+    :raise TypeError: If *cls* is not a class.
+    :raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs``
+        class.
+
+    :rtype: an ordered dict where keys are attribute names and values are
+        `attrs.Attribute`\\ s. This will be a `dict` if it's
+        naturally ordered like on Python 3.6+ or an
+        :class:`~collections.OrderedDict` otherwise.
+
+    .. versionadded:: 18.1.0
+    """
+    if not isinstance(cls, type):
+        raise TypeError("Passed object must be a class.")
+    attrs = getattr(cls, "__attrs_attrs__", None)
+    if attrs is None:
+        raise NotAnAttrsClassError(
+            "{cls!r} is not an attrs-decorated class.".format(cls=cls)
+        )
+    return ordered_dict((a.name, a) for a in attrs)
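
fields and fields_dict expose the same Attribute instances, the former as a
tuple with name accessors and the latter keyed by attribute name:

    >>> import attr
    >>> @attr.s
    ... class C(object):
    ...     x = attr.ib()
    >>> attr.fields(C).x is attr.fields_dict(C)["x"]
    True
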
+
+
 def validate(inst):
     """
     Validate all attributes on *inst* that have a validator.
@@ -662,240 +2040,623 @@
             v(inst, a, getattr(inst, a.name))
 
 
-def _attrs_to_script(attrs, frozen, post_init):
+def _is_slot_cls(cls):
+    return "__slots__" in cls.__dict__
+
+
+def _is_slot_attr(a_name, base_attr_map):
+    """
+    Check if the attribute name comes from a slot class.
+    """
+    return a_name in base_attr_map and _is_slot_cls(base_attr_map[a_name])
+
+
+def _make_init(
+    cls,
+    attrs,
+    pre_init,
+    post_init,
+    frozen,
+    slots,
+    cache_hash,
+    base_attr_map,
+    is_exc,
+    cls_on_setattr,
+    attrs_init,
+):
+    has_cls_on_setattr = (
+        cls_on_setattr is not None and cls_on_setattr is not setters.NO_OP
+    )
+
+    if frozen and has_cls_on_setattr:
+        raise ValueError("Frozen classes can't use on_setattr.")
+
+    needs_cached_setattr = cache_hash or frozen
+    filtered_attrs = []
+    attr_dict = {}
+    for a in attrs:
+        if not a.init and a.default is NOTHING:
+            continue
+
+        filtered_attrs.append(a)
+        attr_dict[a.name] = a
+
+        if a.on_setattr is not None:
+            if frozen is True:
+                raise ValueError("Frozen classes can't use on_setattr.")
+
+            needs_cached_setattr = True
+        elif has_cls_on_setattr and a.on_setattr is not setters.NO_OP:
+            needs_cached_setattr = True
+
+    unique_filename = _generate_unique_filename(cls, "init")
+
+    script, globs, annotations = _attrs_to_init_script(
+        filtered_attrs,
+        frozen,
+        slots,
+        pre_init,
+        post_init,
+        cache_hash,
+        base_attr_map,
+        is_exc,
+        has_cls_on_setattr,
+        attrs_init,
+    )
+    if cls.__module__ in sys.modules:
+        # This makes typing.get_type_hints(CLS.__init__) resolve string types.
+        globs.update(sys.modules[cls.__module__].__dict__)
+
+    globs.update({"NOTHING": NOTHING, "attr_dict": attr_dict})
+
+    if needs_cached_setattr:
+        # Save the lookup overhead in __init__ if we need to circumvent
+        # setattr hooks.
+        globs["_setattr"] = _obj_setattr
+
+    init = _make_method(
+        "__attrs_init__" if attrs_init else "__init__",
+        script,
+        unique_filename,
+        globs,
+    )
+    init.__annotations__ = annotations
+
+    return init
+
+
+def _setattr(attr_name, value_var, has_on_setattr):
+    """
+    Use the cached object.setattr to set *attr_name* to *value_var*.
+    """
+    return "_setattr(self, '%s', %s)" % (attr_name, value_var)
+
+
+def _setattr_with_converter(attr_name, value_var, has_on_setattr):
+    """
+    Use the cached object.setattr to set *attr_name* to *value_var*, but run
+    its converter first.
+    """
+    return "_setattr(self, '%s', %s(%s))" % (
+        attr_name,
+        _init_converter_pat % (attr_name,),
+        value_var,
+    )
+
+
+def _assign(attr_name, value, has_on_setattr):
+    """
+    Unless *attr_name* has an on_setattr hook, use normal assignment. Otherwise
+    relegate to _setattr.
+    """
+    if has_on_setattr:
+        return _setattr(attr_name, value, True)
+
+    return "self.%s = %s" % (attr_name, value)
+
+
+def _assign_with_converter(attr_name, value_var, has_on_setattr):
+    """
+    Unless *attr_name* has an on_setattr hook, use normal assignment after
+    conversion. Otherwise relegate to _setattr_with_converter.
+    """
+    if has_on_setattr:
+        return _setattr_with_converter(attr_name, value_var, True)
+
+    return "self.%s = %s(%s)" % (
+        attr_name,
+        _init_converter_pat % (attr_name,),
+        value_var,
+    )
+
+
+def _attrs_to_init_script(
+    attrs,
+    frozen,
+    slots,
+    pre_init,
+    post_init,
+    cache_hash,
+    base_attr_map,
+    is_exc,
+    has_cls_on_setattr,
+    attrs_init,
+):
     """
     Return a script of an initializer for *attrs* and a dict of globals.
 
     The globals are expected by the generated script.
 
-     If *frozen* is True, we cannot set the attributes directly so we use
+    If *frozen* is True, we cannot set the attributes directly so we use
     a cached ``object.__setattr__``.
     """
     lines = []
+    if pre_init:
+        lines.append("self.__attrs_pre_init__()")
+
     if frozen is True:
-        lines.append(
-            # Circumvent the __setattr__ descriptor to save one lookup per
-            # assignment.
-            "_setattr = _cached_setattr.__get__(self, self.__class__)"
-        )
-
-        def fmt_setter(attr_name, value_var):
-            return "_setattr('%(attr_name)s', %(value_var)s)" % {
-                "attr_name": attr_name,
-                "value_var": value_var,
-            }
-
-        def fmt_setter_with_converter(attr_name, value_var):
-            conv_name = _init_convert_pat.format(attr_name)
-            return "_setattr('%(attr_name)s', %(conv)s(%(value_var)s))" % {
-                "attr_name": attr_name,
-                "value_var": value_var,
-                "conv": conv_name,
-            }
+        if slots is True:
+            fmt_setter = _setattr
+            fmt_setter_with_converter = _setattr_with_converter
+        else:
+            # Dict frozen classes assign directly to __dict__.
+            # But only if the attribute doesn't come from an ancestor slot
+            # class.
+            # Note _inst_dict will be used again below if cache_hash is True
+            lines.append("_inst_dict = self.__dict__")
+
+            def fmt_setter(attr_name, value_var, has_on_setattr):
+                if _is_slot_attr(attr_name, base_attr_map):
+                    return _setattr(attr_name, value_var, has_on_setattr)
+
+                return "_inst_dict['%s'] = %s" % (attr_name, value_var)
+
+            def fmt_setter_with_converter(
+                attr_name, value_var, has_on_setattr
+            ):
+                if has_on_setattr or _is_slot_attr(attr_name, base_attr_map):
+                    return _setattr_with_converter(
+                        attr_name, value_var, has_on_setattr
+                    )
+
+                return "_inst_dict['%s'] = %s(%s)" % (
+                    attr_name,
+                    _init_converter_pat % (attr_name,),
+                    value_var,
+                )
+
     else:
-        def fmt_setter(attr_name, value):
-            return "self.%(attr_name)s = %(value)s" % {
-                "attr_name": attr_name,
-                "value": value,
-            }
-
-        def fmt_setter_with_converter(attr_name, value_var):
-            conv_name = _init_convert_pat.format(attr_name)
-            return "self.%(attr_name)s = %(conv)s(%(value_var)s)" % {
-                "attr_name": attr_name,
-                "value_var": value_var,
-                "conv": conv_name,
-            }
+        # Not frozen.
+        fmt_setter = _assign
+        fmt_setter_with_converter = _assign_with_converter
 
     args = []
+    kw_only_args = []
     attrs_to_validate = []
 
     # This is a dictionary of names to validator and converter callables.
     # Injecting this into __init__ globals lets us avoid lookups.
     names_for_globals = {}
+    annotations = {"return": None}
 
     for a in attrs:
         if a.validator:
             attrs_to_validate.append(a)
+
         attr_name = a.name
+        has_on_setattr = a.on_setattr is not None or (
+            a.on_setattr is not setters.NO_OP and has_cls_on_setattr
+        )
         arg_name = a.name.lstrip("_")
+
         has_factory = isinstance(a.default, Factory)
         if has_factory and a.default.takes_self:
             maybe_self = "self"
         else:
             maybe_self = ""
+
         if a.init is False:
             if has_factory:
                 init_factory_name = _init_factory_pat.format(a.name)
-                if a.convert is not None:
-                    lines.append(fmt_setter_with_converter(
-                        attr_name,
-                        init_factory_name + "({0})".format(maybe_self)))
-                    conv_name = _init_convert_pat.format(a.name)
-                    names_for_globals[conv_name] = a.convert
+                if a.converter is not None:
+                    lines.append(
+                        fmt_setter_with_converter(
+                            attr_name,
+                            init_factory_name + "(%s)" % (maybe_self,),
+                            has_on_setattr,
+                        )
+                    )
+                    conv_name = _init_converter_pat % (a.name,)
+                    names_for_globals[conv_name] = a.converter
                 else:
-                    lines.append(fmt_setter(
-                        attr_name,
-                        init_factory_name + "({0})".format(maybe_self)
-                    ))
+                    lines.append(
+                        fmt_setter(
+                            attr_name,
+                            init_factory_name + "(%s)" % (maybe_self,),
+                            has_on_setattr,
+                        )
+                    )
                 names_for_globals[init_factory_name] = a.default.factory
             else:
-                if a.convert is not None:
-                    lines.append(fmt_setter_with_converter(
-                        attr_name,
-                        "attr_dict['{attr_name}'].default"
-                        .format(attr_name=attr_name)
-                    ))
-                    conv_name = _init_convert_pat.format(a.name)
-                    names_for_globals[conv_name] = a.convert
+                if a.converter is not None:
+                    lines.append(
+                        fmt_setter_with_converter(
+                            attr_name,
+                            "attr_dict['%s'].default" % (attr_name,),
+                            has_on_setattr,
+                        )
+                    )
+                    conv_name = _init_converter_pat % (a.name,)
+                    names_for_globals[conv_name] = a.converter
                 else:
-                    lines.append(fmt_setter(
-                        attr_name,
-                        "attr_dict['{attr_name}'].default"
-                        .format(attr_name=attr_name)
-                    ))
+                    lines.append(
+                        fmt_setter(
+                            attr_name,
+                            "attr_dict['%s'].default" % (attr_name,),
+                            has_on_setattr,
+                        )
+                    )
         elif a.default is not NOTHING and not has_factory:
-            args.append(
-                "{arg_name}=attr_dict['{attr_name}'].default".format(
-                    arg_name=arg_name,
-                    attr_name=attr_name,
+            arg = "%s=attr_dict['%s'].default" % (arg_name, attr_name)
+            if a.kw_only:
+                kw_only_args.append(arg)
+            else:
+                args.append(arg)
+
+            if a.converter is not None:
+                lines.append(
+                    fmt_setter_with_converter(
+                        attr_name, arg_name, has_on_setattr
+                    )
                 )
-            )
-            if a.convert is not None:
-                lines.append(fmt_setter_with_converter(attr_name, arg_name))
-                names_for_globals[_init_convert_pat.format(a.name)] = a.convert
+                names_for_globals[
+                    _init_converter_pat % (a.name,)
+                ] = a.converter
             else:
-                lines.append(fmt_setter(attr_name, arg_name))
+                lines.append(fmt_setter(attr_name, arg_name, has_on_setattr))
+
         elif has_factory:
-            args.append("{arg_name}=NOTHING".format(arg_name=arg_name))
-            lines.append("if {arg_name} is not NOTHING:"
-                         .format(arg_name=arg_name))
+            arg = "%s=NOTHING" % (arg_name,)
+            if a.kw_only:
+                kw_only_args.append(arg)
+            else:
+                args.append(arg)
+            lines.append("if %s is not NOTHING:" % (arg_name,))
+
             init_factory_name = _init_factory_pat.format(a.name)
-            if a.convert is not None:
-                lines.append("    " + fmt_setter_with_converter(attr_name,
-                                                                arg_name))
+            if a.converter is not None:
+                lines.append(
+                    "    "
+                    + fmt_setter_with_converter(
+                        attr_name, arg_name, has_on_setattr
+                    )
+                )
                 lines.append("else:")
-                lines.append("    " + fmt_setter_with_converter(
-                    attr_name,
-                    init_factory_name + "({0})".format(maybe_self)
-                ))
-                names_for_globals[_init_convert_pat.format(a.name)] = a.convert
+                lines.append(
+                    "    "
+                    + fmt_setter_with_converter(
+                        attr_name,
+                        init_factory_name + "(" + maybe_self + ")",
+                        has_on_setattr,
+                    )
+                )
+                names_for_globals[
+                    _init_converter_pat % (a.name,)
+                ] = a.converter
             else:
-                lines.append("    " + fmt_setter(attr_name, arg_name))
+                lines.append(
+                    "    " + fmt_setter(attr_name, arg_name, has_on_setattr)
+                )
                 lines.append("else:")
-                lines.append("    " + fmt_setter(
-                    attr_name,
-                    init_factory_name + "({0})".format(maybe_self)
-                ))
+                lines.append(
+                    "    "
+                    + fmt_setter(
+                        attr_name,
+                        init_factory_name + "(" + maybe_self + ")",
+                        has_on_setattr,
+                    )
+                )
             names_for_globals[init_factory_name] = a.default.factory
         else:
-            args.append(arg_name)
-            if a.convert is not None:
-                lines.append(fmt_setter_with_converter(attr_name, arg_name))
-                names_for_globals[_init_convert_pat.format(a.name)] = a.convert
+            if a.kw_only:
+                kw_only_args.append(arg_name)
             else:
-                lines.append(fmt_setter(attr_name, arg_name))
+                args.append(arg_name)
+
+            if a.converter is not None:
+                lines.append(
+                    fmt_setter_with_converter(
+                        attr_name, arg_name, has_on_setattr
+                    )
+                )
+                names_for_globals[
+                    _init_converter_pat % (a.name,)
+                ] = a.converter
+            else:
+                lines.append(fmt_setter(attr_name, arg_name, has_on_setattr))
+
+        if a.init is True:
+            if a.type is not None and a.converter is None:
+                annotations[arg_name] = a.type
+            elif a.converter is not None:
+                # Try to get the type from the converter.
+                t = _AnnotationExtractor(a.converter).get_first_param_type()
+                if t:
+                    annotations[arg_name] = t
 
     if attrs_to_validate:  # we can skip this if there are no validators.
         names_for_globals["_config"] = _config
         lines.append("if _config._run_validators is True:")
         for a in attrs_to_validate:
-            val_name = "__attr_validator_{}".format(a.name)
-            attr_name = "__attr_{}".format(a.name)
-            lines.append("    {}(self, {}, self.{})".format(
-                val_name, attr_name, a.name))
+            val_name = "__attr_validator_" + a.name
+            attr_name = "__attr_" + a.name
+            lines.append(
+                "    %s(self, %s, self.%s)" % (val_name, attr_name, a.name)
+            )
             names_for_globals[val_name] = a.validator
             names_for_globals[attr_name] = a
+
     if post_init:
         lines.append("self.__attrs_post_init__()")
 
-    return """\
-def __init__(self, {args}):
+    # Because this is set only after __attrs_post_init__ is called, a crash
+    # will result if post-init tries to access the hash code.  This seemed
+    # preferable to setting this beforehand, in which case altering field
+    # values during post-init combined with post-init accessing the hash
+    # code would result in silent bugs.
+    if cache_hash:
+        if frozen:
+            if slots:
+                # if frozen and slots, then _setattr defined above
+                init_hash_cache = "_setattr(self, '%s', %s)"
+            else:
+                # if frozen and not slots, then _inst_dict defined above
+                init_hash_cache = "_inst_dict['%s'] = %s"
+        else:
+            init_hash_cache = "self.%s = %s"
+        lines.append(init_hash_cache % (_hash_cache_field, "None"))
+
+    # For exceptions we rely on BaseException.__init__ for proper
+    # initialization.
+    if is_exc:
+        vals = ",".join("self." + a.name for a in attrs if a.init)
+
+        lines.append("BaseException.__init__(self, %s)" % (vals,))
+
+    args = ", ".join(args)
+    if kw_only_args:
+        args += "%s*, %s" % (
+            ", " if args else "",  # leading comma
+            ", ".join(kw_only_args),  # kw_only args
+        )
+    return (
+        """\
+def {init_name}(self, {args}):
     {lines}
 """.format(
-        args=", ".join(args),
-        lines="\n    ".join(lines) if lines else "pass",
-    ), names_for_globals
-
-
-class Attribute(object):
+            init_name=("__attrs_init__" if attrs_init else "__init__"),
+            args=args,
+            lines="\n    ".join(lines) if lines else "pass",
+        ),
+        names_for_globals,
+        annotations,
+    )
+
+
+class Attribute:
     """
     *Read-only* representation of an attribute.
 
-    :attribute name: The name of the attribute.
-
-    Plus *all* arguments of :func:`attr.ib`.
+    The class has *all* arguments of `attr.ib` (except for ``factory``,
+    which is only syntactic sugar for ``default=Factory(...)``) plus the
+    following:
+
+    - ``name`` (`str`): The name of the attribute.
+    - ``inherited`` (`bool`): Whether or not that attribute has been inherited
+      from a base class.
+    - ``eq_key`` and ``order_key`` (`typing.Callable` or `None`): The callables
+      that are used for comparing and ordering objects by this attribute,
+      respectively. These are set by passing a callable to `attr.ib`'s ``eq``,
+      ``order``, or ``cmp`` arguments. See also :ref:`comparison customization
+      <custom-comparison>`.
+
+    Instances of this class are frequently used for introspection purposes
+    like:
+
+    - `fields` returns a tuple of them.
+    - Validators get them passed as the first argument.
+    - The :ref:`field transformer <transform-fields>` hook receives a list of
+      them.
+
+    .. versionadded:: 20.1.0 *inherited*
+    .. versionadded:: 20.1.0 *on_setattr*
+    .. versionchanged:: 20.2.0 *inherited* is not taken into account for
+        equality checks and hashing anymore.
+    .. versionadded:: 21.1.0 *eq_key* and *order_key*
+
+    For the full version history of the fields, see `attr.ib`.
     """
+
     __slots__ = (
-        "name", "default", "validator", "repr", "cmp", "hash", "init",
-        "convert", "metadata",
+        "name",
+        "default",
+        "validator",
+        "repr",
+        "eq",
+        "eq_key",
+        "order",
+        "order_key",
+        "hash",
+        "init",
+        "metadata",
+        "type",
+        "converter",
+        "kw_only",
+        "inherited",
+        "on_setattr",
     )
 
-    def __init__(self, name, default, validator, repr, cmp, hash, init,
-                 convert=None, metadata=None):
+    def __init__(
+        self,
+        name,
+        default,
+        validator,
+        repr,
+        cmp,  # XXX: unused, remove along with other cmp code.
+        hash,
+        init,
+        inherited,
+        metadata=None,
+        type=None,
+        converter=None,
+        kw_only=False,
+        eq=None,
+        eq_key=None,
+        order=None,
+        order_key=None,
+        on_setattr=None,
+    ):
+        eq, eq_key, order, order_key = _determine_attrib_eq_order(
+            cmp, eq_key or eq, order_key or order, True
+        )
+
         # Cache this descriptor here to speed things up later.
         bound_setattr = _obj_setattr.__get__(self, Attribute)
 
+        # Despite the big red warning, people *do* instantiate `Attribute`
+        # themselves.
         bound_setattr("name", name)
         bound_setattr("default", default)
         bound_setattr("validator", validator)
         bound_setattr("repr", repr)
-        bound_setattr("cmp", cmp)
+        bound_setattr("eq", eq)
+        bound_setattr("eq_key", eq_key)
+        bound_setattr("order", order)
+        bound_setattr("order_key", order_key)
         bound_setattr("hash", hash)
         bound_setattr("init", init)
-        bound_setattr("convert", convert)
-        bound_setattr("metadata", (metadata_proxy(metadata) if metadata
-                                   else _empty_metadata_singleton))
+        bound_setattr("converter", converter)
+        bound_setattr(
+            "metadata",
+            (
+                types.MappingProxyType(dict(metadata))  # Shallow copy
+                if metadata
+                else _empty_metadata_singleton
+            ),
+        )
+        bound_setattr("type", type)
+        bound_setattr("kw_only", kw_only)
+        bound_setattr("inherited", inherited)
+        bound_setattr("on_setattr", on_setattr)
 
     def __setattr__(self, name, value):
         raise FrozenInstanceError()
 
     @classmethod
-    def from_counting_attr(cls, name, ca):
+    def from_counting_attr(cls, name, ca, type=None):
+        # type holds the annotated value. deal with conflicts:
+        if type is None:
+            type = ca.type
+        elif ca.type is not None:
+            raise ValueError(
+                "Type annotation and type argument cannot both be present"
+            )
         inst_dict = {
             k: getattr(ca, k)
-            for k
-            in Attribute.__slots__
-            if k not in (
-                "name", "validator", "default",
-            )  # exclude methods
+            for k in Attribute.__slots__
+            if k
+            not in (
+                "name",
+                "validator",
+                "default",
+                "type",
+                "inherited",
+            )  # exclude methods and deprecated alias
         }
-        return cls(name=name, validator=ca._validator, default=ca._default,
-                   **inst_dict)
+        return cls(
+            name=name,
+            validator=ca._validator,
+            default=ca._default,
+            type=type,
+            cmp=None,
+            inherited=False,
+            **inst_dict
+        )
+
+    # Don't use attr.evolve since fields(Attribute) doesn't work
+    def evolve(self, **changes):
+        """
+        Copy *self* and apply *changes*.
+
+        This works similarly to `attr.evolve` but that function does not work
+        with ``Attribute``.
+
+        It is mainly meant to be used for `transform-fields`.
+
+        .. versionadded:: 20.3.0
+        """
+        new = copy.copy(self)
+
+        new._setattrs(changes.items())
+
+        return new
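
Attribute.evolve is aimed mainly at field transformers. A hypothetical
transformer that hides underscore-prefixed fields from the generated repr
could look like this (hide_private is an illustrative name, not part of the
attrs API):

    import attr

    def hide_private(cls, fields):
        # Return a new list of Attributes with "private" names excluded
        # from the generated repr.
        return [
            f.evolve(repr=False) if f.name.startswith("_") else f
            for f in fields
        ]

    @attr.s(field_transformer=hide_private)
    class C(object):
        x = attr.ib(default=1)
        _token = attr.ib(default="secret")

    assert repr(C()) == "C(x=1)"
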
 
     # Don't use _add_pickle since fields(Attribute) doesn't work
     def __getstate__(self):
         """
         Play nice with pickle.
         """
-        return tuple(getattr(self, name) if name != "metadata"
-                     else dict(self.metadata)
-                     for name in self.__slots__)
+        return tuple(
+            getattr(self, name) if name != "metadata" else dict(self.metadata)
+            for name in self.__slots__
+        )
 
     def __setstate__(self, state):
         """
         Play nice with pickle.
         """
+        self._setattrs(zip(self.__slots__, state))
+
+    def _setattrs(self, name_values_pairs):
         bound_setattr = _obj_setattr.__get__(self, Attribute)
-        for name, value in zip(self.__slots__, state):
+        for name, value in name_values_pairs:
             if name != "metadata":
                 bound_setattr(name, value)
             else:
-                bound_setattr(name, metadata_proxy(value) if value else
-                              _empty_metadata_singleton)
-
-
-_a = [Attribute(name=name, default=NOTHING, validator=None,
-                repr=True, cmp=True, hash=(name != "metadata"), init=True)
-      for name in Attribute.__slots__]
+                bound_setattr(
+                    name,
+                    types.MappingProxyType(dict(value))
+                    if value
+                    else _empty_metadata_singleton,
+                )
+
+
+_a = [
+    Attribute(
+        name=name,
+        default=NOTHING,
+        validator=None,
+        repr=True,
+        cmp=None,
+        eq=True,
+        order=False,
+        hash=(name != "metadata"),
+        init=True,
+        inherited=False,
+    )
+    for name in Attribute.__slots__
+]
 
 Attribute = _add_hash(
-    _add_cmp(_add_repr(Attribute, attrs=_a), attrs=_a),
-    attrs=[a for a in _a if a.hash]
+    _add_eq(
+        _add_repr(Attribute, attrs=_a),
+        attrs=[a for a in _a if a.name != "inherited"],
+    ),
+    attrs=[a for a in _a if a.hash and a.name != "inherited"],
 )
 
 
-class _CountingAttr(object):
+class _CountingAttr:
     """
     Intermediate representation of attributes that uses a counter to preserve
     the order in which the attributes have been defined.
@@ -903,35 +2664,105 @@
     *Internal* data structure of the attrs library.  Running into it is
     most likely the result of a bug like a forgotten `@attr.s` decorator.
     """
-    __slots__ = ("counter", "_default", "repr", "cmp", "hash", "init",
-                 "metadata", "_validator", "convert")
+
+    __slots__ = (
+        "counter",
+        "_default",
+        "repr",
+        "eq",
+        "eq_key",
+        "order",
+        "order_key",
+        "hash",
+        "init",
+        "metadata",
+        "_validator",
+        "converter",
+        "type",
+        "kw_only",
+        "on_setattr",
+    )
     __attrs_attrs__ = tuple(
-        Attribute(name=name, default=NOTHING, validator=None,
-                  repr=True, cmp=True, hash=True, init=True)
-        for name
-        in ("counter", "_default", "repr", "cmp", "hash", "init",)
+        Attribute(
+            name=name,
+            default=NOTHING,
+            validator=None,
+            repr=True,
+            cmp=None,
+            hash=True,
+            init=True,
+            kw_only=False,
+            eq=True,
+            eq_key=None,
+            order=False,
+            order_key=None,
+            inherited=False,
+            on_setattr=None,
+        )
+        for name in (
+            "counter",
+            "_default",
+            "repr",
+            "eq",
+            "order",
+            "hash",
+            "init",
+            "on_setattr",
+        )
     ) + (
-        Attribute(name="metadata", default=None, validator=None,
-                  repr=True, cmp=True, hash=False, init=True),
+        Attribute(
+            name="metadata",
+            default=None,
+            validator=None,
+            repr=True,
+            cmp=None,
+            hash=False,
+            init=True,
+            kw_only=False,
+            eq=True,
+            eq_key=None,
+            order=False,
+            order_key=None,
+            inherited=False,
+            on_setattr=None,
+        ),
     )
     cls_counter = 0
 
-    def __init__(self, default, validator, repr, cmp, hash, init, convert,
-                 metadata):
+    def __init__(
+        self,
+        default,
+        validator,
+        repr,
+        cmp,
+        hash,
+        init,
+        converter,
+        metadata,
+        type,
+        kw_only,
+        eq,
+        eq_key,
+        order,
+        order_key,
+        on_setattr,
+    ):
         _CountingAttr.cls_counter += 1
         self.counter = _CountingAttr.cls_counter
         self._default = default
-        # If validator is a list/tuple, wrap it using helper validator.
-        if validator and isinstance(validator, (list, tuple)):
-            self._validator = and_(*validator)
-        else:
-            self._validator = validator
+        self._validator = validator
+        self.converter = converter
         self.repr = repr
-        self.cmp = cmp
+        self.eq = eq
+        self.eq_key = eq_key
+        self.order = order
+        self.order_key = order_key
         self.hash = hash
         self.init = init
-        self.convert = convert
         self.metadata = metadata
+        self.type = type
+        self.kw_only = kw_only
+        self.on_setattr = on_setattr
 
     def validator(self, meth):
         """
@@ -965,15 +2796,14 @@
         return meth
 
 
-_CountingAttr = _add_cmp(_add_repr(_CountingAttr))
-
-
-@attributes(slots=True, init=False)
-class Factory(object):
+_CountingAttr = _add_eq(_add_repr(_CountingAttr))
+
+
+class Factory:
     """
     Stores a factory callable.
 
-    If passed as the default value to :func:`attr.ib`, the factory is used to
+    If passed as the default value to `attrs.field`, the factory is used to
     generate a new value.
 
     :param callable factory: A callable that takes either none or exactly one
@@ -983,8 +2813,8 @@
 
     .. versionadded:: 17.1.0  *takes_self*
     """
-    factory = attr()
-    takes_self = attr()
+
+    __slots__ = ("factory", "takes_self")
 
     def __init__(self, factory, takes_self=False):
         """
@@ -994,47 +2824,122 @@
         self.factory = factory
         self.takes_self = takes_self
 
+    def __getstate__(self):
+        """
+        Play nice with pickle.
+        """
+        return tuple(getattr(self, name) for name in self.__slots__)
+
+    def __setstate__(self, state):
+        """
+        Play nice with pickle.
+        """
+        for name, value in zip(self.__slots__, state):
+            setattr(self, name, value)
+
+
+_f = [
+    Attribute(
+        name=name,
+        default=NOTHING,
+        validator=None,
+        repr=True,
+        cmp=None,
+        eq=True,
+        order=False,
+        hash=True,
+        init=True,
+        inherited=False,
+    )
+    for name in Factory.__slots__
+]
+
+Factory = _add_hash(_add_eq(_add_repr(Factory, attrs=_f), attrs=_f), attrs=_f)
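
As a usage reminder, Factory computes a fresh default per instance, and
takes_self=True lets the factory derive the default from attributes that
were initialized earlier:

    >>> import attr
    >>> @attr.s
    ... class C(object):
    ...     x = attr.ib(default=42)
    ...     y = attr.ib(
    ...         default=attr.Factory(lambda self: self.x + 1, takes_self=True)
    ...     )
    >>> C().y
    43
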
+
 
 def make_class(name, attrs, bases=(object,), **attributes_arguments):
     """
     A quick way to create a new class called *name* with *attrs*.
 
-    :param name: The name for the new class.
-    :type name: str
+    :param str name: The name for the new class.
 
     :param attrs: A list of names or a dictionary of mappings of names to
         attributes.
-    :type attrs: :class:`list` or :class:`dict`
+
+        If *attrs* is a list or an ordered dict (`dict` on Python 3.6+,
+        `collections.OrderedDict` otherwise), the order is deduced from
+        the order of the names or attributes inside *attrs*.  Otherwise the
+        order of the definition of the attributes is used.
+    :type attrs: `list` or `dict`
 
     :param tuple bases: Classes that the new class will subclass.
 
-    :param attributes_arguments: Passed unmodified to :func:`attr.s`.
+    :param attributes_arguments: Passed unmodified to `attr.s`.
 
     :return: A new class with *attrs*.
     :rtype: type
 
-    ..  versionadded:: 17.1.0 *bases*
+    .. versionadded:: 17.1.0 *bases*
+    .. versionchanged:: 18.1.0 If *attrs* is ordered, the order is retained.
     """
     if isinstance(attrs, dict):
         cls_dict = attrs
     elif isinstance(attrs, (list, tuple)):
-        cls_dict = dict((a, attr()) for a in attrs)
+        cls_dict = {a: attrib() for a in attrs}
     else:
         raise TypeError("attrs argument must be a dict or a list.")
 
-    return attributes(**attributes_arguments)(type(name, bases, cls_dict))
-
-
-# These are required by whithin this module so we define them here and merely
-# import into .validators.
-
-
-@attributes(slots=True, hash=True)
-class _AndValidator(object):
+    pre_init = cls_dict.pop("__attrs_pre_init__", None)
+    post_init = cls_dict.pop("__attrs_post_init__", None)
+    user_init = cls_dict.pop("__init__", None)
+
+    body = {}
+    if pre_init is not None:
+        body["__attrs_pre_init__"] = pre_init
+    if post_init is not None:
+        body["__attrs_post_init__"] = post_init
+    if user_init is not None:
+        body["__init__"] = user_init
+
+    type_ = types.new_class(name, bases, {}, lambda ns: ns.update(body))
+
+    # For pickling to work, the __module__ variable needs to be set to the
+    # frame where the class is created.  Bypass this step in environments where
+    # sys._getframe is not defined (Jython for example) or sys._getframe is not
+    # defined for arguments greater than 0 (IronPython).
+    try:
+        type_.__module__ = sys._getframe(1).f_globals.get(
+            "__name__", "__main__"
+        )
+    except (AttributeError, ValueError):
+        pass
+
+    # We do it here for proper warnings with meaningful stacklevel.
+    cmp = attributes_arguments.pop("cmp", None)
+    (
+        attributes_arguments["eq"],
+        attributes_arguments["order"],
+    ) = _determine_attrs_eq_order(
+        cmp,
+        attributes_arguments.get("eq"),
+        attributes_arguments.get("order"),
+        True,
+    )
+
+    return _attrs(these=cls_dict, **attributes_arguments)(type_)
+
+
+# These are required within this module, so we define them here and merely
+# import them into .validators / .converters.
+
+
+@attrs(slots=True, hash=True)
+class _AndValidator:
     """
     Compose many validators to a single one.
     """
-    _validators = attr()
+
+    _validators = attrib()
 
     def __call__(self, inst, attr, value):
         for v in self._validators:
@@ -1047,16 +2952,55 @@
 
     When called on a value, it runs all wrapped validators.
 
-    :param validators: Arbitrary number of validators.
-    :type validators: callables
+    :param callables validators: Arbitrary number of validators.
 
     .. versionadded:: 17.1.0
     """
     vals = []
     for validator in validators:
         vals.extend(
-            validator._validators if isinstance(validator, _AndValidator)
+            validator._validators
+            if isinstance(validator, _AndValidator)
             else [validator]
         )
 
     return _AndValidator(tuple(vals))
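
and_ flattens nested compositions, and passing a plain list of validators
to attr.ib is shorthand for the same composition; an invalid value such as
C(42) below would raise a ValueError from the in_ validator:

    >>> import attr
    >>> from attr import validators as v
    >>> @attr.s
    ... class C(object):
    ...     x = attr.ib(validator=[v.instance_of(int), v.in_(range(10))])
    >>> C(3).x
    3
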
+
+
+def pipe(*converters):
+    """
+    A converter that composes multiple converters into one.
+
+    When called on a value, it runs all wrapped converters, returning the
+    *last* value.
+
+    Type annotations will be inferred from the wrapped converters, if
+    they have any.
+
+    :param callables converters: Arbitrary number of converters.
+
+    .. versionadded:: 20.1.0
+    """
+
+    def pipe_converter(val):
+        for converter in converters:
+            val = converter(val)
+
+        return val
+
+    if not converters:
+        # If the converter list is empty, pipe_converter is the identity.
+        A = typing.TypeVar("A")
+        pipe_converter.__annotations__ = {"val": A, "return": A}
+    else:
+        # Get parameter type from first converter.
+        t = _AnnotationExtractor(converters[0]).get_first_param_type()
+        if t:
+            pipe_converter.__annotations__["val"] = t
+
+        # Get return type from last converter.
+        rt = _AnnotationExtractor(converters[-1]).get_return_type()
+        if rt:
+            pipe_converter.__annotations__["return"] = rt
+
+    return pipe_converter
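
pipe runs the value through each converter from left to right and is
re-exported via attr.converters in upstream attrs; for instance, a
converter that parses and then clamps an integer:

    >>> from attr.converters import pipe
    >>> clamp_byte = pipe(int, lambda v: max(0, min(v, 255)))
    >>> clamp_byte("300")
    255
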
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/mercurial/thirdparty/attr/_next_gen.py	Fri Nov 25 15:14:40 2022 +0100
@@ -0,0 +1,220 @@
+# SPDX-License-Identifier: MIT
+
+"""
+These are Python 3.6+-only and keyword-only APIs that call `attr.s` and
+`attr.ib` with different default values.
+"""
+
+
+from functools import partial
+
+from . import setters
+from ._funcs import asdict as _asdict
+from ._funcs import astuple as _astuple
+from ._make import (
+    NOTHING,
+    _frozen_setattrs,
+    _ng_default_on_setattr,
+    attrib,
+    attrs,
+)
+from .exceptions import UnannotatedAttributeError
+
+
+def define(
+    maybe_cls=None,
+    *,
+    these=None,
+    repr=None,
+    hash=None,
+    init=None,
+    slots=True,
+    frozen=False,
+    weakref_slot=True,
+    str=False,
+    auto_attribs=None,
+    kw_only=False,
+    cache_hash=False,
+    auto_exc=True,
+    eq=None,
+    order=False,
+    auto_detect=True,
+    getstate_setstate=None,
+    on_setattr=None,
+    field_transformer=None,
+    match_args=True,
+):
+    r"""
+    Define an ``attrs`` class.
+
+    Differences from the classic `attr.s` that it uses underneath:
+
+    - Automatically detect whether or not *auto_attribs* should be `True` (cf.
+      *auto_attribs* parameter).
+    - If *frozen* is `False`, run converters and validators when setting an
+      attribute by default.
+    - *slots=True*
+
+      .. caution::
+
+         Usually this has only upsides and few visible effects in everyday
+         programming. But it *can* lead to some surprising behaviors, so please
+         make sure to read :term:`slotted classes`.
+    - *auto_exc=True*
+    - *auto_detect=True*
+    - *order=False*
+    - Some options that were only relevant on Python 2 or were kept around for
+      backwards-compatibility have been removed.
+
+    Please note that these are all defaults and you can change them as you
+    wish.
+
+    :param Optional[bool] auto_attribs: If set to `True` or `False`, it behaves
+       exactly like `attr.s`. If left `None`, `attr.s` will try to guess:
+
+       1. If any attributes are annotated and no unannotated `attrs.fields`\ s
+          are found, it assumes *auto_attribs=True*.
+       2. Otherwise it assumes *auto_attribs=False* and tries to collect
+          `attrs.fields`\ s.
+
+    For now, please refer to `attr.s` for the rest of the parameters.
+
+    .. versionadded:: 20.1.0
+    .. versionchanged:: 21.3.0 Converters are also run ``on_setattr``.
+    """
+
+    def do_it(cls, auto_attribs):
+        return attrs(
+            maybe_cls=cls,
+            these=these,
+            repr=repr,
+            hash=hash,
+            init=init,
+            slots=slots,
+            frozen=frozen,
+            weakref_slot=weakref_slot,
+            str=str,
+            auto_attribs=auto_attribs,
+            kw_only=kw_only,
+            cache_hash=cache_hash,
+            auto_exc=auto_exc,
+            eq=eq,
+            order=order,
+            auto_detect=auto_detect,
+            collect_by_mro=True,
+            getstate_setstate=getstate_setstate,
+            on_setattr=on_setattr,
+            field_transformer=field_transformer,
+            match_args=match_args,
+        )
+
+    def wrap(cls):
+        """
+        Making this a wrapper ensures this code runs during class creation.
+
+        We also ensure that frozen-ness of classes is inherited.
+        """
+        nonlocal frozen, on_setattr
+
+        had_on_setattr = on_setattr not in (None, setters.NO_OP)
+
+        # By default, mutable classes convert & validate on setattr.
+        if frozen is False and on_setattr is None:
+            on_setattr = _ng_default_on_setattr
+
+        # However, if we subclass a frozen class, we inherit the immutability
+        # and disable on_setattr.
+        for base_cls in cls.__bases__:
+            if base_cls.__setattr__ is _frozen_setattrs:
+                if had_on_setattr:
+                    raise ValueError(
+                        "Frozen classes can't use on_setattr "
+                        "(frozen-ness was inherited)."
+                    )
+
+                on_setattr = setters.NO_OP
+                break
+
+        if auto_attribs is not None:
+            return do_it(cls, auto_attribs)
+
+        try:
+            return do_it(cls, True)
+        except UnannotatedAttributeError:
+            return do_it(cls, False)
+
+    # maybe_cls's type depends on the usage of the decorator.  It's a class
+    # if it's used as `@attrs` but ``None`` if used as `@attrs()`.
+    if maybe_cls is None:
+        return wrap
+    else:
+        return wrap(maybe_cls)
+
+
+mutable = define
+frozen = partial(define, frozen=True, on_setattr=None)
+
+
+def field(
+    *,
+    default=NOTHING,
+    validator=None,
+    repr=True,
+    hash=None,
+    init=True,
+    metadata=None,
+    converter=None,
+    factory=None,
+    kw_only=False,
+    eq=None,
+    order=None,
+    on_setattr=None,
+):
+    """
+    Identical to `attr.ib`, except keyword-only and with some arguments
+    removed.
+
+    .. versionadded:: 20.1.0
+    """
+    return attrib(
+        default=default,
+        validator=validator,
+        repr=repr,
+        hash=hash,
+        init=init,
+        metadata=metadata,
+        converter=converter,
+        factory=factory,
+        kw_only=kw_only,
+        eq=eq,
+        order=order,
+        on_setattr=on_setattr,
+    )
+
+
+def asdict(inst, *, recurse=True, filter=None, value_serializer=None):
+    """
+    Same as `attr.asdict`, except that collection types are always retained
+    and dict is always used as *dict_factory*.
+
+    .. versionadded:: 21.3.0
+    """
+    return _asdict(
+        inst=inst,
+        recurse=recurse,
+        filter=filter,
+        value_serializer=value_serializer,
+        retain_collection_types=True,
+    )
+
+
+def astuple(inst, *, recurse=True, filter=None):
+    """
+    Same as `attr.astuple`, except that collection types are always retained
+    and `tuple` is always used as the *tuple_factory*.
+
+    .. versionadded:: 21.3.0
+    """
+    return _astuple(
+        inst=inst, recurse=recurse, filter=filter, retain_collection_types=True
+    )
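
A minimal usage sketch of the next-generation APIs added above. The import
path is hypothetical: upstream exposes these names as the ``attrs`` package,
while this changeset vendors them under ``mercurial.thirdparty.attr``.

    from attrs import asdict, define, field

    @define
    class Change:
        node: str = field()
        files: tuple = field(default=())

    c = Change(node="abc123", files=("a.txt",))
    # The asdict() defined above always retains collection types, so the
    # tuple stays a tuple instead of becoming a list.
    assert asdict(c) == {"node": "abc123", "files": ("a.txt",)}
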
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/mercurial/thirdparty/attr/_version_info.py	Fri Nov 25 15:14:40 2022 +0100
@@ -0,0 +1,86 @@
+# SPDX-License-Identifier: MIT
+
+
+from functools import total_ordering
+
+from ._funcs import astuple
+from ._make import attrib, attrs
+
+
+@total_ordering
+@attrs(eq=False, order=False, slots=True, frozen=True)
+class VersionInfo:
+    """
+    A version object that can be compared to tuples of length 1--4:
+
+    >>> attr.VersionInfo(19, 1, 0, "final") <= (19, 2)
+    True
+    >>> attr.VersionInfo(19, 1, 0, "final") < (19, 1, 1)
+    True
+    >>> vi = attr.VersionInfo(19, 2, 0, "final")
+    >>> vi < (19, 1, 1)
+    False
+    >>> vi < (19,)
+    False
+    >>> vi == (19, 2,)
+    True
+    >>> vi == (19, 2, 1)
+    False
+
+    .. versionadded:: 19.2
+    """
+
+    year = attrib(type=int)
+    minor = attrib(type=int)
+    micro = attrib(type=int)
+    releaselevel = attrib(type=str)
+
+    @classmethod
+    def _from_version_string(cls, s):
+        """
+        Parse *s* and return a _VersionInfo.
+        """
+        v = s.split(".")
+        if len(v) == 3:
+            v.append("final")
+
+        return cls(
+            year=int(v[0]), minor=int(v[1]), micro=int(v[2]), releaselevel=v[3]
+        )
+
+    def _ensure_tuple(self, other):
+        """
+        Ensure *other* is a tuple of a valid length.
+
+        Returns a possibly transformed *other* and ourselves as a tuple of
+        the same length as *other*.
+        """
+
+        if self.__class__ is other.__class__:
+            other = astuple(other)
+
+        if not isinstance(other, tuple):
+            raise NotImplementedError
+
+        if not (1 <= len(other) <= 4):
+            raise NotImplementedError
+
+        return astuple(self)[: len(other)], other
+
+    def __eq__(self, other):
+        try:
+            us, them = self._ensure_tuple(other)
+        except NotImplementedError:
+            return NotImplemented
+
+        return us == them
+
+    def __lt__(self, other):
+        try:
+            us, them = self._ensure_tuple(other)
+        except NotImplementedError:
+            return NotImplemented
+
+        # Since alphabetically "dev0" < "final" < "post1" < "post2", we don't
+        # have to do anything special with releaselevel for now.
+        return us < them
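
A brief sketch of the comparison semantics implemented above, assuming
``VersionInfo`` (and its private ``_from_version_string`` helper) is imported
from this module; upstream exposes an instance as ``attr.version_info``.

    vi = VersionInfo(year=22, minor=1, micro=0, releaselevel="final")
    # Comparisons consider only as many fields as the tuple provides.
    assert vi == (22, 1)
    assert vi < (22, 2)
    # A three-part version string implies releaselevel "final".
    assert VersionInfo._from_version_string("22.1.0") == vi
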
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/mercurial/thirdparty/attr/_version_info.pyi	Fri Nov 25 15:14:40 2022 +0100
@@ -0,0 +1,9 @@
+class VersionInfo:
+    @property
+    def year(self) -> int: ...
+    @property
+    def minor(self) -> int: ...
+    @property
+    def micro(self) -> int: ...
+    @property
+    def releaselevel(self) -> str: ...
--- a/mercurial/thirdparty/attr/converters.py	Thu Nov 24 10:34:34 2022 +0100
+++ b/mercurial/thirdparty/attr/converters.py	Fri Nov 25 15:14:40 2022 +0100
@@ -1,8 +1,22 @@
+# SPDX-License-Identifier: MIT
+
 """
 Commonly useful converters.
 """
 
-from __future__ import absolute_import, division, print_function
+
+import typing
+
+from ._compat import _AnnotationExtractor
+from ._make import NOTHING, Factory, pipe
+
+
+__all__ = [
+    "default_if_none",
+    "optional",
+    "pipe",
+    "to_bool",
+]
 
 
 def optional(converter):
@@ -10,10 +24,13 @@
     A converter that allows an attribute to be optional. An optional attribute
     is one which can be set to ``None``.
 
+    Type annotations will be inferred from the wrapped converter's, if it
+    has any.
+
     :param callable converter: the converter that is used for non-``None``
         values.
 
-    ..  versionadded:: 17.1.0
+    .. versionadded:: 17.1.0
     """
 
     def optional_converter(val):
@@ -21,4 +38,107 @@
             return None
         return converter(val)
 
+    xtr = _AnnotationExtractor(converter)
+
+    t = xtr.get_first_param_type()
+    if t:
+        optional_converter.__annotations__["val"] = typing.Optional[t]
+
+    rt = xtr.get_return_type()
+    if rt:
+        optional_converter.__annotations__["return"] = typing.Optional[rt]
+
     return optional_converter
+
+
+def default_if_none(default=NOTHING, factory=None):
+    """
+    A converter that replaces ``None`` values with *default* or the
+    result of *factory*.
+
+    :param default: Value to be used if ``None`` is passed. Passing an instance
+       of `attrs.Factory` is supported, however the ``takes_self`` option
+       is *not*.
+    :param callable factory: A callable that takes no parameters whose result
+       is used if ``None`` is passed.
+
+    :raises TypeError: If **neither** *default* nor *factory* is passed.
+    :raises TypeError: If **both** *default* and *factory* are passed.
+    :raises ValueError: If an instance of `attrs.Factory` is passed with
+       ``takes_self=True``.
+
+    .. versionadded:: 18.2.0
+    """
+    if default is NOTHING and factory is None:
+        raise TypeError("Must pass either `default` or `factory`.")
+
+    if default is not NOTHING and factory is not None:
+        raise TypeError(
+            "Must pass either `default` or `factory` but not both."
+        )
+
+    if factory is not None:
+        default = Factory(factory)
+
+    if isinstance(default, Factory):
+        if default.takes_self:
+            raise ValueError(
+                "`takes_self` is not supported by default_if_none."
+            )
+
+        def default_if_none_converter(val):
+            if val is not None:
+                return val
+
+            return default.factory()
+
+    else:
+
+        def default_if_none_converter(val):
+            if val is not None:
+                return val
+
+            return default
+
+    return default_if_none_converter
+
+
+def to_bool(val):
+    """
+    Convert "boolean" strings (e.g., from env. vars.) to real booleans.
+
+    Values mapping to :code:`True`:
+
+    - :code:`True`
+    - :code:`"true"` / :code:`"t"`
+    - :code:`"yes"` / :code:`"y"`
+    - :code:`"on"`
+    - :code:`"1"`
+    - :code:`1`
+
+    Values mapping to :code:`False`:
+
+    - :code:`False`
+    - :code:`"false"` / :code:`"f"`
+    - :code:`"no"` / :code:`"n"`
+    - :code:`"off"`
+    - :code:`"0"`
+    - :code:`0`
+
+    :raises ValueError: for any other value.
+
+    .. versionadded:: 21.3.0
+    """
+    if isinstance(val, str):
+        val = val.lower()
+    truthy = {True, "true", "t", "yes", "y", "on", "1", 1}
+    falsy = {False, "false", "f", "no", "n", "off", "0", 0}
+    try:
+        if val in truthy:
+            return True
+        if val in falsy:
+            return False
+    except TypeError:
+        # Raised when "val" is not hashable (e.g., lists)
+        pass
+    raise ValueError("Cannot convert value to bool: {}".format(val))
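
A short sketch combining the converters above, using upstream-style imports
(hypothetical here; the vendored path is ``mercurial.thirdparty.attr``).

    import attr
    from attr import converters

    @attr.s
    class Config:
        verbose = attr.ib(converter=converters.to_bool, default=False)
        limit = attr.ib(converter=converters.optional(int), default=None)
        name = attr.ib(
            converter=converters.default_if_none("unnamed"), default=None
        )

    c = Config(verbose="yes", limit="10")
    assert (c.verbose, c.limit, c.name) == (True, 10, "unnamed")
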
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/mercurial/thirdparty/attr/converters.pyi	Fri Nov 25 15:14:40 2022 +0100
@@ -0,0 +1,13 @@
+from typing import Callable, Optional, TypeVar, overload
+
+from . import _ConverterType
+
+_T = TypeVar("_T")
+
+def pipe(*validators: _ConverterType) -> _ConverterType: ...
+def optional(converter: _ConverterType) -> _ConverterType: ...
+@overload
+def default_if_none(default: _T) -> _ConverterType: ...
+@overload
+def default_if_none(*, factory: Callable[[], _T]) -> _ConverterType: ...
+def to_bool(val: str) -> bool: ...
--- a/mercurial/thirdparty/attr/exceptions.py	Thu Nov 24 10:34:34 2022 +0100
+++ b/mercurial/thirdparty/attr/exceptions.py	Fri Nov 25 15:14:40 2022 +0100
@@ -1,17 +1,35 @@
-from __future__ import absolute_import, division, print_function
+# SPDX-License-Identifier: MIT
 
 
-class FrozenInstanceError(AttributeError):
+class FrozenError(AttributeError):
     """
-    A frozen/immutable instance has been attempted to be modified.
+    A frozen/immutable instance or attribute has been attempted to be
+    modified.
 
     It mirrors the behavior of ``namedtuples`` by using the same error message
-    and subclassing :exc:`AttributeError`.
+    and subclassing `AttributeError`.
+
+    .. versionadded:: 20.1.0
+    """
+
+    msg = "can't set attribute"
+    args = [msg]
+
+
+class FrozenInstanceError(FrozenError):
+    """
+    A frozen instance has been attempted to be modified.
 
     .. versionadded:: 16.1.0
     """
-    msg = "can't set attribute"
-    args = [msg]
+
+
+class FrozenAttributeError(FrozenError):
+    """
+    A frozen attribute has been attempted to be modified.
+
+    .. versionadded:: 20.1.0
+    """
 
 
 class AttrsAttributeNotFoundError(ValueError):
@@ -37,3 +55,38 @@
 
     .. versionadded:: 17.1.0
     """
+
+
+class UnannotatedAttributeError(RuntimeError):
+    """
+    A class with ``auto_attribs=True`` has an ``attr.ib()`` without a type
+    annotation.
+
+    .. versionadded:: 17.3.0
+    """
+
+
+class PythonTooOldError(RuntimeError):
+    """
+    An attempt was made to use an ``attrs`` feature that requires a newer
+    Python version.
+
+    .. versionadded:: 18.2.0
+    """
+
+
+class NotCallableError(TypeError):
+    """
+    An ``attr.ib()`` requiring a callable has been set with a value
+    that is not callable.
+
+    .. versionadded:: 19.2.0
+    """
+
+    def __init__(self, msg, value):
+        super(TypeError, self).__init__(msg, value)
+        self.msg = msg
+        self.value = value
+
+    def __str__(self):
+        return str(self.msg)
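
A minimal sketch of the reworked hierarchy in use: instance-level and
attribute-level failures can both be caught through the common
``FrozenError`` base (imports assume the upstream package layout).

    import attr
    from attr.exceptions import FrozenError

    @attr.s(frozen=True)
    class Tag:
        name = attr.ib()

    try:
        Tag("tip").name = "other"
    except FrozenError as e:
        assert e.msg == "can't set attribute"
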
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/mercurial/thirdparty/attr/exceptions.pyi	Fri Nov 25 15:14:40 2022 +0100
@@ -0,0 +1,17 @@
+from typing import Any
+
+class FrozenError(AttributeError):
+    msg: str = ...
+
+class FrozenInstanceError(FrozenError): ...
+class FrozenAttributeError(FrozenError): ...
+class AttrsAttributeNotFoundError(ValueError): ...
+class NotAnAttrsClassError(ValueError): ...
+class DefaultAlreadySetError(RuntimeError): ...
+class UnannotatedAttributeError(RuntimeError): ...
+class PythonTooOldError(RuntimeError): ...
+
+class NotCallableError(TypeError):
+    msg: str = ...
+    value: Any = ...
+    def __init__(self, msg: str, value: Any) -> None: ...
--- a/mercurial/thirdparty/attr/filters.py	Thu Nov 24 10:34:34 2022 +0100
+++ b/mercurial/thirdparty/attr/filters.py	Fri Nov 25 15:14:40 2022 +0100
@@ -1,10 +1,9 @@
+# SPDX-License-Identifier: MIT
+
 """
-Commonly useful filters for :func:`attr.asdict`.
+Commonly useful filters for `attr.asdict`.
 """
 
-from __future__ import absolute_import, division, print_function
-
-from ._compat import isclass
 from ._make import Attribute
 
 
@@ -13,19 +12,19 @@
     Returns a tuple of `frozenset`s of classes and attributes.
     """
     return (
-        frozenset(cls for cls in what if isclass(cls)),
+        frozenset(cls for cls in what if isinstance(cls, type)),
         frozenset(cls for cls in what if isinstance(cls, Attribute)),
     )
 
 
 def include(*what):
-    r"""
-    Whitelist *what*.
+    """
+    Include *what*.
 
-    :param what: What to whitelist.
-    :type what: :class:`list` of :class:`type` or :class:`attr.Attribute`\ s
+    :param what: What to include.
+    :type what: `list` of `type` or `attrs.Attribute`\\ s
 
-    :rtype: :class:`callable`
+    :rtype: `callable`
     """
     cls, attrs = _split_what(what)
 
@@ -36,13 +35,13 @@
 
 
 def exclude(*what):
-    r"""
-    Blacklist *what*.
+    """
+    Exclude *what*.
 
-    :param what: What to blacklist.
-    :type what: :class:`list` of classes or :class:`attr.Attribute`\ s.
+    :param what: What to exclude.
+    :type what: `list` of classes or `attrs.Attribute`\\ s.
 
-    :rtype: :class:`callable`
+    :rtype: `callable`
     """
     cls, attrs = _split_what(what)
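
A usage sketch for ``include``/``exclude`` as `attr.asdict` filters; fields
can be selected by type or by ``attrs.Attribute`` (upstream-style imports,
illustrative class and field names).

    import attr
    from attr import filters

    @attr.s
    class Rev:
        node = attr.ib()
        rev = attr.ib()
        phase = attr.ib()

    r = Rev(node=b"abc", rev=5, phase="draft")
    # Keep int-valued fields plus the `node` attribute explicitly.
    d = attr.asdict(r, filter=filters.include(int, attr.fields(Rev).node))
    assert d == {"node": b"abc", "rev": 5}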
 
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/mercurial/thirdparty/attr/filters.pyi	Fri Nov 25 15:14:40 2022 +0100
@@ -0,0 +1,6 @@
+from typing import Any, Union
+
+from . import Attribute, _FilterType
+
+def include(*what: Union[type, Attribute[Any]]) -> _FilterType[Any]: ...
+def exclude(*what: Union[type, Attribute[Any]]) -> _FilterType[Any]: ...
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/mercurial/thirdparty/attr/setters.py	Fri Nov 25 15:14:40 2022 +0100
@@ -0,0 +1,73 @@
+# SPDX-License-Identifier: MIT
+
+"""
+Commonly used hooks for on_setattr.
+"""
+
+
+from . import _config
+from .exceptions import FrozenAttributeError
+
+
+def pipe(*setters):
+    """
+    Run all *setters* and return the return value of the last one.
+
+    .. versionadded:: 20.1.0
+    """
+
+    def wrapped_pipe(instance, attrib, new_value):
+        rv = new_value
+
+        for setter in setters:
+            rv = setter(instance, attrib, rv)
+
+        return rv
+
+    return wrapped_pipe
+
+
+def frozen(_, __, ___):
+    """
+    Prevent an attribute from being modified.
+
+    .. versionadded:: 20.1.0
+    """
+    raise FrozenAttributeError()
+
+
+def validate(instance, attrib, new_value):
+    """
+    Run *attrib*'s validator on *new_value* if it has one.
+
+    .. versionadded:: 20.1.0
+    """
+    if _config._run_validators is False:
+        return new_value
+
+    v = attrib.validator
+    if not v:
+        return new_value
+
+    v(instance, attrib, new_value)
+
+    return new_value
+
+
+def convert(instance, attrib, new_value):
+    """
+    Run *attrib*'s converter -- if it has one -- on *new_value* and return the
+    result.
+
+    .. versionadded:: 20.1.0
+    """
+    c = attrib.converter
+    if c:
+        return c(new_value)
+
+    return new_value
+
+
+# Sentinel for disabling class-wide *on_setattr* hooks for certain attributes.
+# autodata stopped working, so the docstring is inlined in the API docs.
+NO_OP = object()
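
A sketch wiring these hooks up per attribute (upstream-style imports; the
class and field names are illustrative).

    import attr
    from attr import setters, validators

    @attr.s
    class Commit:
        # re-run the validator on every assignment
        user = attr.ib(
            validator=validators.instance_of(str),
            on_setattr=setters.validate,
        )
        # reject any assignment after __init__
        node = attr.ib(default=None, on_setattr=setters.frozen)

    c = Commit(user="alice")
    c.user = "bob"  # validated, then stored
    try:
        c.node = "abc123"
    except attr.exceptions.FrozenAttributeError:
        pass
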
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/mercurial/thirdparty/attr/setters.pyi	Fri Nov 25 15:14:40 2022 +0100
@@ -0,0 +1,19 @@
+from typing import Any, NewType, NoReturn, TypeVar, cast
+
+from . import Attribute, _OnSetAttrType
+
+_T = TypeVar("_T")
+
+def frozen(
+    instance: Any, attribute: Attribute[Any], new_value: Any
+) -> NoReturn: ...
+def pipe(*setters: _OnSetAttrType) -> _OnSetAttrType: ...
+def validate(instance: Any, attribute: Attribute[_T], new_value: _T) -> _T: ...
+
+# convert is allowed to return Any, because converters can be chained using pipe.
+def convert(
+    instance: Any, attribute: Attribute[Any], new_value: Any
+) -> Any: ...
+
+_NoOpType = NewType("_NoOpType", object)
+NO_OP: _NoOpType
--- a/mercurial/thirdparty/attr/validators.py	Thu Nov 24 10:34:34 2022 +0100
+++ b/mercurial/thirdparty/attr/validators.py	Fri Nov 25 15:14:40 2022 +0100
@@ -1,24 +1,99 @@
+# SPDX-License-Identifier: MIT
+
 """
 Commonly useful validators.
 """
 
-from __future__ import absolute_import, division, print_function
+
+import operator
+import re
+
+from contextlib import contextmanager
 
-from ._make import attr, attributes, and_, _AndValidator
+from ._config import get_run_validators, set_run_validators
+from ._make import _AndValidator, and_, attrib, attrs
+from .exceptions import NotCallableError
+
+
+try:
+    Pattern = re.Pattern
+except AttributeError:  # Python <3.7 lacks a Pattern type.
+    Pattern = type(re.compile(""))
 
 
 __all__ = [
     "and_",
+    "deep_iterable",
+    "deep_mapping",
+    "disabled",
+    "ge",
+    "get_disabled",
+    "gt",
     "in_",
     "instance_of",
+    "is_callable",
+    "le",
+    "lt",
+    "matches_re",
+    "max_len",
+    "min_len",
     "optional",
     "provides",
+    "set_disabled",
 ]
 
 
-@attributes(repr=False, slots=True, hash=True)
-class _InstanceOfValidator(object):
-    type = attr()
+def set_disabled(disabled):
+    """
+    Globally disable or enable running validators.
+
+    By default, they are run.
+
+    :param disabled: If ``True``, disable running all validators.
+    :type disabled: bool
+
+    .. warning::
+
+        This function is not thread-safe!
+
+    .. versionadded:: 21.3.0
+    """
+    set_run_validators(not disabled)
+
+
+def get_disabled():
+    """
+    Return a bool indicating whether validators are currently disabled or not.
+
+    :return: ``True`` if validators are currently disabled.
+    :rtype: bool
+
+    .. versionadded:: 21.3.0
+    """
+    return not get_run_validators()
+
+
+@contextmanager
+def disabled():
+    """
+    Context manager that disables running validators within its context.
+
+    .. warning::
+
+        This context manager is not thread-safe!
+
+    .. versionadded:: 21.3.0
+    """
+    set_run_validators(False)
+    try:
+        yield
+    finally:
+        set_run_validators(True)
+
+
+@attrs(repr=False, slots=True, hash=True)
+class _InstanceOfValidator:
+    type = attrib()
 
     def __call__(self, inst, attr, value):
         """
@@ -27,38 +102,116 @@
         if not isinstance(value, self.type):
             raise TypeError(
                 "'{name}' must be {type!r} (got {value!r} that is a "
-                "{actual!r})."
-                .format(name=attr.name, type=self.type,
-                        actual=value.__class__, value=value),
-                attr, self.type, value,
+                "{actual!r}).".format(
+                    name=attr.name,
+                    type=self.type,
+                    actual=value.__class__,
+                    value=value,
+                ),
+                attr,
+                self.type,
+                value,
             )
 
     def __repr__(self):
-        return (
-            "<instance_of validator for type {type!r}>"
-            .format(type=self.type)
+        return "<instance_of validator for type {type!r}>".format(
+            type=self.type
         )
 
 
 def instance_of(type):
     """
-    A validator that raises a :exc:`TypeError` if the initializer is called
-    with a wrong type for this particular attribute (checks are perfomed using
-    :func:`isinstance` therefore it's also valid to pass a tuple of types).
+    A validator that raises a `TypeError` if the initializer is called
+    with a wrong type for this particular attribute (checks are performed using
+    `isinstance` therefore it's also valid to pass a tuple of types).
 
     :param type: The type to check for.
     :type type: type or tuple of types
 
     :raises TypeError: With a human readable error message, the attribute
-        (of type :class:`attr.Attribute`), the expected type, and the value it
+        (of type `attrs.Attribute`), the expected type, and the value it
         got.
     """
     return _InstanceOfValidator(type)
 
 
-@attributes(repr=False, slots=True, hash=True)
-class _ProvidesValidator(object):
-    interface = attr()
+@attrs(repr=False, frozen=True, slots=True)
+class _MatchesReValidator:
+    pattern = attrib()
+    match_func = attrib()
+
+    def __call__(self, inst, attr, value):
+        """
+        We use a callable class to be able to change the ``__repr__``.
+        """
+        if not self.match_func(value):
+            raise ValueError(
+                "'{name}' must match regex {pattern!r}"
+                " ({value!r} doesn't)".format(
+                    name=attr.name, pattern=self.pattern.pattern, value=value
+                ),
+                attr,
+                self.pattern,
+                value,
+            )
+
+    def __repr__(self):
+        return "<matches_re validator for pattern {pattern!r}>".format(
+            pattern=self.pattern
+        )
+
+
+def matches_re(regex, flags=0, func=None):
+    r"""
+    A validator that raises `ValueError` if the initializer is called
+    with a string that doesn't match *regex*.
+
+    :param regex: a regex string or precompiled pattern to match against
+    :param int flags: flags that will be passed to the underlying re function
+        (default 0)
+    :param callable func: which underlying `re` function to call. Valid options
+        are `re.fullmatch`, `re.search`, and `re.match`; the default ``None``
+        means `re.fullmatch`. For performance reasons, the pattern is always
+        precompiled using `re.compile`.
+
+    .. versionadded:: 19.2.0
+    .. versionchanged:: 21.3.0 *regex* can be a pre-compiled pattern.
+    """
+    valid_funcs = (re.fullmatch, None, re.search, re.match)
+    if func not in valid_funcs:
+        raise ValueError(
+            "'func' must be one of {}.".format(
+                ", ".join(
+                    sorted(
+                        e and e.__name__ or "None" for e in set(valid_funcs)
+                    )
+                )
+            )
+        )
+
+    if isinstance(regex, Pattern):
+        if flags:
+            raise TypeError(
+                "'flags' can only be used with a string pattern; "
+                "pass flags to re.compile() instead"
+            )
+        pattern = regex
+    else:
+        pattern = re.compile(regex, flags)
+
+    if func is re.match:
+        match_func = pattern.match
+    elif func is re.search:
+        match_func = pattern.search
+    else:
+        match_func = pattern.fullmatch
+
+    return _MatchesReValidator(pattern, match_func)
+
+
+@attrs(repr=False, slots=True, hash=True)
+class _ProvidesValidator:
+    interface = attrib()
 
     def __call__(self, inst, attr, value):
         """
@@ -67,37 +220,40 @@
         if not self.interface.providedBy(value):
             raise TypeError(
                 "'{name}' must provide {interface!r} which {value!r} "
-                "doesn't."
-                .format(name=attr.name, interface=self.interface, value=value),
-                attr, self.interface, value,
+                "doesn't.".format(
+                    name=attr.name, interface=self.interface, value=value
+                ),
+                attr,
+                self.interface,
+                value,
             )
 
     def __repr__(self):
-        return (
-            "<provides validator for interface {interface!r}>"
-            .format(interface=self.interface)
+        return "<provides validator for interface {interface!r}>".format(
+            interface=self.interface
         )
 
 
 def provides(interface):
     """
-    A validator that raises a :exc:`TypeError` if the initializer is called
+    A validator that raises a `TypeError` if the initializer is called
     with an object that does not provide the requested *interface* (checks are
     performed using ``interface.providedBy(value)`` (see `zope.interface
     <https://zopeinterface.readthedocs.io/en/latest/>`_).
 
-    :param zope.interface.Interface interface: The interface to check for.
+    :param interface: The interface to check for.
+    :type interface: ``zope.interface.Interface``
 
     :raises TypeError: With a human readable error message, the attribute
-        (of type :class:`attr.Attribute`), the expected interface, and the
+        (of type `attrs.Attribute`), the expected interface, and the
         value it got.
     """
     return _ProvidesValidator(interface)
 
 
-@attributes(repr=False, slots=True, hash=True)
-class _OptionalValidator(object):
-    validator = attr()
+@attrs(repr=False, slots=True, hash=True)
+class _OptionalValidator:
+    validator = attrib()
 
     def __call__(self, inst, attr, value):
         if value is None:
@@ -106,9 +262,8 @@
         self.validator(inst, attr, value)
 
     def __repr__(self):
-        return (
-            "<optional validator for {what} or None>"
-            .format(what=repr(self.validator))
+        return "<optional validator for {what} or None>".format(
+            what=repr(self.validator)
         )
 
 
@@ -120,7 +275,7 @@
 
     :param validator: A validator (or a list of validators) that is used for
         non-``None`` values.
-    :type validator: callable or :class:`list` of callables.
+    :type validator: callable or `list` of callables.
 
     .. versionadded:: 15.1.0
     .. versionchanged:: 17.1.0 *validator* can be a list of validators.
@@ -130,37 +285,310 @@
     return _OptionalValidator(validator)
 
 
-@attributes(repr=False, slots=True, hash=True)
-class _InValidator(object):
-    options = attr()
+@attrs(repr=False, slots=True, hash=True)
+class _InValidator:
+    options = attrib()
 
     def __call__(self, inst, attr, value):
-        if value not in self.options:
+        try:
+            in_options = value in self.options
+        except TypeError:  # e.g. `1 in "abc"`
+            in_options = False
+
+        if not in_options:
             raise ValueError(
-                "'{name}' must be in {options!r} (got {value!r})"
-                .format(name=attr.name, options=self.options, value=value)
+                "'{name}' must be in {options!r} (got {value!r})".format(
+                    name=attr.name, options=self.options, value=value
+                ),
+                attr,
+                self.options,
+                value,
             )
 
     def __repr__(self):
-        return (
-            "<in_ validator with options {options!r}>"
-            .format(options=self.options)
+        return "<in_ validator with options {options!r}>".format(
+            options=self.options
         )
 
 
 def in_(options):
     """
-    A validator that raises a :exc:`ValueError` if the initializer is called
+    A validator that raises a `ValueError` if the initializer is called
     with a value that does not belong in the options provided.  The check is
     performed using ``value in options``.
 
     :param options: Allowed options.
-    :type options: list, tuple, :class:`enum.Enum`, ...
+    :type options: list, tuple, `enum.Enum`, ...
 
     :raises ValueError: With a human readable error message, the attribute (of
-       type :class:`attr.Attribute`), the expected options, and the value it
+       type `attrs.Attribute`), the expected options, and the value it
        got.
 
     .. versionadded:: 17.1.0
+    .. versionchanged:: 22.1.0
+       The ValueError was incomplete until now and only contained the human
+       readable error message. Now it contains all the information that has
+       been promised since 17.1.0.
     """
     return _InValidator(options)
+
+
+@attrs(repr=False, slots=False, hash=True)
+class _IsCallableValidator:
+    def __call__(self, inst, attr, value):
+        """
+        We use a callable class to be able to change the ``__repr__``.
+        """
+        if not callable(value):
+            message = (
+                "'{name}' must be callable "
+                "(got {value!r} that is a {actual!r})."
+            )
+            raise NotCallableError(
+                msg=message.format(
+                    name=attr.name, value=value, actual=value.__class__
+                ),
+                value=value,
+            )
+
+    def __repr__(self):
+        return "<is_callable validator>"
+
+
+def is_callable():
+    """
+    A validator that raises an `attr.exceptions.NotCallableError` if the
+    initializer is called with a value for this particular attribute
+    that is not callable.
+
+    .. versionadded:: 19.1.0
+
+    :raises `attr.exceptions.NotCallableError`: With a human readable error
+        message containing the attribute (`attrs.Attribute`) name,
+        and the value it got.
+    """
+    return _IsCallableValidator()
+
+
+@attrs(repr=False, slots=True, hash=True)
+class _DeepIterable:
+    member_validator = attrib(validator=is_callable())
+    iterable_validator = attrib(
+        default=None, validator=optional(is_callable())
+    )
+
+    def __call__(self, inst, attr, value):
+        """
+        We use a callable class to be able to change the ``__repr__``.
+        """
+        if self.iterable_validator is not None:
+            self.iterable_validator(inst, attr, value)
+
+        for member in value:
+            self.member_validator(inst, attr, member)
+
+    def __repr__(self):
+        iterable_identifier = (
+            ""
+            if self.iterable_validator is None
+            else " {iterable!r}".format(iterable=self.iterable_validator)
+        )
+        return (
+            "<deep_iterable validator for{iterable_identifier}"
+            " iterables of {member!r}>"
+        ).format(
+            iterable_identifier=iterable_identifier,
+            member=self.member_validator,
+        )
+
+
+def deep_iterable(member_validator, iterable_validator=None):
+    """
+    A validator that performs deep validation of an iterable.
+
+    :param member_validator: Validator(s) to apply to iterable members
+    :param iterable_validator: Validator to apply to iterable itself
+        (optional)
+
+    .. versionadded:: 19.1.0
+
+    :raises TypeError: if any sub-validators fail
+    """
+    if isinstance(member_validator, (list, tuple)):
+        member_validator = and_(*member_validator)
+    return _DeepIterable(member_validator, iterable_validator)
+
+
+@attrs(repr=False, slots=True, hash=True)
+class _DeepMapping:
+    key_validator = attrib(validator=is_callable())
+    value_validator = attrib(validator=is_callable())
+    mapping_validator = attrib(default=None, validator=optional(is_callable()))
+
+    def __call__(self, inst, attr, value):
+        """
+        We use a callable class to be able to change the ``__repr__``.
+        """
+        if self.mapping_validator is not None:
+            self.mapping_validator(inst, attr, value)
+
+        for key in value:
+            self.key_validator(inst, attr, key)
+            self.value_validator(inst, attr, value[key])
+
+    def __repr__(self):
+        return (
+            "<deep_mapping validator for objects mapping {key!r} to {value!r}>"
+        ).format(key=self.key_validator, value=self.value_validator)
+
+
+def deep_mapping(key_validator, value_validator, mapping_validator=None):
+    """
+    A validator that performs deep validation of a dictionary.
+
+    :param key_validator: Validator to apply to dictionary keys
+    :param value_validator: Validator to apply to dictionary values
+    :param mapping_validator: Validator to apply to top-level mapping
+        attribute (optional)
+
+    .. versionadded:: 19.1.0
+
+    :raises TypeError: if any sub-validators fail
+    """
+    return _DeepMapping(key_validator, value_validator, mapping_validator)
+
+
+@attrs(repr=False, frozen=True, slots=True)
+class _NumberValidator:
+    bound = attrib()
+    compare_op = attrib()
+    compare_func = attrib()
+
+    def __call__(self, inst, attr, value):
+        """
+        We use a callable class to be able to change the ``__repr__``.
+        """
+        if not self.compare_func(value, self.bound):
+            raise ValueError(
+                "'{name}' must be {op} {bound}: {value}".format(
+                    name=attr.name,
+                    op=self.compare_op,
+                    bound=self.bound,
+                    value=value,
+                )
+            )
+
+    def __repr__(self):
+        return "<Validator for x {op} {bound}>".format(
+            op=self.compare_op, bound=self.bound
+        )
+
+
+def lt(val):
+    """
+    A validator that raises `ValueError` if the initializer is called
+    with a number larger than or equal to *val*.
+
+    :param val: Exclusive upper bound for values
+
+    .. versionadded:: 21.3.0
+    """
+    return _NumberValidator(val, "<", operator.lt)
+
+
+def le(val):
+    """
+    A validator that raises `ValueError` if the initializer is called
+    with a number greater than *val*.
+
+    :param val: Inclusive upper bound for values
+
+    .. versionadded:: 21.3.0
+    """
+    return _NumberValidator(val, "<=", operator.le)
+
+
+def ge(val):
+    """
+    A validator that raises `ValueError` if the initializer is called
+    with a number smaller than *val*.
+
+    :param val: Inclusive lower bound for values
+
+    .. versionadded:: 21.3.0
+    """
+    return _NumberValidator(val, ">=", operator.ge)
+
+
+def gt(val):
+    """
+    A validator that raises `ValueError` if the initializer is called
+    with a number smaller than or equal to *val*.
+
+    :param val: Exclusive lower bound for values
+
+    .. versionadded:: 21.3.0
+    """
+    return _NumberValidator(val, ">", operator.gt)
+
+
+@attrs(repr=False, frozen=True, slots=True)
+class _MaxLengthValidator:
+    max_length = attrib()
+
+    def __call__(self, inst, attr, value):
+        """
+        We use a callable class to be able to change the ``__repr__``.
+        """
+        if len(value) > self.max_length:
+            raise ValueError(
+                "Length of '{name}' must be <= {max}: {len}".format(
+                    name=attr.name, max=self.max_length, len=len(value)
+                )
+            )
+
+    def __repr__(self):
+        return "<max_len validator for {max}>".format(max=self.max_length)
+
+
+def max_len(length):
+    """
+    A validator that raises `ValueError` if the initializer is called
+    with a string or iterable that is longer than *length*.
+
+    :param int length: Maximum length of the string or iterable
+
+    .. versionadded:: 21.3.0
+    """
+    return _MaxLengthValidator(length)
+
+
+@attrs(repr=False, frozen=True, slots=True)
+class _MinLengthValidator:
+    min_length = attrib()
+
+    def __call__(self, inst, attr, value):
+        """
+        We use a callable class to be able to change the ``__repr__``.
+        """
+        if len(value) < self.min_length:
+            raise ValueError(
+                "Length of '{name}' must be => {min}: {len}".format(
+                    name=attr.name, min=self.min_length, len=len(value)
+                )
+            )
+
+    def __repr__(self):
+        return "<min_len validator for {min}>".format(min=self.min_length)
+
+
+def min_len(length):
+    """
+    A validator that raises `ValueError` if the initializer is called
+    with a string or iterable that is shorter than *length*.
+
+    :param int length: Minimum length of the string or iterable
+
+    .. versionadded:: 22.1.0
+    """
+    return _MinLengthValidator(length)
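
A short sketch exercising several of the validators above (upstream-style
imports; the class and field names are illustrative).

    import attr
    from attr import validators as v

    @attr.s
    class Bookmark:
        name = attr.ib(validator=v.matches_re(r"[\w\-]+"))
        rev = attr.ib(validator=v.and_(v.instance_of(int), v.ge(0)))
        tags = attr.ib(
            validator=v.deep_iterable(
                member_validator=v.instance_of(str),
                iterable_validator=v.instance_of(list),
            ),
            factory=list,
        )

    Bookmark(name="stable", rev=3, tags=["release"])  # passes
    # Bookmark(name="stable", rev=-1) would raise ValueError via ge(0).
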
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/mercurial/thirdparty/attr/validators.pyi	Fri Nov 25 15:14:40 2022 +0100
@@ -0,0 +1,80 @@
+from typing import (
+    Any,
+    AnyStr,
+    Callable,
+    Container,
+    ContextManager,
+    Iterable,
+    List,
+    Mapping,
+    Match,
+    Optional,
+    Pattern,
+    Tuple,
+    Type,
+    TypeVar,
+    Union,
+    overload,
+)
+
+from . import _ValidatorType
+from . import _ValidatorArgType
+
+_T = TypeVar("_T")
+_T1 = TypeVar("_T1")
+_T2 = TypeVar("_T2")
+_T3 = TypeVar("_T3")
+_I = TypeVar("_I", bound=Iterable)
+_K = TypeVar("_K")
+_V = TypeVar("_V")
+_M = TypeVar("_M", bound=Mapping)
+
+def set_disabled(run: bool) -> None: ...
+def get_disabled() -> bool: ...
+def disabled() -> ContextManager[None]: ...
+
+# To be more precise on instance_of use some overloads.
+# If there are more than 3 items in the tuple then we fall back to Any
+@overload
+def instance_of(type: Type[_T]) -> _ValidatorType[_T]: ...
+@overload
+def instance_of(type: Tuple[Type[_T]]) -> _ValidatorType[_T]: ...
+@overload
+def instance_of(
+    type: Tuple[Type[_T1], Type[_T2]]
+) -> _ValidatorType[Union[_T1, _T2]]: ...
+@overload
+def instance_of(
+    type: Tuple[Type[_T1], Type[_T2], Type[_T3]]
+) -> _ValidatorType[Union[_T1, _T2, _T3]]: ...
+@overload
+def instance_of(type: Tuple[type, ...]) -> _ValidatorType[Any]: ...
+def provides(interface: Any) -> _ValidatorType[Any]: ...
+def optional(
+    validator: Union[_ValidatorType[_T], List[_ValidatorType[_T]]]
+) -> _ValidatorType[Optional[_T]]: ...
+def in_(options: Container[_T]) -> _ValidatorType[_T]: ...
+def and_(*validators: _ValidatorType[_T]) -> _ValidatorType[_T]: ...
+def matches_re(
+    regex: Union[Pattern[AnyStr], AnyStr],
+    flags: int = ...,
+    func: Optional[
+        Callable[[AnyStr, AnyStr, int], Optional[Match[AnyStr]]]
+    ] = ...,
+) -> _ValidatorType[AnyStr]: ...
+def deep_iterable(
+    member_validator: _ValidatorArgType[_T],
+    iterable_validator: Optional[_ValidatorType[_I]] = ...,
+) -> _ValidatorType[_I]: ...
+def deep_mapping(
+    key_validator: _ValidatorType[_K],
+    value_validator: _ValidatorType[_V],
+    mapping_validator: Optional[_ValidatorType[_M]] = ...,
+) -> _ValidatorType[_M]: ...
+def is_callable() -> _ValidatorType[_T]: ...
+def lt(val: _T) -> _ValidatorType[_T]: ...
+def le(val: _T) -> _ValidatorType[_T]: ...
+def ge(val: _T) -> _ValidatorType[_T]: ...
+def gt(val: _T) -> _ValidatorType[_T]: ...
+def max_len(length: int) -> _ValidatorType[_T]: ...
+def min_len(length: int) -> _ValidatorType[_T]: ...
--- a/mercurial/util.py	Thu Nov 24 10:34:34 2022 +0100
+++ b/mercurial/util.py	Fri Nov 25 15:14:40 2022 +0100
@@ -2542,6 +2542,7 @@
         # delegated methods
         self.read = self._fp.read
         self.write = self._fp.write
+        self.writelines = self._fp.writelines
         self.seek = self._fp.seek
         self.tell = self._fp.tell
         self.fileno = self._fp.fileno
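
A minimal sketch of the delegation pattern this hunk extends: the wrapper
binds the underlying file's bound methods once at construction time, so
``writelines`` (like ``read`` and ``write``) incurs no per-call indirection.
The class name is illustrative.

    import io

    class fileproxy:
        def __init__(self, fp):
            self._fp = fp
            # delegated methods, bound once
            self.read = fp.read
            self.write = fp.write
            self.writelines = fp.writelines  # newly delegated above
            self.seek = fp.seek

    p = fileproxy(io.BytesIO())
    p.writelines([b"a\n", b"b\n"])
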
--- a/mercurial/utils/storageutil.py	Thu Nov 24 10:34:34 2022 +0100
+++ b/mercurial/utils/storageutil.py	Fri Nov 25 15:14:40 2022 +0100
@@ -305,6 +305,7 @@
     revisiondata=False,
     assumehaveparentrevisions=False,
     sidedata_helpers=None,
+    debug_info=None,
 ):
     """Generic implementation of ifiledata.emitrevisions().
 
@@ -370,6 +371,10 @@
     ``sidedata_helpers`` (optional)
         If not None, means that sidedata should be included.
         See `revlogutil.sidedata.get_sidedata_helpers`.
+
+    ``debug_info`` (optional)
+        A dictionary used to gather information about the bundling
+        process (if present; see the ``debug.bundling.stats`` config).
     """
 
     fnode = store.node
@@ -395,23 +400,43 @@
         if rev == nullrev:
             continue
 
+        debug_delta_source = None
+        if debug_info is not None:
+            debug_info['revision-total'] += 1
+
         node = fnode(rev)
         p1rev, p2rev = store.parentrevs(rev)
 
+        if debug_info is not None:
+            if p1rev != p2rev and p1rev != nullrev and p2rev != nullrev:
+                debug_info['merge-total'] += 1
+
         if deltaparentfn:
             deltaparentrev = deltaparentfn(rev)
+            if debug_info is not None:
+                if deltaparentrev == nullrev:
+                    debug_info['available-full'] += 1
+                else:
+                    debug_info['available-delta'] += 1
+
         else:
             deltaparentrev = nullrev
 
         # Forced delta against previous mode.
         if deltamode == repository.CG_DELTAMODE_PREV:
+            if debug_info is not None:
+                debug_delta_source = "prev"
             baserev = prevrev
 
         # We're instructed to send fulltext. Honor that.
         elif deltamode == repository.CG_DELTAMODE_FULL:
+            if debug_info is not None:
+                debug_delta_source = "full"
             baserev = nullrev
         # We're instructed to use p1. Honor that
         elif deltamode == repository.CG_DELTAMODE_P1:
+            if debug_info is not None:
+                debug_delta_source = "p1"
             baserev = p1rev
 
         # There is a delta in storage. We try to use that because it
@@ -421,20 +446,29 @@
             # Base revision was already emitted in this group. We can
             # always safely use the delta.
             if deltaparentrev in available:
+                if debug_info is not None:
+                    debug_delta_source = "storage"
                 baserev = deltaparentrev
 
             # Base revision is a parent that hasn't been emitted already.
             # Use it if we can assume the receiver has the parent revision.
             elif assumehaveparentrevisions and deltaparentrev in (p1rev, p2rev):
+                if debug_info is not None:
+                    debug_delta_source = "storage"
                 baserev = deltaparentrev
-
             # No guarantee the receiver has the delta parent. Send delta
             # against last revision (if possible), which in the common case
             # should be similar enough to this revision that the delta is
             # reasonable.
             elif prevrev is not None:
+                if debug_info is not None:
+                    debug_info['denied-base-not-available'] += 1
+                    debug_delta_source = "prev"
                 baserev = prevrev
             else:
+                if debug_info is not None:
+                    debug_info['denied-base-not-available'] += 1
+                    debug_delta_source = "full"
                 baserev = nullrev
 
         # Storage has a fulltext revision.
@@ -442,13 +476,24 @@
         # Let's use the previous revision, which is as good a guess as any.
         # There is definitely room to improve this logic.
         elif prevrev is not None:
+            if debug_info is not None:
+                debug_delta_source = "prev"
             baserev = prevrev
         else:
+            if debug_info is not None:
+                debug_delta_source = "full"
             baserev = nullrev
 
         # But we can't actually use our chosen delta base for whatever
         # reason. Reset to fulltext.
-        if baserev != nullrev and (candeltafn and not candeltafn(baserev, rev)):
+        if (
+            baserev != nullrev
+            and candeltafn is not None
+            and not candeltafn(baserev, rev)
+        ):
+            if debug_info is not None:
+                debug_delta_source = "full"
+                debug_info['denied-delta-candeltafn'] += 1
             baserev = nullrev
 
         revision = None
@@ -460,6 +505,9 @@
                 try:
                     revision = store.rawdata(node)
                 except error.CensoredNodeError as e:
+                    if debug_info is not None:
+                        debug_delta_source = "full"
+                        debug_info['denied-delta-not-available'] += 1
                     revision = e.tombstone
 
                 if baserev != nullrev:
@@ -471,12 +519,46 @@
             elif (
                 baserev == nullrev and deltamode != repository.CG_DELTAMODE_PREV
             ):
+                if debug_info is not None:
+                    debug_info['computed-delta'] += 1  # close enough
+                    debug_info['delta-full'] += 1
                 revision = store.rawdata(node)
                 available.add(rev)
             else:
                 if revdifffn:
+                    if debug_info is not None:
+                        if debug_delta_source == "full":
+                            debug_info['computed-delta'] += 1
+                            debug_info['delta-full'] += 1
+                        elif debug_delta_source == "prev":
+                            debug_info['computed-delta'] += 1
+                            debug_info['delta-against-prev'] += 1
+                        elif debug_delta_source == "p1":
+                            debug_info['computed-delta'] += 1
+                            debug_info['delta-against-p1'] += 1
+                        elif debug_delta_source == "storage":
+                            debug_info['reused-storage-delta'] += 1
+                        else:
+                            assert False, 'unreachable'
+
                     delta = revdifffn(baserev, rev)
                 else:
+                    if debug_info is not None:
+                        if debug_delta_source == "full":
+                            debug_info['computed-delta'] += 1
+                            debug_info['delta-full'] += 1
+                        elif debug_delta_source == "prev":
+                            debug_info['computed-delta'] += 1
+                            debug_info['delta-against-prev'] += 1
+                        elif debug_delta_source == "p1":
+                            debug_info['computed-delta'] += 1
+                            debug_info['delta-against-p1'] += 1
+                        elif debug_delta_source == "storage":
+                            # seems quite unlikely to happen
+                            debug_info['computed-delta'] += 1
+                            debug_info['reused-storage-delta'] += 1
+                        else:
+                            assert False, 'unreachable'
                     delta = mdiff.textdiff(
                         store.rawdata(baserev), store.rawdata(rev)
                     )
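
A plausible initializer for the ``debug_info`` mapping (the real one lives in
the bundling code behind the ``debug.bundling.stats`` config); the keys below
are exactly the counters this function increments.

    debug_info = {
        'revision-total': 0,
        'merge-total': 0,
        'available-full': 0,
        'available-delta': 0,
        'denied-base-not-available': 0,
        'denied-delta-candeltafn': 0,
        'denied-delta-not-available': 0,
        'reused-storage-delta': 0,
        'computed-delta': 0,
        'delta-full': 0,
        'delta-against-prev': 0,
        'delta-against-p1': 0,
    }
    # then passed as: emitrevisions(..., debug_info=debug_info)
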
--- a/mercurial/utils/stringutil.py	Thu Nov 24 10:34:34 2022 +0100
+++ b/mercurial/utils/stringutil.py	Fri Nov 25 15:14:40 2022 +0100
@@ -14,6 +14,11 @@
 import textwrap
 import types
 
+from typing import (
+    Optional,
+    overload,
+)
+
 from ..i18n import _
 from ..thirdparty import attr
 
@@ -30,6 +35,16 @@
 regexbytesescapemap = {i: (b'\\' + i) for i in _respecial}
 
 
+@overload
+def reescape(pat: bytes) -> bytes:
+    ...
+
+
+@overload
+def reescape(pat: str) -> str:
+    ...
+
+
 def reescape(pat):
     """Drop-in replacement for re.escape."""
     # NOTE: it is intentional that this works on unicodes and not
@@ -45,12 +60,12 @@
     return pat.encode('latin1')
 
 
-def pprint(o, bprefix=False, indent=0, level=0):
+def pprint(o, bprefix: bool = False, indent: int = 0, level: int = 0) -> bytes:
     """Pretty print an object."""
     return b''.join(pprintgen(o, bprefix=bprefix, indent=indent, level=level))
 
 
-def pprintgen(o, bprefix=False, indent=0, level=0):
+def pprintgen(o, bprefix: bool = False, indent: int = 0, level: int = 0):
     """Pretty print an object to a generator of atoms.
 
     ``bprefix`` is a flag influencing whether bytestrings are preferred with
@@ -250,7 +265,7 @@
         yield pycompat.byterepr(o)
 
 
-def prettyrepr(o):
+def prettyrepr(o) -> bytes:
     """Pretty print a representation of a possibly-nested object"""
     lines = []
     rs = pycompat.byterepr(o)
@@ -281,7 +296,7 @@
     return b'\n'.join(b'  ' * l + s for l, s in lines)
 
 
-def buildrepr(r):
+def buildrepr(r) -> bytes:
     """Format an optional printable representation from unexpanded bits
 
     ========  =================================
@@ -305,12 +320,12 @@
         return pprint(r)
 
 
-def binary(s):
+def binary(s: bytes) -> bool:
     """return true if a string is binary data"""
     return bool(s and b'\0' in s)
 
 
-def _splitpattern(pattern):
+def _splitpattern(pattern: bytes):
     if pattern.startswith(b're:'):
         return b're', pattern[3:]
     elif pattern.startswith(b'literal:'):
@@ -318,7 +333,7 @@
     return b'literal', pattern
 
 
-def stringmatcher(pattern, casesensitive=True):
+def stringmatcher(pattern: bytes, casesensitive: bool = True):
     """
     accepts a string, possibly starting with 're:' or 'literal:' prefix.
     returns the matcher name, pattern, and matcher function.
@@ -379,7 +394,7 @@
     raise error.ProgrammingError(b'unhandled pattern kind: %s' % kind)
 
 
-def substringregexp(pattern, flags=0):
+def substringregexp(pattern: bytes, flags: int = 0):
     """Build a regexp object from a string pattern possibly starting with
     're:' or 'literal:' prefix.
 
@@ -431,7 +446,7 @@
     raise error.ProgrammingError(b'unhandled pattern kind: %s' % kind)
 
 
-def shortuser(user):
+def shortuser(user: bytes) -> bytes:
     """Return a short representation of a user name or email address."""
     f = user.find(b'@')
     if f >= 0:
@@ -448,7 +463,7 @@
     return user
 
 
-def emailuser(user):
+def emailuser(user: bytes) -> bytes:
     """Return the user portion of an email address."""
     f = user.find(b'@')
     if f >= 0:
@@ -459,7 +474,7 @@
     return user
 
 
-def email(author):
+def email(author: bytes) -> bytes:
     '''get email of author.'''
     r = author.find(b'>')
     if r == -1:
@@ -467,7 +482,7 @@
     return author[author.find(b'<') + 1 : r]
 
 
-def person(author):
+def person(author: bytes) -> bytes:
     """Returns the name before an email address,
     interpreting it as per RFC 5322
 
@@ -612,7 +627,7 @@
     return mailmap
 
 
-def mapname(mailmap, author):
+def mapname(mailmap, author: bytes) -> bytes:
     """Returns the author field according to the mailmap cache, or
     the original author field.
 
@@ -663,7 +678,7 @@
 _correctauthorformat = remod.compile(br'^[^<]+\s<[^<>]+@[^<>]+>$')
 
 
-def isauthorwellformed(author):
+def isauthorwellformed(author: bytes) -> bool:
     """Return True if the author field is well formed
     (ie "Contributor Name <contrib@email.dom>")
 
@@ -685,7 +700,7 @@
     return _correctauthorformat.match(author) is not None
 
 
-def firstline(text):
+def firstline(text: bytes) -> bytes:
     """Return the first line of the input"""
     # Try to avoid running splitlines() on the whole string
     i = text.find(b'\n')
@@ -697,21 +712,26 @@
         return b''
 
 
-def ellipsis(text, maxlength=400):
+def ellipsis(text: bytes, maxlength: int = 400) -> bytes:
     """Trim string to at most maxlength (default: 400) columns in display."""
     return encoding.trim(text, maxlength, ellipsis=b'...')
 
 
-def escapestr(s):
+def escapestr(s: bytes) -> bytes:
+    # "bytes" is also a typing shortcut for bytes, bytearray, and memoryview
     if isinstance(s, memoryview):
         s = bytes(s)
     # call underlying function of s.encode('string_escape') directly for
     # Python 3 compatibility
+    # pytype: disable=bad-return-type
     return codecs.escape_encode(s)[0]  # pytype: disable=module-attr
+    # pytype: enable=bad-return-type
 
 
-def unescapestr(s):
+def unescapestr(s: bytes) -> bytes:
+    # pytype: disable=bad-return-type
     return codecs.escape_decode(s)[0]  # pytype: disable=module-attr
+    # pytype: enable=bad-return-type
 
 
 def forcebytestr(obj):
@@ -724,7 +744,7 @@
         return pycompat.bytestr(encoding.strtolocal(str(obj)))
 
 
-def uirepr(s):
+def uirepr(s: bytes) -> bytes:
     # Avoid double backslash in Windows path repr()
     return pycompat.byterepr(pycompat.bytestr(s)).replace(b'\\\\', b'\\')
 
@@ -838,7 +858,9 @@
     return tw(**kwargs)
 
 
-def wrap(line, width, initindent=b'', hangindent=b''):
+def wrap(
+    line: bytes, width: int, initindent: bytes = b'', hangindent: bytes = b''
+) -> bytes:
     maxindent = max(len(hangindent), len(initindent))
     if width <= maxindent:
         # adjust for weird terminal size
@@ -875,7 +897,7 @@
 }
 
 
-def parsebool(s):
+def parsebool(s: bytes) -> Optional[bool]:
     """Parse s into a boolean.
 
     If s is not a valid boolean, returns None.
@@ -883,7 +905,8 @@
     return _booleans.get(s.lower(), None)
 
 
-def parselist(value):
+# TODO: make arg mandatory (and fix code below?)
+def parselist(value: Optional[bytes]):
     """parse a configuration value as a list of comma/space separated strings
 
     >>> parselist(b'this,is "a small" ,test')
@@ -973,7 +996,7 @@
     return result or []
 
 
-def evalpythonliteral(s):
+def evalpythonliteral(s: bytes):
     """Evaluate a string containing a Python literal expression"""
     # We could backport our tokenizer hack to rewrite '' to u'' if we want
     return ast.literal_eval(s.decode('latin1'))
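
A self-contained sketch of what the ``@overload`` pairs used above buy: the
type checker resolves the return type from the argument type, while a single
runtime implementation serves both signatures (the function here is
hypothetical).

    from typing import overload

    @overload
    def upper(x: bytes) -> bytes: ...
    @overload
    def upper(x: str) -> str: ...
    def upper(x):
        # one runtime body behind the two static signatures
        return x.upper()

    assert upper(b"hg") == b"HG"
    assert upper("hg") == "HG"
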
--- a/mercurial/vfs.py	Thu Nov 24 10:34:34 2022 +0100
+++ b/mercurial/vfs.py	Fri Nov 25 15:14:40 2022 +0100
@@ -11,6 +11,10 @@
 import stat
 import threading
 
+from typing import (
+    Optional,
+)
+
 from .i18n import _
 from .pycompat import (
     delattr,
@@ -26,7 +30,7 @@
 )
 
 
-def _avoidambig(path, oldstat):
+def _avoidambig(path: bytes, oldstat):
     """Avoid file stat ambiguity forcibly
 
     This function causes copying ``path`` file, if it is owned by
@@ -60,16 +64,17 @@
         '''Prevent instantiation; don't call this from subclasses.'''
         raise NotImplementedError('attempted instantiating ' + str(type(self)))
 
-    def __call__(self, path, mode=b'rb', **kwargs):
+    # TODO: type return, which is util.posixfile wrapped by a proxy
+    def __call__(self, path: bytes, mode: bytes = b'rb', **kwargs):
         raise NotImplementedError
 
-    def _auditpath(self, path, mode):
+    def _auditpath(self, path: bytes, mode: bytes):
         raise NotImplementedError
 
-    def join(self, path, *insidef):
+    def join(self, path: Optional[bytes], *insidef: bytes) -> bytes:
         raise NotImplementedError
 
-    def tryread(self, path):
+    def tryread(self, path: bytes) -> bytes:
         '''gracefully return an empty string for missing files'''
         try:
             return self.read(path)
@@ -77,7 +82,7 @@
             pass
         return b""
 
-    def tryreadlines(self, path, mode=b'rb'):
+    def tryreadlines(self, path: bytes, mode: bytes = b'rb'):
         '''gracefully return an empty array for missing files'''
         try:
             return self.readlines(path, mode=mode)
@@ -95,57 +100,61 @@
         """
         return self.__call__
 
-    def read(self, path):
+    def read(self, path: bytes) -> bytes:
         with self(path, b'rb') as fp:
             return fp.read()
 
-    def readlines(self, path, mode=b'rb'):
+    def readlines(self, path: bytes, mode: bytes = b'rb'):
         with self(path, mode=mode) as fp:
             return fp.readlines()
 
-    def write(self, path, data, backgroundclose=False, **kwargs):
+    def write(
+        self, path: bytes, data: bytes, backgroundclose=False, **kwargs
+    ) -> int:
         with self(path, b'wb', backgroundclose=backgroundclose, **kwargs) as fp:
             return fp.write(data)
 
-    def writelines(self, path, data, mode=b'wb', notindexed=False):
+    def writelines(
+        self, path: bytes, data: bytes, mode: bytes = b'wb', notindexed=False
+    ) -> None:
         with self(path, mode=mode, notindexed=notindexed) as fp:
             return fp.writelines(data)
 
-    def append(self, path, data):
+    def append(self, path: bytes, data: bytes) -> int:
         with self(path, b'ab') as fp:
             return fp.write(data)
 
-    def basename(self, path):
+    def basename(self, path: bytes) -> bytes:
         """return base element of a path (as os.path.basename would do)
 
         This exists to allow handling of strange encoding if needed."""
         return os.path.basename(path)
 
-    def chmod(self, path, mode):
+    def chmod(self, path: bytes, mode: int) -> None:
         return os.chmod(self.join(path), mode)
 
-    def dirname(self, path):
+    def dirname(self, path: bytes) -> bytes:
         """return dirname element of a path (as os.path.dirname would do)
 
         This exists to allow handling of strange encoding if needed."""
         return os.path.dirname(path)
 
-    def exists(self, path=None):
+    def exists(self, path: Optional[bytes] = None) -> bool:
         return os.path.exists(self.join(path))
 
     def fstat(self, fp):
         return util.fstat(fp)
 
-    def isdir(self, path=None):
+    def isdir(self, path: Optional[bytes] = None) -> bool:
         return os.path.isdir(self.join(path))
 
-    def isfile(self, path=None):
+    def isfile(self, path: Optional[bytes] = None) -> bool:
         return os.path.isfile(self.join(path))
 
-    def islink(self, path=None):
+    def islink(self, path: Optional[bytes] = None) -> bool:
         return os.path.islink(self.join(path))
 
-    def isfileorlink(self, path=None):
+    def isfileorlink(self, path: Optional[bytes] = None) -> bool:
         """return whether path is a regular file or a symlink
 
         Unlike isfile, this doesn't follow symlinks."""
@@ -156,7 +165,7 @@
         mode = st.st_mode
         return stat.S_ISREG(mode) or stat.S_ISLNK(mode)
 
-    def _join(self, *paths):
+    def _join(self, *paths: bytes) -> bytes:
         root_idx = 0
         for idx, p in enumerate(paths):
             if os.path.isabs(p) or p.startswith(self._dir_sep):
@@ -166,41 +175,48 @@
         paths = [p for p in paths if p]
         return self._dir_sep.join(paths)
 
-    def reljoin(self, *paths):
+    def reljoin(self, *paths: bytes) -> bytes:
         """join various elements of a path together (as os.path.join would do)
 
         The vfs base is not injected so that path stay relative. This exists
         to allow handling of strange encoding if needed."""
         return self._join(*paths)
 
-    def split(self, path):
+    def split(self, path: bytes):
         """split top-most element of a path (as os.path.split would do)
 
         This exists to allow handling of strange encoding if needed."""
         return os.path.split(path)
 
-    def lexists(self, path=None):
+    def lexists(self, path: Optional[bytes] = None) -> bool:
         return os.path.lexists(self.join(path))
 
-    def lstat(self, path=None):
+    def lstat(self, path: Optional[bytes] = None):
         return os.lstat(self.join(path))
 
-    def listdir(self, path=None):
+    def listdir(self, path: Optional[bytes] = None):
         return os.listdir(self.join(path))
 
-    def makedir(self, path=None, notindexed=True):
+    def makedir(self, path: Optional[bytes] = None, notindexed=True):
         return util.makedir(self.join(path), notindexed)
 
-    def makedirs(self, path=None, mode=None):
+    def makedirs(
+        self, path: Optional[bytes] = None, mode: Optional[int] = None
+    ):
         return util.makedirs(self.join(path), mode)
 
-    def makelock(self, info, path):
+    def makelock(self, info, path: bytes):
         return util.makelock(info, self.join(path))
 
-    def mkdir(self, path=None):
+    def mkdir(self, path: Optional[bytes] = None):
         return os.mkdir(self.join(path))
 
-    def mkstemp(self, suffix=b'', prefix=b'tmp', dir=None):
+    def mkstemp(
+        self,
+        suffix: bytes = b'',
+        prefix: bytes = b'tmp',
+        dir: Optional[bytes] = None,
+    ):
         fd, name = pycompat.mkstemp(
             suffix=suffix, prefix=prefix, dir=self.join(dir)
         )
@@ -210,13 +226,13 @@
         else:
             return fd, fname
 
-    def readdir(self, path=None, stat=None, skip=None):
+    def readdir(self, path: Optional[bytes] = None, stat=None, skip=None):
         return util.listdir(self.join(path), stat, skip)
 
-    def readlock(self, path):
+    def readlock(self, path: bytes) -> bytes:
         return util.readlock(self.join(path))
 
-    def rename(self, src, dst, checkambig=False):
+    def rename(self, src: bytes, dst: bytes, checkambig=False):
         """Rename from src to dst
 
         checkambig argument is used with util.filestat, and is useful
@@ -238,18 +254,20 @@
             return ret
         return util.rename(srcpath, dstpath)
 
-    def readlink(self, path):
+    def readlink(self, path: bytes) -> bytes:
         return util.readlink(self.join(path))
 
-    def removedirs(self, path=None):
+    def removedirs(self, path: Optional[bytes] = None):
         """Remove a leaf directory and all empty intermediate ones"""
         return util.removedirs(self.join(path))
 
-    def rmdir(self, path=None):
+    def rmdir(self, path: Optional[bytes] = None):
         """Remove an empty directory."""
         return os.rmdir(self.join(path))
 
-    def rmtree(self, path=None, ignore_errors=False, forcibly=False):
+    def rmtree(
+        self, path: Optional[bytes] = None, ignore_errors=False, forcibly=False
+    ):
         """Remove a directory tree recursively
 
         If ``forcibly``, this tries to remove READ-ONLY files, too.
@@ -272,28 +290,30 @@
             self.join(path), ignore_errors=ignore_errors, onerror=onerror
         )
 
-    def setflags(self, path, l, x):
+    def setflags(self, path: bytes, l: bool, x: bool):
         return util.setflags(self.join(path), l, x)
 
-    def stat(self, path=None):
+    def stat(self, path: Optional[bytes] = None):
         return os.stat(self.join(path))
 
-    def unlink(self, path=None):
+    def unlink(self, path: Optional[bytes] = None):
         return util.unlink(self.join(path))
 
-    def tryunlink(self, path=None):
+    def tryunlink(self, path: Optional[bytes] = None):
         """Attempt to remove a file, ignoring missing file errors."""
         util.tryunlink(self.join(path))
 
-    def unlinkpath(self, path=None, ignoremissing=False, rmdir=True):
+    def unlinkpath(
+        self, path: Optional[bytes] = None, ignoremissing=False, rmdir=True
+    ):
         return util.unlinkpath(
             self.join(path), ignoremissing=ignoremissing, rmdir=rmdir
         )
 
-    def utime(self, path=None, t=None):
+    def utime(self, path: Optional[bytes] = None, t=None):
         return os.utime(self.join(path), t)
 
-    def walk(self, path=None, onerror=None):
+    def walk(self, path: Optional[bytes] = None, onerror=None):
         """Yield (dirpath, dirs, files) tuple for each directories under path
 
         ``dirpath`` is relative one from the root of this vfs. This
@@ -360,7 +380,7 @@
 
     def __init__(
         self,
-        base,
+        base: bytes,
         audit=True,
         cacheaudited=False,
         expandpath=False,
@@ -381,7 +401,7 @@
         self.options = {}
 
     @util.propertycache
-    def _cansymlink(self):
+    def _cansymlink(self) -> bool:
         return util.checklink(self.base)
 
     @util.propertycache
@@ -393,7 +413,7 @@
             return
         os.chmod(name, self.createmode & 0o666)
 
-    def _auditpath(self, path, mode):
+    def _auditpath(self, path, mode) -> None:
         if self._audit:
             if os.path.isabs(path) and path.startswith(self.base):
                 path = os.path.relpath(path, self.base)
@@ -404,8 +424,8 @@
 
     def __call__(
         self,
-        path,
-        mode=b"r",
+        path: bytes,
+        mode: bytes = b"rb",
         atomictemp=False,
         notindexed=False,
         backgroundclose=False,
@@ -518,7 +538,7 @@
 
         return fp
 
-    def symlink(self, src, dst):
+    def symlink(self, src: bytes, dst: bytes) -> None:
         self.audit(dst)
         linkname = self.join(dst)
         util.tryunlink(linkname)
@@ -538,7 +558,7 @@
         else:
             self.write(dst, src)
 
-    def join(self, path, *insidef):
+    def join(self, path: Optional[bytes], *insidef: bytes) -> bytes:
         if path:
             parts = [self.base, path]
             parts.extend(insidef)
@@ -551,7 +571,7 @@
 
 
 class proxyvfs(abstractvfs):
-    def __init__(self, vfs):
+    def __init__(self, vfs: "vfs"):
         self.vfs = vfs
 
     def _auditpath(self, path, mode):
@@ -569,14 +589,14 @@
 class filtervfs(proxyvfs, abstractvfs):
     '''Wrapper vfs for filtering filenames with a function.'''
 
-    def __init__(self, vfs, filter):
+    def __init__(self, vfs: "vfs", filter):
         proxyvfs.__init__(self, vfs)
         self._filter = filter
 
-    def __call__(self, path, *args, **kwargs):
+    def __call__(self, path: bytes, *args, **kwargs):
         return self.vfs(self._filter(path), *args, **kwargs)
 
-    def join(self, path, *insidef):
+    def join(self, path: Optional[bytes], *insidef: bytes) -> bytes:
         if path:
             return self.vfs.join(self._filter(self.vfs.reljoin(path, *insidef)))
         else:
@@ -589,15 +609,15 @@
 class readonlyvfs(proxyvfs):
     '''Wrapper vfs preventing any writing.'''
 
-    def __init__(self, vfs):
+    def __init__(self, vfs: "vfs"):
         proxyvfs.__init__(self, vfs)
 
-    def __call__(self, path, mode=b'r', *args, **kw):
+    def __call__(self, path: bytes, mode: bytes = b'rb', *args, **kw):
         if mode not in (b'r', b'rb'):
             raise error.Abort(_(b'this vfs is read only'))
         return self.vfs(path, mode, *args, **kw)
 
-    def join(self, path, *insidef):
+    def join(self, path: Optional[bytes], *insidef: bytes) -> bytes:
         return self.vfs.join(path, *insidef)
 
 
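As a minimal sketch of the bytes-oriented API these annotations pin down (illustrative only: the base directory is hypothetical and assumed to exist, and only methods annotated above are exercised):

    from mercurial import vfs as vfsmod

    v = vfsmod.vfs(b'/tmp/example/.hg')      # base: bytes
    written = v.write(b'testfile', b'data')  # -> int, number of bytes written
    assert written == 4
    assert v.read(b'testfile') == b'data'    # -> bytes
    assert v.isfile(b'testfile')             # Optional[bytes] path -> bool
    v.unlink(b'testfile')                    # same bytes-path convention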
--- a/relnotes/next	Thu Nov 24 10:34:34 2022 +0100
+++ b/relnotes/next	Fri Nov 25 15:14:40 2022 +0100
@@ -16,3 +16,7 @@
 == Internal API Changes ==
 
 == Miscellaneous ==
+
+ * pullbundle support no longer requires setting a server-side option;
+   providing a .hg/pullbundles.manifest file that follows the syntax described
+   in 'hg help -e clonebundles' is enough.
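For illustration, a ``.hg/pullbundles.manifest`` uses the clonebundles manifest syntax: one bundle per line, a path or URL followed by optional KEY=VALUE attributes. The file name, bundlespec, and node values below are hypothetical placeholders; 'hg help -e clonebundles' has the authoritative attribute list:

    bundle-cached.hg BUNDLESPEC=gzip-v2 heads=<hex-node> bases=<hex-node>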
--- a/rust/Cargo.lock	Thu Nov 24 10:34:34 2022 +0100
+++ b/rust/Cargo.lock	Fri Nov 25 15:14:40 2022 +0100
@@ -10,21 +10,26 @@
 
 [[package]]
 name = "adler"
-version = "0.2.3"
+version = "1.0.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ee2a4ec343196209d6594e19543ae87a39f96d5534d7174822a3ad825dd6ed7e"
+checksum = "f26201604c87b1e01bd3d98f8d5d9a8fcbb815e8cedb41ffccbeb4bf593a35fe"
 
 [[package]]
 name = "ahash"
-version = "0.4.7"
+version = "0.8.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "739f4a8db6605981345c5654f3a85b056ce52f37a39d34da03f25bf2151ea16e"
+checksum = "bf6ccdb167abbf410dcb915cabd428929d7f6a04980b54a11f26a39f1c7f7107"
+dependencies = [
+ "cfg-if",
+ "once_cell",
+ "version_check",
+]
 
 [[package]]
 name = "aho-corasick"
-version = "0.7.18"
+version = "0.7.19"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1e37cfd5e7657ada45f742d6e99ca5788580b5c529dc78faf11ece6dc702656f"
+checksum = "b4f55bd91a0978cbfd91c457a164bab8b4001c833b7f323132c0a4e1922dd44e"
 dependencies = [
  "memchr",
 ]
@@ -36,12 +41,12 @@
 checksum = "250f629c0161ad8107cf89319e990051fae62832fd343083bea452d93e2205fd"
 
 [[package]]
-name = "ansi_term"
-version = "0.12.1"
+name = "android_system_properties"
+version = "0.1.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d52a9bb7ec0cf484c551830a7ce27bd20d67eac647e1befb56b0be4ee39a55d2"
+checksum = "819e7219dbd41043ac279b19830f2efc897156490d7fd6ea916720117ee66311"
 dependencies = [
- "winapi",
+ "libc",
 ]
 
 [[package]]
@@ -57,9 +62,9 @@
 
 [[package]]
 name = "autocfg"
-version = "1.0.1"
+version = "1.1.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "cdb031dd78e28731d87d56cc8ffef4a8f36ca26c38fe2de700543e627f8a464a"
+checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa"
 
 [[package]]
 name = "bitflags"
@@ -87,14 +92,20 @@
 
 [[package]]
 name = "block-buffer"
-version = "0.10.2"
+version = "0.10.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0bf7fe51849ea569fd452f37822f606a5cabb684dc918707a0193fd4664ff324"
+checksum = "69cce20737498f97b993470a6e536b8523f0af7892a4f928cceb1ac5e52ebe7e"
 dependencies = [
  "generic-array",
 ]
 
 [[package]]
+name = "bumpalo"
+version = "3.11.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "572f695136211188308f16ad2ca5c851a712c464060ae6974944458eb83880ba"
+
+[[package]]
 name = "byteorder"
 version = "1.4.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -111,9 +122,9 @@
 
 [[package]]
 name = "bytes-cast-derive"
-version = "0.1.0"
+version = "0.1.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "cb936af9de38476664d6b58e529aff30d482e4ce1c5e150293d00730b0d81fdb"
+checksum = "b13e0e8ffc91021ba28dc98b2ea82099ba4ec07655279c21bfa3313ed96708fc"
 dependencies = [
  "proc-macro2",
  "quote",
@@ -122,58 +133,80 @@
 
 [[package]]
 name = "cc"
-version = "1.0.66"
+version = "1.0.76"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4c0496836a84f8d0495758516b8621a622beb77c0fed418570e50764093ced48"
+checksum = "76a284da2e6fe2092f2353e51713435363112dfd60030e22add80be333fb928f"
 dependencies = [
  "jobserver",
 ]
 
 [[package]]
 name = "cfg-if"
-version = "0.1.10"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4785bdd1c96b2a846b2bd7cc02e86b6b3dbf14e7e53446c4f54c92a361040822"
-
-[[package]]
-name = "cfg-if"
 version = "1.0.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
 
 [[package]]
 name = "chrono"
-version = "0.4.19"
+version = "0.4.23"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "670ad68c9088c2a963aaa298cb369688cf3f9465ce5e2d4ca10e6e0098a1ce73"
+checksum = "16b0a3d9ed01224b22057780a37bb8c5dbfe1be8ba48678e7bf57ec4b385411f"
 dependencies = [
- "libc",
+ "iana-time-zone",
+ "js-sys",
  "num-integer",
  "num-traits",
  "time",
+ "wasm-bindgen",
  "winapi",
 ]
 
 [[package]]
 name = "clap"
-version = "2.34.0"
+version = "4.0.24"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a0610544180c38b88101fecf2dd634b174a62eef6946f84dfc6a7127512b381c"
+checksum = "60494cedb60cb47462c0ff7be53de32c0e42a6fc2c772184554fa12bd9489c03"
 dependencies = [
- "ansi_term",
  "atty",
  "bitflags",
+ "clap_derive",
+ "clap_lex",
+ "once_cell",
  "strsim",
- "textwrap",
- "unicode-width",
- "vec_map",
+ "termcolor",
 ]
 
 [[package]]
-name = "const_fn"
-version = "0.4.4"
+name = "clap_derive"
+version = "4.0.21"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "cd51eab21ab4fd6a3bf889e2d0958c0a6e3a61ad04260325e919e652a2a62826"
+checksum = "0177313f9f02afc995627906bbd8967e2be069f5261954222dac78290c2b9014"
+dependencies = [
+ "heck",
+ "proc-macro-error",
+ "proc-macro2",
+ "quote",
+ "syn",
+]
+
+[[package]]
+name = "clap_lex"
+version = "0.3.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0d4198f73e42b4936b35b5bb248d81d2b595ecb170da0bac7655c54eedfa8da8"
+dependencies = [
+ "os_str_bytes",
+]
+
+[[package]]
+name = "codespan-reporting"
+version = "0.11.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3538270d33cc669650c4b093848450d380def10c331d38c768e34cac80576e6e"
+dependencies = [
+ "termcolor",
+ "unicode-width",
+]
 
 [[package]]
 name = "convert_case"
@@ -182,28 +215,25 @@
 checksum = "6245d59a3e82a7fc217c5828a6692dbc6dfb63a0c8c90495621f7b9d79704a0e"
 
 [[package]]
-name = "cpufeatures"
-version = "0.1.4"
+name = "core-foundation-sys"
+version = "0.8.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ed00c67cb5d0a7d64a44f6ad2668db7e7530311dd53ea79bcd4fb022c64911c8"
-dependencies = [
- "libc",
-]
+checksum = "5827cebf4670468b8772dd191856768aedcb1b0278a04f989f7766351917b9dc"
 
 [[package]]
 name = "cpufeatures"
-version = "0.2.1"
+version = "0.2.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "95059428f66df56b63431fdb4e1947ed2190586af5c5a8a8b71122bdf5a7f469"
+checksum = "28d997bd5e24a5928dd43e46dc529867e207907fe0b239c3477d924f7f2ca320"
 dependencies = [
  "libc",
 ]
 
 [[package]]
 name = "cpython"
-version = "0.7.0"
+version = "0.7.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b7d46ba8ace7f3a1d204ac5060a706d0a68de6b42eafb6a586cc08bebcffe664"
+checksum = "3052106c29da7390237bc2310c1928335733b286287754ea85e6093d2495280e"
 dependencies = [
  "libc",
  "num-traits",
@@ -213,74 +243,116 @@
 
 [[package]]
 name = "crc32fast"
-version = "1.2.1"
+version = "1.3.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "81156fece84ab6a9f2afdb109ce3ae577e42b1228441eded99bd77f627953b1a"
+checksum = "b540bd8bc810d3885c6ea91e2018302f68baba2129ab3e88f32389ee9370880d"
 dependencies = [
- "cfg-if 1.0.0",
+ "cfg-if",
 ]
 
 [[package]]
 name = "crossbeam-channel"
-version = "0.5.2"
+version = "0.5.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e54ea8bc3fb1ee042f5aace6e3c6e025d3874866da222930f70ce62aceba0bfa"
+checksum = "c2dd04ddaf88237dc3b8d8f9a3c1004b506b54b3313403944054d23c0870c521"
 dependencies = [
- "cfg-if 1.0.0",
+ "cfg-if",
  "crossbeam-utils",
 ]
 
 [[package]]
 name = "crossbeam-deque"
-version = "0.8.0"
+version = "0.8.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "94af6efb46fef72616855b036a624cf27ba656ffc9be1b9a3c931cfc7749a9a9"
+checksum = "715e8152b692bba2d374b53d4875445368fdf21a94751410af607a5ac677d1fc"
 dependencies = [
- "cfg-if 1.0.0",
+ "cfg-if",
  "crossbeam-epoch",
  "crossbeam-utils",
 ]
 
 [[package]]
 name = "crossbeam-epoch"
-version = "0.9.1"
+version = "0.9.11"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a1aaa739f95311c2c7887a76863f500026092fb1dce0161dab577e559ef3569d"
+checksum = "f916dfc5d356b0ed9dae65f1db9fc9770aa2851d2662b988ccf4fe3516e86348"
 dependencies = [
- "cfg-if 1.0.0",
- "const_fn",
+ "autocfg",
+ "cfg-if",
  "crossbeam-utils",
- "lazy_static",
  "memoffset",
  "scopeguard",
 ]
 
 [[package]]
 name = "crossbeam-utils"
-version = "0.8.1"
+version = "0.8.12"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "02d96d1e189ef58269ebe5b97953da3274d83a93af647c2ddd6f9dab28cedb8d"
+checksum = "edbafec5fa1f196ca66527c1b12c2ec4745ca14b50f1ad8f9f6f720b55d11fac"
 dependencies = [
- "autocfg",
- "cfg-if 1.0.0",
- "lazy_static",
+ "cfg-if",
 ]
 
 [[package]]
 name = "crypto-common"
-version = "0.1.2"
+version = "0.1.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a4600d695eb3f6ce1cd44e6e291adceb2cc3ab12f20a33777ecd0bf6eba34e06"
+checksum = "1bfb12502f3fc46cca1bb51ac28df9d618d813cdc3d2f25b9fe775a34af26bb3"
 dependencies = [
  "generic-array",
+ "typenum",
 ]
 
 [[package]]
 name = "ctor"
-version = "0.1.16"
+version = "0.1.26"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6d2301688392eb071b0bf1a37be05c469d3cc4dbbd95df672fe28ab021e6a096"
+dependencies = [
+ "quote",
+ "syn",
+]
+
+[[package]]
+name = "cxx"
+version = "1.0.81"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "97abf9f0eca9e52b7f81b945524e76710e6cb2366aead23b7d4fbf72e281f888"
+dependencies = [
+ "cc",
+ "cxxbridge-flags",
+ "cxxbridge-macro",
+ "link-cplusplus",
+]
+
+[[package]]
+name = "cxx-build"
+version = "1.0.81"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7fbaabec2c953050352311293be5c6aba8e141ba19d6811862b232d6fd020484"
+checksum = "7cc32cc5fea1d894b77d269ddb9f192110069a8a9c1f1d441195fba90553dea3"
 dependencies = [
+ "cc",
+ "codespan-reporting",
+ "once_cell",
+ "proc-macro2",
+ "quote",
+ "scratch",
+ "syn",
+]
+
+[[package]]
+name = "cxxbridge-flags"
+version = "1.0.81"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8ca220e4794c934dc6b1207c3b42856ad4c302f2df1712e9f8d2eec5afaacf1f"
+
+[[package]]
+name = "cxxbridge-macro"
+version = "1.0.81"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b846f081361125bfc8dc9d3940c84e1fd83ba54bbca7b17cd29483c828be0704"
+dependencies = [
+ "proc-macro2",
  "quote",
  "syn",
 ]
@@ -300,9 +372,9 @@
 
 [[package]]
 name = "diff"
-version = "0.1.12"
+version = "0.1.13"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0e25ea47919b1560c4e3b7fe0aaab9becf5b84a10325ddf7db0f0ba5e1026499"
+checksum = "56254986775e3233ffa9c4d7d3faaf6d36a2c09d30b20687e9f88bc8bafc16c8"
 
 [[package]]
 name = "digest"
@@ -315,25 +387,25 @@
 
 [[package]]
 name = "digest"
-version = "0.10.2"
+version = "0.10.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8cb780dce4f9a8f5c087362b3a4595936b2019e7c8b30f2c3e9a7e94e6ae9837"
+checksum = "adfbc57365a37acbd2ebf2b64d7e69bb766e2fea813521ed536f5d0520dcf86c"
 dependencies = [
- "block-buffer 0.10.2",
+ "block-buffer 0.10.3",
  "crypto-common",
 ]
 
 [[package]]
 name = "either"
-version = "1.6.1"
+version = "1.8.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e78d4f1cc4ae33bbfc157ed5d5a5ef3bc29227303d595861deb238fcec4e9457"
+checksum = "90e5c1c8368803113bf0c9584fc495a58b86dc8a29edbf8fe877d21d9507e797"
 
 [[package]]
 name = "env_logger"
-version = "0.9.0"
+version = "0.9.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0b2cf0344971ee6c64c31be0d530793fba457d322dfec2810c453d0ef228f9c3"
+checksum = "a12e6657c4c97ebab115a42dcee77225f7f482cdd841cf7088c657a42e9e00e7"
 dependencies = [
  "atty",
  "humantime",
@@ -344,22 +416,20 @@
 
 [[package]]
 name = "fastrand"
-version = "1.7.0"
+version = "1.8.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c3fcf0cee53519c866c09b5de1f6c56ff9d647101f81c1964fa632e148896cdf"
+checksum = "a7a407cfaa3385c4ae6b23e84623d48c2798d06e3e6a1878f7f59f17b3f86499"
 dependencies = [
  "instant",
 ]
 
 [[package]]
 name = "flate2"
-version = "1.0.22"
+version = "1.0.24"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1e6988e897c1c9c485f43b47a529cef42fde0547f9d8d41a7062518f1d8fc53f"
+checksum = "f82b0f4c27ad9f8bfd1f3208d882da2b09c301bc1c828fd3a00d0216d2fbbff6"
 dependencies = [
- "cfg-if 1.0.0",
  "crc32fast",
- "libc",
  "libz-sys",
  "miniz_oxide",
 ]
@@ -386,9 +456,9 @@
 
 [[package]]
 name = "generic-array"
-version = "0.14.4"
+version = "0.14.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "501466ecc8a30d1d3b7fc9229b122b2ce8ed6e9d9223f1138d4babb253e51817"
+checksum = "bff49e947297f3312447abdca79f45f4738097cc82b06e72054d2223f601f1b9"
 dependencies = [
  "typenum",
  "version_check",
@@ -396,47 +466,47 @@
 
 [[package]]
 name = "getrandom"
-version = "0.1.15"
+version = "0.1.16"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "fc587bc0ec293155d5bfa6b9891ec18a1e330c234f896ea47fbada4cadbe47e6"
+checksum = "8fc3cb4d91f53b50155bdcfd23f6a4c39ae1969c2ae85982b135750cccaf5fce"
 dependencies = [
- "cfg-if 0.1.10",
+ "cfg-if",
  "libc",
  "wasi 0.9.0+wasi-snapshot-preview1",
 ]
 
 [[package]]
 name = "getrandom"
-version = "0.2.4"
+version = "0.2.8"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "418d37c8b1d42553c93648be529cb70f920d3baf8ef469b74b9638df426e0b4c"
+checksum = "c05aeb6a22b8f62540c194aac980f2115af067bfe15a0734d7277a768d396b31"
 dependencies = [
- "cfg-if 1.0.0",
+ "cfg-if",
  "libc",
- "wasi 0.10.0+wasi-snapshot-preview1",
+ "wasi 0.11.0+wasi-snapshot-preview1",
 ]
 
 [[package]]
-name = "glob"
-version = "0.3.0"
+name = "hashbrown"
+version = "0.13.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9b919933a397b79c37e33b77bb2aa3dc8eb6e165ad809e58ff75bc7db2e34574"
-
-[[package]]
-name = "hashbrown"
-version = "0.9.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d7afe4a420e3fe79967a00898cc1f4db7c8a49a9333a29f8a4bd76a253d5cd04"
+checksum = "33ff8ae62cd3a9102e5637afc8452c55acf3844001bd5374e0b0bd7b6616c038"
 dependencies = [
  "ahash",
  "rayon",
 ]
 
 [[package]]
+name = "heck"
+version = "0.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2540771e65fc8cb83cd6e8a237f70c319bd5c29f78ed1084ba5d50eeac86f7f9"
+
+[[package]]
 name = "hermit-abi"
-version = "0.1.17"
+version = "0.1.19"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5aca5565f760fb5b220e499d72710ed156fdb74e631659e99377d9ebfbd13ae8"
+checksum = "62b467343b94ba476dcb2500d242dadbb39557df889310ac77c5d99100aaac33"
 dependencies = [
  "libc",
 ]
@@ -462,7 +532,7 @@
  "hashbrown",
  "home",
  "im-rc",
- "itertools 0.10.3",
+ "itertools",
  "lazy_static",
  "libc",
  "log",
@@ -500,9 +570,9 @@
 
 [[package]]
 name = "home"
-version = "0.5.3"
+version = "0.5.4"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2456aef2e6b6a9784192ae780c0f15bc57df0e918585282325e8c8ac27737654"
+checksum = "747309b4b440c06d57b0b25f2aee03ee9b5e5397d288c60e21fc709bb98a7408"
 dependencies = [
  "winapi",
 ]
@@ -514,13 +584,37 @@
 checksum = "9a3a5bfb195931eeb336b2a7b4d761daec841b97f947d34394601737a7bba5e4"
 
 [[package]]
+name = "iana-time-zone"
+version = "0.1.53"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "64c122667b287044802d6ce17ee2ddf13207ed924c712de9a66a5814d5b64765"
+dependencies = [
+ "android_system_properties",
+ "core-foundation-sys",
+ "iana-time-zone-haiku",
+ "js-sys",
+ "wasm-bindgen",
+ "winapi",
+]
+
+[[package]]
+name = "iana-time-zone-haiku"
+version = "0.1.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0703ae284fc167426161c2e3f1da3ea71d94b21bedbcc9494e92b28e334e3dca"
+dependencies = [
+ "cxx",
+ "cxx-build",
+]
+
+[[package]]
 name = "im-rc"
-version = "15.0.0"
+version = "15.1.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3ca8957e71f04a205cb162508f9326aea04676c8dfd0711220190d6b83664f3f"
+checksum = "af1955a75fa080c677d3972822ec4bad316169ab1cfc6c257a942c2265dbe5fe"
 dependencies = [
  "bitmaps",
- "rand_core 0.5.1",
+ "rand_core 0.6.4",
  "rand_xoshiro",
  "sized-chunks",
  "typenum",
@@ -533,37 +627,37 @@
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "7a5bbe824c507c5da5956355e86a746d82e0e1464f65d862cc5e71da70e94b2c"
 dependencies = [
- "cfg-if 1.0.0",
+ "cfg-if",
 ]
 
 [[package]]
 name = "itertools"
-version = "0.9.0"
+version = "0.10.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "284f18f85651fe11e8a991b2adb42cb078325c996ed026d994719efcfca1d54b"
-dependencies = [
- "either",
-]
-
-[[package]]
-name = "itertools"
-version = "0.10.3"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a9a9d19fa1e79b6215ff29b9d6880b706147f16e9b1dbb1e4e5947b5b02bc5e3"
+checksum = "b0fd2260e829bddf4cb6ea802289de2f86d6a7a690192fbe91b3f46e0f2c8473"
 dependencies = [
  "either",
 ]
 
 [[package]]
 name = "jobserver"
-version = "0.1.21"
+version = "0.1.25"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5c71313ebb9439f74b00d9d2dcec36440beaf57a6aa0623068441dd7cd81a7f2"
+checksum = "068b1ee6743e4d11fb9c6a1e6064b3693a1b600e7f5f5988047d98b3dc9fb90b"
 dependencies = [
  "libc",
 ]
 
 [[package]]
+name = "js-sys"
+version = "0.3.60"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "49409df3e3bf0856b916e2ceaca09ee28e6871cf7d9ce97a692cacfdb2a25a47"
+dependencies = [
+ "wasm-bindgen",
+]
+
+[[package]]
 name = "lazy_static"
 version = "1.4.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -571,21 +665,21 @@
 
 [[package]]
 name = "libc"
-version = "0.2.124"
+version = "0.2.137"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "21a41fed9d98f27ab1c6d161da622a4fa35e8a54a8adc24bbf3ddd0ef70b0e50"
+checksum = "fc7fcc620a3bff7cdd7a365be3376c97191aeaccc2a603e600951e452615bf89"
 
 [[package]]
 name = "libm"
-version = "0.2.1"
+version = "0.2.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c7d73b3f436185384286bd8098d17ec07c9a7d2388a6599f824d8502b529702a"
+checksum = "348108ab3fba42ec82ff6e9564fc4ca0247bdccdc68dd8af9764bbc79c3c8ffb"
 
 [[package]]
 name = "libz-sys"
-version = "1.1.2"
+version = "1.1.8"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "602113192b08db8f38796c4e85c39e960c145965140e918018bcde1952429655"
+checksum = "9702761c3935f8cc2f101793272e202c72b99da8f4224a19ddcf1279a6450bbf"
 dependencies = [
  "cc",
  "pkg-config",
@@ -593,25 +687,34 @@
 ]
 
 [[package]]
-name = "log"
-version = "0.4.14"
+name = "link-cplusplus"
+version = "1.0.7"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "51b9bbe6c47d51fc3e1a9b945965946b4c44142ab8792c50835a980d362c2710"
+checksum = "9272ab7b96c9046fbc5bc56c06c117cb639fe2d509df0c421cad82d2915cf369"
 dependencies = [
- "cfg-if 1.0.0",
+ "cc",
+]
+
+[[package]]
+name = "log"
+version = "0.4.17"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "abb12e687cfb44aa40f41fc3978ef76448f9b6038cad6aef4259d3c095a2382e"
+dependencies = [
+ "cfg-if",
 ]
 
 [[package]]
 name = "memchr"
-version = "2.4.1"
+version = "2.5.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "308cc39be01b73d0d18f82a0e7b2a3df85245f84af96fdddc5d202d27e47b86a"
+checksum = "2dffe52ecf27772e601905b7522cb4ef790d2cc203488bbd0e2fe85fcb74566d"
 
 [[package]]
 name = "memmap2"
-version = "0.5.7"
+version = "0.5.8"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "95af15f345b17af2efc8ead6080fb8bc376f8cec1b35277b935637595fe77498"
+checksum = "4b182332558b18d807c4ce1ca8ca983b34c3ee32765e47b3f0f69b90355cc1dc"
 dependencies = [
  "libc",
  "stable_deref_trait",
@@ -619,9 +722,9 @@
 
 [[package]]
 name = "memoffset"
-version = "0.6.1"
+version = "0.6.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "157b4208e3059a8f9e78d559edc658e13df41410cb3ae03979c83130067fdd87"
+checksum = "5aa361d4faea93603064a027415f07bd8e1d5c88c9fbf68bf56a285428fd79ce"
 dependencies = [
  "autocfg",
 ]
@@ -650,19 +753,18 @@
 
 [[package]]
 name = "miniz_oxide"
-version = "0.4.3"
+version = "0.5.4"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0f2d26ec3309788e423cfbf68ad1800f061638098d76a83681af979dc4eda19d"
+checksum = "96590ba8f175222643a85693f33d26e9c8a015f599c216509b1a6894af675d34"
 dependencies = [
  "adler",
- "autocfg",
 ]
 
 [[package]]
 name = "num-integer"
-version = "0.1.44"
+version = "0.1.45"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d2cc698a63b549a70bc047073d2949cce27cd1c7b0a4a862d08a8031bc2801db"
+checksum = "225d3389fb3509a24c93f5c29eb6bde2586b98d9f016636dff58d7c6f7569cd9"
 dependencies = [
  "autocfg",
  "num-traits",
@@ -670,9 +772,9 @@
 
 [[package]]
 name = "num-traits"
-version = "0.2.14"
+version = "0.2.15"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9a64b1ec5cda2586e284722486d802acf1f7dbdc623e2bfc57e65ca1cd099290"
+checksum = "578ede34cf02f8924ab9447f50c28075b4d3e5b269972345e7e0372b38c6cdcd"
 dependencies = [
  "autocfg",
  "libm",
@@ -680,9 +782,9 @@
 
 [[package]]
 name = "num_cpus"
-version = "1.13.0"
+version = "1.14.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "05499f3756671c15885fee9034446956fff3f243d6077b91e5767df161f766b3"
+checksum = "f6058e64324c71e02bc2b150e4f3bc8286db6c83092132ffa3f6b1eab0f9def5"
 dependencies = [
  "hermit-abi",
  "libc",
@@ -690,9 +792,9 @@
 
 [[package]]
 name = "once_cell"
-version = "1.14.0"
+version = "1.16.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2f7254b99e31cad77da24b08ebf628882739a608578bb1bcdfc1f9c21260d7c0"
+checksum = "86f0b0d4bf799edbc74508c1e8bf170ff5f41238e5f8225603ca7caaae2b7860"
 
 [[package]]
 name = "opaque-debug"
@@ -701,21 +803,26 @@
 checksum = "624a8340c38c1b80fd549087862da4ba43e08858af025b236e509b6649fc13d5"
 
 [[package]]
+name = "os_str_bytes"
+version = "6.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7b5bf27447411e9ee3ff51186bf7a08e16c341efdde93f4d823e8844429bed7e"
+
+[[package]]
 name = "ouroboros"
-version = "0.15.0"
+version = "0.15.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9f31a3b678685b150cba82b702dcdc5e155893f63610cf388d30cd988d4ca2bf"
+checksum = "dfbb50b356159620db6ac971c6d5c9ab788c9cc38a6f49619fca2a27acb062ca"
 dependencies = [
  "aliasable",
  "ouroboros_macro",
- "stable_deref_trait",
 ]
 
 [[package]]
 name = "ouroboros_macro"
-version = "0.15.0"
+version = "0.15.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "084fd65d5dd8b3772edccb5ffd1e4b7eba43897ecd0f9401e330e8c542959408"
+checksum = "4a0d9d1a6191c4f391f87219d1ea42b23f09ee84d64763cd05ee6ea88d9f384d"
 dependencies = [
  "Inflector",
  "proc-macro-error",
@@ -726,41 +833,41 @@
 
 [[package]]
 name = "output_vt100"
-version = "0.1.2"
+version = "0.1.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "53cdc5b785b7a58c5aad8216b3dfa114df64b0b06ae6e1501cef91df2fbdf8f9"
+checksum = "628223faebab4e3e40667ee0b2336d34a5b960ff60ea743ddfdbcf7770bcfb66"
 dependencies = [
  "winapi",
 ]
 
 [[package]]
 name = "paste"
-version = "1.0.5"
+version = "1.0.9"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "acbf547ad0c65e31259204bd90935776d1c693cec2f4ff7abb7a1bbbd40dfe58"
+checksum = "b1de2e551fb905ac83f73f7aedf2f0cb4a0da7e35efa24a202a936269f1f18e1"
 
 [[package]]
 name = "pkg-config"
-version = "0.3.19"
+version = "0.3.26"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3831453b3449ceb48b6d9c7ad7c96d5ea673e9b470a1dc578c2ce6521230884c"
+checksum = "6ac9a59f73473f1b8d852421e59e64809f025994837ef743615c6d0c5b305160"
 
 [[package]]
 name = "ppv-lite86"
-version = "0.2.10"
+version = "0.2.17"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ac74c624d6b2d21f425f752262f42188365d7b8ff1aff74c82e45136510a4857"
+checksum = "5b40af805b3121feab8a3c29f04d8ad262fa8e0561883e7653e024ae4479e6de"
 
 [[package]]
 name = "pretty_assertions"
-version = "1.1.0"
+version = "1.3.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "76d5b548b725018ab5496482b45cb8bef21e9fed1858a6d674e3a8a0f0bb5d50"
+checksum = "a25e9bcb20aa780fd0bb16b72403a9064d6b3f22f026946029acb941a50af755"
 dependencies = [
- "ansi_term",
  "ctor",
  "diff",
  "output_vt100",
+ "yansi",
 ]
 
 [[package]]
@@ -789,18 +896,18 @@
 
 [[package]]
 name = "proc-macro2"
-version = "1.0.24"
+version = "1.0.47"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1e0704ee1a7e00d7bb417d0770ea303c1bccbabf0ef1667dae92b5967f5f8a71"
+checksum = "5ea3d908b0e36316caf9e9e2c4625cdde190a7e6f440d794667ed17a1855e725"
 dependencies = [
- "unicode-xid",
+ "unicode-ident",
 ]
 
 [[package]]
 name = "python3-sys"
-version = "0.7.0"
+version = "0.7.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b18b32e64c103d5045f44644d7ddddd65336f7a0521f6fde673240a9ecceb77e"
+checksum = "49f8b50d72fb3015735aa403eebf19bbd72c093bfeeae24ee798be5f2f1aab52"
 dependencies = [
  "libc",
  "regex",
@@ -808,9 +915,9 @@
 
 [[package]]
 name = "quote"
-version = "1.0.7"
+version = "1.0.21"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "aa563d17ecb180e500da1cfd2b028310ac758de548efdd203e18f283af693f37"
+checksum = "bbe448f377a7d6961e30f5955f9b8d106c3f5e449d493ee1b125c1d43c2b5179"
 dependencies = [
  "proc-macro2",
 ]
@@ -821,7 +928,7 @@
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "6a6b1679d49b24bbfe0c803429aa1874472f50d9b363131f0e89fc356b544d03"
 dependencies = [
- "getrandom 0.1.15",
+ "getrandom 0.1.16",
  "libc",
  "rand_chacha 0.2.2",
  "rand_core 0.5.1",
@@ -836,7 +943,7 @@
 dependencies = [
  "libc",
  "rand_chacha 0.3.1",
- "rand_core 0.6.3",
+ "rand_core 0.6.4",
 ]
 
 [[package]]
@@ -856,7 +963,7 @@
 checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88"
 dependencies = [
  "ppv-lite86",
- "rand_core 0.6.3",
+ "rand_core 0.6.4",
 ]
 
 [[package]]
@@ -865,16 +972,16 @@
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "90bde5296fc891b0cef12a6d03ddccc162ce7b2aff54160af9338f8d40df6d19"
 dependencies = [
- "getrandom 0.1.15",
+ "getrandom 0.1.16",
 ]
 
 [[package]]
 name = "rand_core"
-version = "0.6.3"
+version = "0.6.4"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d34f1408f55294453790c48b2f1ebbb1c5b4b7563eb1f418bcfcfdbb06ebb4e7"
+checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c"
 dependencies = [
- "getrandom 0.2.4",
+ "getrandom 0.2.8",
 ]
 
 [[package]]
@@ -902,23 +1009,23 @@
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "59cad018caf63deb318e5a4586d99a24424a364f40f1e5778c29aca23f4fc73e"
 dependencies = [
- "rand_core 0.6.3",
+ "rand_core 0.6.4",
 ]
 
 [[package]]
 name = "rand_xoshiro"
-version = "0.4.0"
+version = "0.6.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a9fcdd2e881d02f1d9390ae47ad8e5696a9e4be7b547a1da2afbc61973217004"
+checksum = "6f97cdb2a36ed4183de61b2f824cc45c9f1037f28afe0a322e9fff4c108b5aaa"
 dependencies = [
- "rand_core 0.5.1",
+ "rand_core 0.6.4",
 ]
 
 [[package]]
 name = "rayon"
-version = "1.5.1"
+version = "1.5.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c06aca804d41dbc8ba42dfd964f0d01334eceb64314b9ecf7c5fad5188a06d90"
+checksum = "bd99e5772ead8baa5215278c9b15bf92087709e9c1b2d1f97cdb5a183c933a7d"
 dependencies = [
  "autocfg",
  "crossbeam-deque",
@@ -928,31 +1035,30 @@
 
 [[package]]
 name = "rayon-core"
-version = "1.9.1"
+version = "1.9.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d78120e2c850279833f1dd3582f730c4ab53ed95aeaaaa862a2a5c71b1656d8e"
+checksum = "258bcdb5ac6dad48491bb2992db6b7cf74878b0384908af124823d118c99683f"
 dependencies = [
  "crossbeam-channel",
  "crossbeam-deque",
  "crossbeam-utils",
- "lazy_static",
  "num_cpus",
 ]
 
 [[package]]
 name = "redox_syscall"
-version = "0.2.11"
+version = "0.2.16"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8380fe0152551244f0747b1bf41737e0f8a74f97a14ccefd1148187271634f3c"
+checksum = "fb5a58c1855b4b6819d59012155603f0b22ad30cad752600aadfcb695265519a"
 dependencies = [
  "bitflags",
 ]
 
 [[package]]
 name = "regex"
-version = "1.5.5"
+version = "1.7.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1a11647b6b25ff05a515cb92c365cec08801e83423a235b51e231e1808747286"
+checksum = "e076559ef8e241f2ae3479e36f97bd5741c0330689e217ad51ce2c76808b868a"
 dependencies = [
  "aho-corasick",
  "memchr",
@@ -961,9 +1067,9 @@
 
 [[package]]
 name = "regex-syntax"
-version = "0.6.25"
+version = "0.6.28"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f497285884f3fcff424ffc933e56d7cbca511def0c9831a7f9b5f6153e3cc89b"
+checksum = "456c603be3e8d448b072f410900c09faf164fbce2d480456f50eea6e25f9c848"
 
 [[package]]
 name = "remove_dir_all"
@@ -1020,20 +1126,26 @@
 checksum = "d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd"
 
 [[package]]
+name = "scratch"
+version = "1.0.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9c8132065adcfd6e02db789d9285a0deb2f3fcb04002865ab67d5fb103533898"
+
+[[package]]
 name = "semver"
-version = "1.0.6"
+version = "1.0.14"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a4a3381e03edd24287172047536f20cabde766e2cd3e65e6b00fb3af51c4f38d"
+checksum = "e25dfac463d778e353db5be2449d1cce89bd6fd23c9f1ea21310ce6e5a1b29c4"
 
 [[package]]
 name = "sha-1"
-version = "0.9.6"
+version = "0.9.8"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8c4cfa741c5832d0ef7fab46cabed29c2aae926db0b11bb2069edd8db5e64e16"
+checksum = "99cd6713db3cf16b6c84e06321e049a9b9f699826e16096d23bbcc44d15d51a6"
 dependencies = [
  "block-buffer 0.9.0",
- "cfg-if 1.0.0",
- "cpufeatures 0.1.4",
+ "cfg-if",
+ "cpufeatures",
  "digest 0.9.0",
  "opaque-debug",
 ]
@@ -1044,16 +1156,16 @@
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "028f48d513f9678cda28f6e4064755b3fbb2af6acd672f2c209b62323f7aea0f"
 dependencies = [
- "cfg-if 1.0.0",
- "cpufeatures 0.2.1",
- "digest 0.10.2",
+ "cfg-if",
+ "cpufeatures",
+ "digest 0.10.5",
 ]
 
 [[package]]
 name = "sized-chunks"
-version = "0.6.2"
+version = "0.6.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1ec31ceca5644fa6d444cc77548b88b67f46db6f7c71683b0f9336e671830d2f"
+checksum = "16d69225bde7a69b235da73377861095455d298f2b970996eec25ddbb42b3d1e"
 dependencies = [
  "bitmaps",
  "typenum",
@@ -1073,19 +1185,19 @@
 
 [[package]]
 name = "strsim"
-version = "0.8.0"
+version = "0.10.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8ea5119cdb4c55b55d432abb513a0429384878c15dde60cc77b1c99de1a95a6a"
+checksum = "73473c0e59e6d5812c5dfe2a064a6444949f089e20eec9a2e5506596494e4623"
 
 [[package]]
 name = "syn"
-version = "1.0.54"
+version = "1.0.103"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9a2af957a63d6bd42255c359c93d9bfdb97076bd3b820897ce55ffbfbf107f44"
+checksum = "a864042229133ada95abf3b54fdc62ef5ccabe9515b64717bcb9a1919e59445d"
 dependencies = [
  "proc-macro2",
  "quote",
- "unicode-xid",
+ "unicode-ident",
 ]
 
 [[package]]
@@ -1094,7 +1206,7 @@
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "5cdb1ef4eaeeaddc8fbd371e5017057064af0911902ef36b39801f67cc6d79e4"
 dependencies = [
- "cfg-if 1.0.0",
+ "cfg-if",
  "fastrand",
  "libc",
  "redox_syscall",
@@ -1104,23 +1216,14 @@
 
 [[package]]
 name = "termcolor"
-version = "1.1.2"
+version = "1.1.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2dfed899f0eb03f32ee8c6a0aabdb8a7949659e3466561fc0adf54e26d88c5f4"
+checksum = "bab24d30b911b2376f3a13cc2cd443142f0c81dda04c118693e35b3835757755"
 dependencies = [
  "winapi-util",
 ]
 
 [[package]]
-name = "textwrap"
-version = "0.11.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d326610f408c7a4eb6f51c37c330e496b08506c9457c9d34287ecc38809fb060"
-dependencies = [
- "unicode-width",
-]
-
-[[package]]
 name = "thread_local"
 version = "1.1.4"
 source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -1142,32 +1245,32 @@
 
 [[package]]
 name = "twox-hash"
-version = "1.6.2"
+version = "1.6.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4ee73e6e4924fe940354b8d4d98cad5231175d615cd855b758adc658c0aac6a0"
+checksum = "97fee6b57c6a41524a810daee9286c02d7752c4253064d0b05472833a438f675"
 dependencies = [
- "cfg-if 1.0.0",
+ "cfg-if",
  "rand 0.8.5",
  "static_assertions",
 ]
 
 [[package]]
 name = "typenum"
-version = "1.12.0"
+version = "1.15.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "373c8a200f9e67a0c95e62a4f52fbf80c23b4381c05a17845531982fa99e6b33"
+checksum = "dcf81ac59edc17cc8697ff311e8f5ef2d99fcbd9817b34cec66f90b6c3dfd987"
+
+[[package]]
+name = "unicode-ident"
+version = "1.0.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6ceab39d59e4c9499d4e5a8ee0e2735b891bb7308ac83dfb4e80cad195c9f6f3"
 
 [[package]]
 name = "unicode-width"
-version = "0.1.9"
+version = "0.1.10"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3ed742d4ea2bd1176e236172c8429aaf54486e7ac098db29ffe6529e0ce50973"
-
-[[package]]
-name = "unicode-xid"
-version = "0.2.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f7fe0bb3479651439c9112f72b6c505038574c9fbb575ed1bf3b797fa39dd564"
+checksum = "c0edd1e5b14653f783770bce4a4dabb4a5108a5370a5f5d8cfe8710c361f6c8b"
 
 [[package]]
 name = "users"
@@ -1181,9 +1284,9 @@
 
 [[package]]
 name = "vcpkg"
-version = "0.2.11"
+version = "0.2.15"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b00bca6106a5e23f3eee943593759b7fcddb00554332e856d990c893966879fb"
+checksum = "accd4ea62f7bb7a82fe23066fb0957d48ef677f6eeb8215f372f52e48bb32426"
 
 [[package]]
 name = "vcsgraph"
@@ -1193,20 +1296,14 @@
 dependencies = [
  "hex",
  "rand 0.7.3",
- "sha-1 0.9.6",
+ "sha-1 0.9.8",
 ]
 
 [[package]]
-name = "vec_map"
-version = "0.8.2"
+name = "version_check"
+version = "0.9.4"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f1bddf1187be692e79c5ffeab891132dfb0f236ed36a43c7ed39f1165ee20191"
-
-[[package]]
-name = "version_check"
-version = "0.9.2"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b5a972e5669d67ba988ce3dc826706fb0a8b01471c088cb0b6110b805cc36aed"
+checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f"
 
 [[package]]
 name = "wasi"
@@ -1221,14 +1318,74 @@
 checksum = "1a143597ca7c7793eff794def352d41792a93c481eb1042423ff7ff72ba2c31f"
 
 [[package]]
+name = "wasi"
+version = "0.11.0+wasi-snapshot-preview1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423"
+
+[[package]]
+name = "wasm-bindgen"
+version = "0.2.83"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "eaf9f5aceeec8be17c128b2e93e031fb8a4d469bb9c4ae2d7dc1888b26887268"
+dependencies = [
+ "cfg-if",
+ "wasm-bindgen-macro",
+]
+
+[[package]]
+name = "wasm-bindgen-backend"
+version = "0.2.83"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4c8ffb332579b0557b52d268b91feab8df3615f265d5270fec2a8c95b17c1142"
+dependencies = [
+ "bumpalo",
+ "log",
+ "once_cell",
+ "proc-macro2",
+ "quote",
+ "syn",
+ "wasm-bindgen-shared",
+]
+
+[[package]]
+name = "wasm-bindgen-macro"
+version = "0.2.83"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "052be0f94026e6cbc75cdefc9bae13fd6052cdcaf532fa6c45e7ae33a1e6c810"
+dependencies = [
+ "quote",
+ "wasm-bindgen-macro-support",
+]
+
+[[package]]
+name = "wasm-bindgen-macro-support"
+version = "0.2.83"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "07bc0c051dc5f23e307b13285f9d75df86bfdf816c5721e573dec1f9b8aa193c"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+ "wasm-bindgen-backend",
+ "wasm-bindgen-shared",
+]
+
+[[package]]
+name = "wasm-bindgen-shared"
+version = "0.2.83"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1c38c045535d93ec4f0b4defec448e4291638ee608530863b1e2ba115d4fff7f"
+
+[[package]]
 name = "which"
-version = "4.2.5"
+version = "4.3.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5c4fb54e6113b6a8772ee41c3404fb0301ac79604489467e0a9ce1f3e97c24ae"
+checksum = "1c831fbbee9e129a8cf93e7747a82da9d95ba8e16621cae60ec2cdc849bacb7b"
 dependencies = [
  "either",
- "lazy_static",
  "libc",
+ "once_cell",
 ]
 
 [[package]]
@@ -1263,19 +1420,25 @@
 checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f"
 
 [[package]]
+name = "yansi"
+version = "0.5.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "09041cd90cf85f7f8b2df60c646f853b7f535ce68f85244eb6731cf89fa498ec"
+
+[[package]]
 name = "zstd"
-version = "0.5.4+zstd.1.4.7"
+version = "0.11.2+zstd.1.5.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "69996ebdb1ba8b1517f61387a883857818a66c8a295f487b1ffd8fd9d2c82910"
+checksum = "20cc960326ece64f010d2d2107537f26dc589a6573a316bd5b1dba685fa5fde4"
 dependencies = [
  "zstd-safe",
 ]
 
 [[package]]
 name = "zstd-safe"
-version = "2.0.6+zstd.1.4.7"
+version = "5.0.2+zstd.1.5.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "98aa931fb69ecee256d44589d19754e61851ae4769bf963b385119b1cc37a49e"
+checksum = "1d2a5585e04f9eea4b2a3d1eca508c4dee9592a89ef6f450c11719da0726f4db"
 dependencies = [
  "libc",
  "zstd-sys",
@@ -1283,12 +1446,10 @@
 
 [[package]]
 name = "zstd-sys"
-version = "1.4.18+zstd.1.4.7"
+version = "2.0.1+zstd.1.5.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a1e6e8778706838f43f771d80d37787cb2fe06dafe89dd3aebaf6721b9eaec81"
+checksum = "9fd07cbbc53846d9145dbffdf6dd09a7a0aa52be46741825f5c97bdd4f73f12b"
 dependencies = [
  "cc",
- "glob",
- "itertools 0.9.0",
  "libc",
 ]
--- a/rust/README.rst	Thu Nov 24 10:34:34 2022 +0100
+++ b/rust/README.rst	Fri Nov 25 15:14:40 2022 +0100
@@ -77,8 +77,8 @@
 Developing Rust
 ===============
 
-The current version of Rust in use is ``1.48.0``, because it's what Debian
-stable has. You can use ``rustup override set 1.48.0`` at the root of the repo
+The current version of Rust in use is ``1.61.0``, because it's what Debian
+testing has. You can use ``rustup override set 1.61.0`` at the root of the repo
 to make it easier on you.
 
 Go to the ``hg-cpython`` folder::
--- a/rust/hg-core/Cargo.toml	Thu Nov 24 10:34:34 2022 +0100
+++ b/rust/hg-core/Cargo.toml	Fri Nov 25 15:14:40 2022 +0100
@@ -3,7 +3,7 @@
 version = "0.1.0"
 authors = ["Georges Racinet <gracinet@anybox.fr>"]
 description = "Mercurial pure Rust core library, with no assumption on Python bindings (FFI)"
-edition = "2018"
+edition = "2021"
 
 [lib]
 name = "hg"
@@ -13,40 +13,40 @@
 bytes-cast = "0.2.0"
 byteorder = "1.4.3"
 derive_more = "0.99.17"
-hashbrown = { version = "0.9.1", features = ["rayon"] }
-home = "0.5.3"
-im-rc = "15.0"
-itertools = "0.10.3"
+hashbrown = { version = "0.13.1", features = ["rayon"] }
+home = "0.5.4"
+im-rc = "15.1.0"
+itertools = "0.10.5"
 lazy_static = "1.4.0"
-libc = "0.2"
-ouroboros = "0.15.0"
-rand = "0.8.4"
+libc = "0.2.137"
+ouroboros = "0.15.5"
+rand = "0.8.5"
 rand_pcg = "0.3.1"
 rand_distr = "0.4.3"
-rayon = "1.5.1"
-regex = "1.5.5"
+rayon = "1.5.3"
+regex = "1.7.0"
 sha-1 = "0.10.0"
-twox-hash = "1.6.2"
+twox-hash = "1.6.3"
 same-file = "1.0.6"
-tempfile = "3.1.0"
+tempfile = "3.3.0"
 thread_local = "1.1.4"
-crossbeam-channel = "0.5.0"
+crossbeam-channel = "0.5.6"
 micro-timer = "0.4.0"
-log = "0.4.8"
-memmap2 = { version = "0.5.3", features = ["stable_deref_trait"] }
-zstd = "0.5.3"
+log = "0.4.17"
+memmap2 = { version = "0.5.8", features = ["stable_deref_trait"] }
+zstd = "0.11.2"
 format-bytes = "0.3.0"
 # once_cell 1.15+ requires edition 2021; with this crate itself moving to
 # edition 2021, the old pin below 1.15 for the heptapod CI no longer applies.
-once_cell = "1.14.0"
+once_cell = "1.16.0"
 
 # We don't use the `miniz-oxide` backend, so as not to change rhg benchmarks,
 # and until we have a clearer view of which backend is the fastest.
 [dependencies.flate2]
-version = "1.0.22"
+version = "1.0.24"
 features = ["zlib"]
 default-features = false
 
 [dev-dependencies]
-clap = "2.34.0"
+clap = { version = "4.0.24", features = ["derive"] }
 pretty_assertions = "1.1.0"
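clap gains the ``derive`` feature above because the nodemap example, rewritten in the next file, moves from the clap 2 builder API to clap 4's derive macros, which generate the argument parser from an annotated struct and subcommand enum.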
--- a/rust/hg-core/examples/nodemap/main.rs	Thu Nov 24 10:34:34 2022 +0100
+++ b/rust/hg-core/examples/nodemap/main.rs	Fri Nov 25 15:14:40 2022 +0100
@@ -3,7 +3,6 @@
 // This software may be used and distributed according to the terms of the
 // GNU General Public License version 2 or any later version.
 
-use clap::*;
 use hg::revlog::node::*;
 use hg::revlog::nodemap::*;
 use hg::revlog::*;
@@ -13,7 +12,6 @@
 use std::io;
 use std::io::Write;
 use std::path::{Path, PathBuf};
-use std::str::FromStr;
 use std::time::Instant;
 
 mod index;
@@ -86,61 +84,66 @@
 }
 
 fn main() {
-    let matches = App::new("Nodemap pure Rust example")
-        .arg(
-            Arg::with_name("REPOSITORY")
-                .help("Path to the repository, always necessary for its index")
-                .required(true),
-        )
-        .arg(
-            Arg::with_name("NODEMAP_FILE")
-                .help("Path to the nodemap file, independent of REPOSITORY")
-                .required(true),
-        )
-        .subcommand(
-            SubCommand::with_name("create")
-                .about("Create NODEMAP_FILE by scanning repository index"),
-        )
-        .subcommand(
-            SubCommand::with_name("query")
-                .about("Query NODEMAP_FILE for PREFIX")
-                .arg(Arg::with_name("PREFIX").required(true)),
-        )
-        .subcommand(
-            SubCommand::with_name("bench")
-                .about(
-                    "Perform #QUERIES random successful queries on NODEMAP_FILE")
-                .arg(Arg::with_name("QUERIES").required(true)),
-        )
-        .get_matches();
+    use clap::{Parser, Subcommand};
 
-    let repo = matches.value_of("REPOSITORY").unwrap();
-    let nm_path = matches.value_of("NODEMAP_FILE").unwrap();
-
-    let index = mmap_index(&Path::new(repo));
+    #[derive(Parser)]
+    #[command()]
+    /// Nodemap pure Rust example
+    struct App {
+        /// Path to the repository, always necessary for its index
+        #[arg(short, long)]
+        repository: PathBuf,
+        /// Path to the nodemap file, independent of REPOSITORY
+        #[arg(short, long)]
+        nodemap_file: PathBuf,
+        #[command(subcommand)]
+        command: Command,
+    }
 
-    if let Some(_) = matches.subcommand_matches("create") {
-        println!("Creating nodemap file {} for repository {}", nm_path, repo);
-        create(&index, &Path::new(nm_path)).unwrap();
-        return;
+    #[derive(Subcommand)]
+    enum Command {
+        /// Create `NODEMAP_FILE` by scanning repository index
+        Create,
+        /// Query `NODEMAP_FILE` for `prefix`
+        Query { prefix: String },
+        /// Perform #`QUERIES` random successful queries on `NODEMAP_FILE`
+        Bench { queries: usize },
     }
 
-    let nm = mmap_nodemap(&Path::new(nm_path));
-    if let Some(matches) = matches.subcommand_matches("query") {
-        let prefix = matches.value_of("PREFIX").unwrap();
-        println!(
-            "Querying {} in nodemap file {} of repository {}",
-            prefix, nm_path, repo
-        );
-        query(&index, &nm, prefix);
-    }
-    if let Some(matches) = matches.subcommand_matches("bench") {
-        let queries =
-            usize::from_str(matches.value_of("QUERIES").unwrap()).unwrap();
-        println!(
-            "Doing {} random queries in nodemap file {} of repository {}",
-            queries, nm_path, repo
-        );
-        bench(&index, &nm, queries);
+    let app = App::parse();
+
+    let repo = &app.repository;
+    let nm_path = &app.nodemap_file;
+
+    let index = mmap_index(repo);
+    let nm = mmap_nodemap(nm_path);
+
+    match &app.command {
+        Command::Create => {
+            println!(
+                "Creating nodemap file {} for repository {}",
+                nm_path.display(),
+                repo.display()
+            );
+            create(&index, &Path::new(nm_path)).unwrap();
+        }
+        Command::Bench { queries } => {
+            println!(
+                "Doing {} random queries in nodemap file {} of repository {}",
+                queries,
+                nm_path.display(),
+                repo.display()
+            );
+            bench(&index, &nm, *queries);
+        }
+        Command::Query { prefix } => {
+            println!(
+                "Querying {} in nodemap file {} of repository {}",
+                prefix,
+                nm_path.display(),
+                repo.display()
+            );
+            query(&index, &nm, prefix);
+        }
     }
 }
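A behavioural note on the derive rewrite above: the old builder code declared REPOSITORY and NODEMAP_FILE as required positional arguments, whereas ``#[arg(short, long)]`` makes them named options, so an invocation now looks roughly like ``<example-binary> --repository REPO --nodemap-file FILE query PREFIX`` (binary name hypothetical), with both options given before the subcommand.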
--- a/rust/hg-core/src/ancestors.rs	Thu Nov 24 10:34:34 2022 +0100
+++ b/rust/hg-core/src/ancestors.rs	Fri Nov 25 15:14:40 2022 +0100
@@ -357,7 +357,6 @@
 
     use super::*;
     use crate::testing::{SampleGraph, VecGraph};
-    use std::iter::FromIterator;
 
     fn list_ancestors<G: Graph>(
         graph: G,
--- a/rust/hg-core/src/config/values.rs	Thu Nov 24 10:34:34 2022 +0100
+++ b/rust/hg-core/src/config/values.rs	Fri Nov 25 15:14:40 2022 +0100
@@ -30,10 +30,8 @@
         ("b", 1 << 0), // Needs to be last
     ];
     for &(unit, multiplier) in UNITS {
-        // TODO: use `value.strip_suffix(unit)` when we require Rust 1.45+
-        if value.ends_with(unit) {
-            let value_before_unit = &value[..value.len() - unit.len()];
-            let float: f64 = value_before_unit.trim().parse().ok()?;
+        if let Some(value) = value.strip_suffix(unit) {
+            let float: f64 = value.trim().parse().ok()?;
             if float >= 0.0 {
                 return Some((float * multiplier as f64).round() as u64);
             } else {
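The rewrite above is unblocked by the toolchain bump earlier in this changeset (Rust 1.61 in rust/README.rst): ``str::strip_suffix``, stable since Rust 1.45, returns ``Some`` with the suffix removed when it matches, so the ``ends_with`` test and the manual slicing collapse into a single ``if let``.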
--- a/rust/hg-core/src/dirstate/entry.rs	Thu Nov 24 10:34:34 2022 +0100
+++ b/rust/hg-core/src/dirstate/entry.rs	Fri Nov 25 15:14:40 2022 +0100
@@ -1,7 +1,6 @@
 use crate::dirstate_tree::on_disk::DirstateV2ParseError;
 use crate::errors::HgError;
 use bitflags::bitflags;
-use std::convert::{TryFrom, TryInto};
 use std::fs;
 use std::io;
 use std::time::{SystemTime, UNIX_EPOCH};
--- a/rust/hg-core/src/dirstate/parsers.rs	Thu Nov 24 10:34:34 2022 +0100
+++ b/rust/hg-core/src/dirstate/parsers.rs	Fri Nov 25 15:14:40 2022 +0100
@@ -9,7 +9,6 @@
 use byteorder::{BigEndian, WriteBytesExt};
 use bytes_cast::{unaligned, BytesCast};
 use micro_timer::timed;
-use std::convert::TryFrom;
 
 /// Parents are stored in the dirstate as byte hashes.
 pub const PARENT_SIZE: usize = 20;
--- a/rust/hg-core/src/dirstate_tree/on_disk.rs	Thu Nov 24 10:34:34 2022 +0100
+++ b/rust/hg-core/src/dirstate_tree/on_disk.rs	Fri Nov 25 15:14:40 2022 +0100
@@ -17,7 +17,6 @@
 use format_bytes::format_bytes;
 use rand::Rng;
 use std::borrow::Cow;
-use std::convert::{TryFrom, TryInto};
 use std::fmt::Write;
 
 /// Added at the start of `.hg/dirstate` when the "v2" format is used.
--- a/rust/hg-core/src/dirstate_tree/status.rs	Thu Nov 24 10:34:34 2022 +0100
+++ b/rust/hg-core/src/dirstate_tree/status.rs	Fri Nov 25 15:14:40 2022 +0100
@@ -15,7 +15,6 @@
 use crate::utils::hg_path::HgPath;
 use crate::BadMatch;
 use crate::DirstateStatus;
-use crate::HgPathBuf;
 use crate::HgPathCow;
 use crate::PatternFileWarning;
 use crate::StatusError;
@@ -147,7 +146,6 @@
     let hg_path = &BorrowedPath::OnDisk(HgPath::new(""));
     let has_ignored_ancestor = HasIgnoredAncestor::create(None, hg_path);
     let root_cached_mtime = None;
-    let root_dir_metadata = None;
     // If the path we have for the repository root is a symlink, do follow it.
     // (As opposed to symlinks within the working directory which are not
     // followed, using `std::fs::symlink_metadata`.)
@@ -155,8 +153,12 @@
         &has_ignored_ancestor,
         dmap.root.as_ref(),
         hg_path,
-        &root_dir,
-        root_dir_metadata,
+        &DirEntry {
+            hg_path: Cow::Borrowed(HgPath::new(b"")),
+            fs_path: Cow::Borrowed(&root_dir),
+            symlink_metadata: None,
+            file_type: FakeFileType::Directory,
+        },
         root_cached_mtime,
         is_at_repo_root,
     )?;
@@ -340,7 +342,7 @@
     /// need to call `read_dir`.
     fn can_skip_fs_readdir(
         &self,
-        directory_metadata: Option<&std::fs::Metadata>,
+        directory_entry: &DirEntry,
         cached_directory_mtime: Option<TruncatedTimestamp>,
     ) -> bool {
         if !self.options.list_unknown && !self.options.list_ignored {
@@ -356,9 +358,9 @@
                 // The dirstate contains a cached mtime for this directory, set
                 // by a previous run of the `status` algorithm which found this
                 // directory eligible for `read_dir` caching.
-                if let Some(meta) = directory_metadata {
+                if let Ok(meta) = directory_entry.symlink_metadata() {
                     if cached_mtime
-                        .likely_equal_to_mtime_of(meta)
+                        .likely_equal_to_mtime_of(&meta)
                         .unwrap_or(false)
                     {
                         // The mtime of that directory has not changed
@@ -379,26 +381,40 @@
         has_ignored_ancestor: &'ancestor HasIgnoredAncestor<'ancestor>,
         dirstate_nodes: ChildNodesRef<'tree, 'on_disk>,
         directory_hg_path: &BorrowedPath<'tree, 'on_disk>,
-        directory_fs_path: &Path,
-        directory_metadata: Option<&std::fs::Metadata>,
+        directory_entry: &DirEntry,
         cached_directory_mtime: Option<TruncatedTimestamp>,
         is_at_repo_root: bool,
     ) -> Result<bool, DirstateV2ParseError> {
-        if self.can_skip_fs_readdir(directory_metadata, cached_directory_mtime)
-        {
+        if self.can_skip_fs_readdir(directory_entry, cached_directory_mtime) {
             dirstate_nodes
                 .par_iter()
                 .map(|dirstate_node| {
-                    let fs_path = directory_fs_path.join(get_path_from_bytes(
+                    let fs_path = &directory_entry.fs_path;
+                    let fs_path = fs_path.join(get_path_from_bytes(
                         dirstate_node.base_name(self.dmap.on_disk)?.as_bytes(),
                     ));
                     match std::fs::symlink_metadata(&fs_path) {
-                        Ok(fs_metadata) => self.traverse_fs_and_dirstate(
-                            &fs_path,
-                            &fs_metadata,
-                            dirstate_node,
-                            has_ignored_ancestor,
-                        ),
+                        Ok(fs_metadata) => {
+                            let file_type =
+                                match fs_metadata.file_type().try_into() {
+                                    Ok(file_type) => file_type,
+                                    Err(_) => return Ok(()),
+                                };
+                            let entry = DirEntry {
+                                hg_path: Cow::Borrowed(
+                                    dirstate_node
+                                        .full_path(&self.dmap.on_disk)?,
+                                ),
+                                fs_path: Cow::Borrowed(&fs_path),
+                                symlink_metadata: Some(fs_metadata),
+                                file_type,
+                            };
+                            self.traverse_fs_and_dirstate(
+                                &entry,
+                                dirstate_node,
+                                has_ignored_ancestor,
+                            )
+                        }
                         Err(e) if e.kind() == std::io::ErrorKind::NotFound => {
                             self.traverse_dirstate_only(dirstate_node)
                         }
@@ -419,7 +435,7 @@
 
         let mut fs_entries = if let Ok(entries) = self.read_dir(
             directory_hg_path,
-            directory_fs_path,
+            &directory_entry.fs_path,
             is_at_repo_root,
         ) {
             entries
@@ -435,7 +451,7 @@
         let dirstate_nodes = dirstate_nodes.sorted();
         // `sort_unstable_by_key` doesn’t allow keys borrowing from the value:
         // https://github.com/rust-lang/rust/issues/34162
-        fs_entries.sort_unstable_by(|e1, e2| e1.base_name.cmp(&e2.base_name));
+        fs_entries.sort_unstable_by(|e1, e2| e1.hg_path.cmp(&e2.hg_path));
 
         // Propagate here any error that would happen inside the comparison
         // callback below
@@ -451,7 +467,7 @@
                 dirstate_node
                     .base_name(self.dmap.on_disk)
                     .unwrap()
-                    .cmp(&fs_entry.base_name)
+                    .cmp(&fs_entry.hg_path)
             },
         )
         .par_bridge()
@@ -461,8 +477,7 @@
             match pair {
                 Both(dirstate_node, fs_entry) => {
                     self.traverse_fs_and_dirstate(
-                        &fs_entry.full_path,
-                        &fs_entry.metadata,
+                        &fs_entry,
                         dirstate_node,
                         has_ignored_ancestor,
                     )?;
@@ -487,23 +502,21 @@
 
     fn traverse_fs_and_dirstate<'ancestor>(
         &self,
-        fs_path: &Path,
-        fs_metadata: &std::fs::Metadata,
+        fs_entry: &DirEntry,
         dirstate_node: NodeRef<'tree, 'on_disk>,
         has_ignored_ancestor: &'ancestor HasIgnoredAncestor<'ancestor>,
     ) -> Result<(), DirstateV2ParseError> {
         let outdated_dircache =
             self.check_for_outdated_directory_cache(&dirstate_node)?;
         let hg_path = &dirstate_node.full_path_borrowed(self.dmap.on_disk)?;
-        let file_type = fs_metadata.file_type();
-        let file_or_symlink = file_type.is_file() || file_type.is_symlink();
+        let file_or_symlink = fs_entry.is_file() || fs_entry.is_symlink();
         if !file_or_symlink {
             // If we previously had a file here, it was removed (with
             // `hg rm` or similar) or deleted before it could be
             // replaced by a directory or something else.
             self.mark_removed_or_deleted_if_file(&dirstate_node)?;
         }
-        if file_type.is_dir() {
+        if fs_entry.is_dir() {
             if self.options.collect_traversed_dirs {
                 self.outcome
                     .lock()
@@ -521,14 +534,13 @@
                     &is_ignored,
                     dirstate_node.children(self.dmap.on_disk)?,
                     hg_path,
-                    fs_path,
-                    Some(fs_metadata),
+                    fs_entry,
                     dirstate_node.cached_directory_mtime()?,
                     is_at_repo_root,
                 )?;
             self.maybe_save_directory_mtime(
                 children_all_have_dirstate_node_or_are_ignored,
-                fs_metadata,
+                fs_entry,
                 dirstate_node,
                 outdated_dircache,
             )?
@@ -550,7 +562,7 @@
                     } else if entry.modified() {
                         self.push_outcome(Outcome::Modified, &dirstate_node)?;
                     } else {
-                        self.handle_normal_file(&dirstate_node, fs_metadata)?;
+                        self.handle_normal_file(&dirstate_node, fs_entry)?;
                     }
                 } else {
                     // `node.entry.is_none()` indicates a "directory"
@@ -578,7 +590,7 @@
     fn maybe_save_directory_mtime(
         &self,
         children_all_have_dirstate_node_or_are_ignored: bool,
-        directory_metadata: &std::fs::Metadata,
+        directory_entry: &DirEntry,
         dirstate_node: NodeRef<'tree, 'on_disk>,
         outdated_directory_cache: bool,
     ) -> Result<(), DirstateV2ParseError> {
@@ -605,11 +617,13 @@
         // resolution based on the filesystem (for example ext3
         // only stores integer seconds), kernel (see
         // https://stackoverflow.com/a/14393315/1162888), etc.
+        let metadata = match directory_entry.symlink_metadata() {
+            Ok(meta) => meta,
+            Err(_) => return Ok(()),
+        };
         let directory_mtime = if let Ok(option) =
-            TruncatedTimestamp::for_reliable_mtime_of(
-                directory_metadata,
-                status_start,
-            ) {
+            TruncatedTimestamp::for_reliable_mtime_of(&metadata, status_start)
+        {
             if let Some(directory_mtime) = option {
                 directory_mtime
             } else {
@@ -671,18 +685,23 @@
     fn handle_normal_file(
         &self,
         dirstate_node: &NodeRef<'tree, 'on_disk>,
-        fs_metadata: &std::fs::Metadata,
+        fs_entry: &DirEntry,
     ) -> Result<(), DirstateV2ParseError> {
         // Keep the low 31 bits
         fn truncate_u64(value: u64) -> i32 {
             (value & 0x7FFF_FFFF) as i32
         }
 
+        let fs_metadata = match fs_entry.symlink_metadata() {
+            Ok(meta) => meta,
+            Err(_) => return Ok(()),
+        };
+
         let entry = dirstate_node
             .entry()?
             .expect("handle_normal_file called with entry-less node");
         let mode_changed =
-            || self.options.check_exec && entry.mode_changed(fs_metadata);
+            || self.options.check_exec && entry.mode_changed(&fs_metadata);
         let size = entry.size();
         let size_changed = size != truncate_u64(fs_metadata.len());
         if size >= 0 && size_changed && fs_metadata.file_type().is_symlink() {
@@ -697,7 +716,7 @@
         } else {
             let mtime_looks_clean;
             if let Some(dirstate_mtime) = entry.truncated_mtime() {
-                let fs_mtime = TruncatedTimestamp::for_mtime_of(fs_metadata)
+                let fs_mtime = TruncatedTimestamp::for_mtime_of(&fs_metadata)
                     .expect("OS/libc does not support mtime?");
                 // There might be a change in the future if, for example, the
                 // internal clock becomes off while the process runs, but this is a
@@ -767,10 +786,9 @@
         directory_hg_path: &HgPath,
         fs_entry: &DirEntry,
     ) -> bool {
-        let hg_path = directory_hg_path.join(&fs_entry.base_name);
-        let file_type = fs_entry.metadata.file_type();
-        let file_or_symlink = file_type.is_file() || file_type.is_symlink();
-        if file_type.is_dir() {
+        let hg_path = directory_hg_path.join(&fs_entry.hg_path);
+        let file_or_symlink = fs_entry.is_file() || fs_entry.is_symlink();
+        if fs_entry.is_dir() {
             let is_ignored =
                 has_ignored_ancestor || (self.ignore_fn)(&hg_path);
             let traverse_children = if is_ignored {
@@ -783,11 +801,9 @@
             };
             if traverse_children {
                 let is_at_repo_root = false;
-                if let Ok(children_fs_entries) = self.read_dir(
-                    &hg_path,
-                    &fs_entry.full_path,
-                    is_at_repo_root,
-                ) {
+                if let Ok(children_fs_entries) =
+                    self.read_dir(&hg_path, &fs_entry.fs_path, is_at_repo_root)
+                {
                     children_fs_entries.par_iter().for_each(|child_fs_entry| {
                         self.traverse_fs_only(
                             is_ignored,
@@ -850,15 +866,46 @@
     }
 }
 
-struct DirEntry {
-    base_name: HgPathBuf,
-    full_path: PathBuf,
-    metadata: std::fs::Metadata,
+/// Since [`std::fs::FileType`] cannot be built directly, we emulate what we
+/// care about.
+#[derive(Copy, Clone, Debug, PartialEq, Eq)]
+enum FakeFileType {
+    File,
+    Directory,
+    Symlink,
 }
 
-impl DirEntry {
-    /// Returns **unsorted** entries in the given directory, with name and
-    /// metadata.
+impl TryFrom<std::fs::FileType> for FakeFileType {
+    type Error = ();
+
+    fn try_from(f: std::fs::FileType) -> Result<Self, Self::Error> {
+        if f.is_dir() {
+            Ok(Self::Directory)
+        } else if f.is_file() {
+            Ok(Self::File)
+        } else if f.is_symlink() {
+            Ok(Self::Symlink)
+        } else {
+            // Other types such as FIFOs, sockets, etc.
+            Err(())
+        }
+    }
+}
+
+struct DirEntry<'a> {
+    /// Path as stored in the dirstate, or just the filename as an optimization.
+    hg_path: HgPathCow<'a>,
+    /// Filesystem path
+    fs_path: Cow<'a, Path>,
+    /// Lazily computed
+    symlink_metadata: Option<std::fs::Metadata>,
+    /// Already computed for ergonomics.
+    file_type: FakeFileType,
+}
+
+impl<'a> DirEntry<'a> {
+    /// Returns **unsorted** entries in the given directory, with name,
+    /// metadata and file type.
     ///
     /// If a `.hg` sub-directory is encountered:
     ///
@@ -872,7 +919,7 @@
         let mut results = Vec::new();
         for entry in read_dir_path.read_dir()? {
             let entry = entry?;
-            let metadata = match entry.metadata() {
+            let file_type = match entry.file_type() {
                 Ok(v) => v,
                 Err(e) => {
                     // race with file deletion?
@@ -889,7 +936,7 @@
                 if is_at_repo_root {
                     // Skip the repo’s own .hg (might be a symlink)
                     continue;
-                } else if metadata.is_dir() {
+                } else if file_type.is_dir() {
                     // A .hg sub-directory at another location means a subrepo,
                     // skip it entirely.
                     return Ok(Vec::new());
@@ -900,15 +947,40 @@
             } else {
                 entry.path()
             };
-            let base_name = get_bytes_from_os_string(file_name).into();
+            let filename =
+                Cow::Owned(get_bytes_from_os_string(file_name).into());
+            let file_type = match FakeFileType::try_from(file_type) {
+                Ok(file_type) => file_type,
+                Err(_) => continue,
+            };
             results.push(DirEntry {
-                base_name,
-                full_path,
-                metadata,
+                hg_path: filename,
+                fs_path: Cow::Owned(full_path.to_path_buf()),
+                symlink_metadata: None,
+                file_type,
             })
         }
         Ok(results)
     }
+
+    fn symlink_metadata(&self) -> Result<std::fs::Metadata, std::io::Error> {
+        match &self.symlink_metadata {
+            Some(meta) => Ok(meta.clone()),
+            None => std::fs::symlink_metadata(&self.fs_path),
+        }
+    }
+
+    fn is_dir(&self) -> bool {
+        self.file_type == FakeFileType::Directory
+    }
+
+    fn is_file(&self) -> bool {
+        self.file_type == FakeFileType::File
+    }
+
+    fn is_symlink(&self) -> bool {
+        self.file_type == FakeFileType::Symlink
+    }
 }
 
 /// Return the `mtime` of a temporary file newly-created in the `.hg` directory
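
A reduced sketch of the refactor above: `DirEntry` now records a cheap `FakeFileType` up front (converted fallibly from `std::fs::FileType`, which cannot be constructed by hand) and fetches full `symlink_metadata` only on demand, so traversals that never need an mtime or size save one `stat` call per entry.

    use std::borrow::Cow;
    use std::path::Path;

    #[derive(Copy, Clone, PartialEq, Eq)]
    enum FakeFileType {
        File,
        Directory,
        Symlink,
    }

    struct DirEntry<'a> {
        fs_path: Cow<'a, Path>,
        symlink_metadata: Option<std::fs::Metadata>, // None until first needed
        file_type: FakeFileType,
    }

    impl<'a> DirEntry<'a> {
        fn symlink_metadata(&self) -> std::io::Result<std::fs::Metadata> {
            match &self.symlink_metadata {
                Some(meta) => Ok(meta.clone()), // cached: no extra syscall
                None => std::fs::symlink_metadata(&self.fs_path), // lazy stat
            }
        }

        fn is_dir(&self) -> bool {
            self.file_type == FakeFileType::Directory
        }
    }
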
--- a/rust/hg-core/src/lock.rs	Thu Nov 24 10:34:34 2022 +0100
+++ b/rust/hg-core/src/lock.rs	Fri Nov 25 15:14:40 2022 +0100
@@ -2,7 +2,6 @@
 
 use crate::errors::HgError;
 use crate::errors::HgResultExt;
-use crate::utils::StrExt;
 use crate::vfs::Vfs;
 use std::io;
 use std::io::ErrorKind;
@@ -107,7 +106,7 @@
 /// running anymore.
 fn lock_should_be_broken(data: &Option<String>) -> bool {
     (|| -> Option<bool> {
-        let (prefix, pid) = data.as_ref()?.split_2(':')?;
+        let (prefix, pid) = data.as_ref()?.split_once(':')?;
         if prefix != &*LOCK_PREFIX {
             return Some(false);
         }
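
`str::split_once` (stable since Rust 1.52, per the TODO on the removed helper) is a drop-in replacement for the crate-local `StrExt::split_2`, which a `utils.rs` hunk below deletes: it splits on the first occurrence of the separator and returns `None` when the separator is absent. A minimal sketch with a lock-file-style payload:

    fn parse_lock_data(data: &str) -> Option<(&str, &str)> {
        data.split_once(':') // e.g. "hostname:12345" -> ("hostname", "12345")
    }

    // parse_lock_data("alice-laptop:4242") == Some(("alice-laptop", "4242"))
    // parse_lock_data("malformed")         == None
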
--- a/rust/hg-core/src/matchers.rs	Thu Nov 24 10:34:34 2022 +0100
+++ b/rust/hg-core/src/matchers.rs	Fri Nov 25 15:14:40 2022 +0100
@@ -27,7 +27,6 @@
 use std::borrow::ToOwned;
 use std::collections::HashSet;
 use std::fmt::{Display, Error, Formatter};
-use std::iter::FromIterator;
 use std::ops::Deref;
 use std::path::{Path, PathBuf};
 
--- a/rust/hg-core/src/revlog/index.rs	Thu Nov 24 10:34:34 2022 +0100
+++ b/rust/hg-core/src/revlog/index.rs	Fri Nov 25 15:14:40 2022 +0100
@@ -1,4 +1,3 @@
-use std::convert::TryInto;
 use std::ops::Deref;
 
 use byteorder::{BigEndian, ByteOrder};
--- a/rust/hg-core/src/revlog/node.rs	Thu Nov 24 10:34:34 2022 +0100
+++ b/rust/hg-core/src/revlog/node.rs	Fri Nov 25 15:14:40 2022 +0100
@@ -10,7 +10,6 @@
 
 use crate::errors::HgError;
 use bytes_cast::BytesCast;
-use std::convert::{TryFrom, TryInto};
 use std::fmt;
 
 /// The length in bytes of a `Node`
--- a/rust/hg-core/src/revlog/nodemap_docket.rs	Thu Nov 24 10:34:34 2022 +0100
+++ b/rust/hg-core/src/revlog/nodemap_docket.rs	Fri Nov 25 15:14:40 2022 +0100
@@ -3,7 +3,6 @@
 use memmap2::Mmap;
 use std::path::{Path, PathBuf};
 
-use crate::utils::strip_suffix;
 use crate::vfs::Vfs;
 
 const ONDISK_VERSION: u8 = 1;
@@ -97,8 +96,9 @@
         .expect("expected a base name")
         .to_str()
         .expect("expected an ASCII file name in the store");
-    let prefix = strip_suffix(docket_name, ".n.a")
-        .or_else(|| strip_suffix(docket_name, ".n"))
+    let prefix = docket_name
+        .strip_suffix(".n.a")
+        .or_else(|| docket_name.strip_suffix(".n"))
         .expect("expected docket path in .n or .n.a");
     let name = format!("{}-{}.nd", prefix, uid);
     docket_path
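
The chained lookup above is a tidy pattern: try the longer ".n.a" suffix first, then fall back to ".n" via `or_else`, accepting both docket spellings in one expression. A tiny sketch:

    fn docket_prefix(docket_name: &str) -> Option<&str> {
        docket_name
            .strip_suffix(".n.a")
            .or_else(|| docket_name.strip_suffix(".n"))
    }

    // docket_prefix("00changelog.n.a") == Some("00changelog")
    // docket_prefix("00changelog.n")   == Some("00changelog")
    // docket_prefix("00changelog.i")   == None
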
--- a/rust/hg-core/src/revlog/revlog.rs	Thu Nov 24 10:34:34 2022 +0100
+++ b/rust/hg-core/src/revlog/revlog.rs	Fri Nov 25 15:14:40 2022 +0100
@@ -1,5 +1,4 @@
 use std::borrow::Cow;
-use std::convert::TryFrom;
 use std::io::Read;
 use std::ops::Deref;
 use std::path::Path;
@@ -518,7 +517,7 @@
         } else {
             let cap = self.uncompressed_len.max(0) as usize;
             let mut buf = vec![0; cap];
-            let len = zstd::block::decompress_to_buffer(self.bytes, &mut buf)
+            let len = zstd::bulk::decompress_to_buffer(self.bytes, &mut buf)
                 .map_err(|e| corrupted(e.to_string()))?;
             if len != self.uncompressed_len as usize {
                 Err(corrupted("uncompressed length does not match"))
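
Newer releases of the `zstd` crate deprecate the `block` module in favour of `zstd::bulk`, which keeps the same one-shot (non-streaming) shape; `decompress_to_buffer` still returns the number of bytes written. A hedged round-trip sketch, assuming the `zstd` crate as a dependency:

    fn zstd_roundtrip(data: &[u8]) -> std::io::Result<bool> {
        let compressed = zstd::bulk::compress(data, 3)?; // compression level 3
        let mut buf = vec![0u8; data.len()];
        let written = zstd::bulk::decompress_to_buffer(&compressed, &mut buf)?;
        Ok(written == data.len() && &buf[..written] == data)
    }
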
--- a/rust/hg-core/src/utils.rs	Thu Nov 24 10:34:34 2022 +0100
+++ b/rust/hg-core/src/utils.rs	Fri Nov 25 15:14:40 2022 +0100
@@ -145,21 +145,6 @@
     }
 }
 
-pub trait StrExt {
-    // TODO: Use https://doc.rust-lang.org/nightly/std/primitive.str.html#method.split_once
-    // once we require Rust 1.52+
-    fn split_2(&self, separator: char) -> Option<(&str, &str)>;
-}
-
-impl StrExt for str {
-    fn split_2(&self, separator: char) -> Option<(&str, &str)> {
-        let mut iter = self.splitn(2, separator);
-        let a = iter.next()?;
-        let b = iter.next()?;
-        Some((a, b))
-    }
-}
-
 pub trait Escaped {
     /// Return bytes escaped for display to the user
     fn escaped_bytes(&self) -> Vec<u8>;
@@ -211,28 +196,20 @@
     }
 }
 
-// TODO: use the str method when we require Rust 1.45
-pub(crate) fn strip_suffix<'a>(s: &'a str, suffix: &str) -> Option<&'a str> {
-    if s.ends_with(suffix) {
-        Some(&s[..s.len() - suffix.len()])
-    } else {
-        None
-    }
-}
-
 #[cfg(unix)]
 pub fn shell_quote(value: &[u8]) -> Vec<u8> {
-    // TODO: Use the `matches!` macro when we require Rust 1.42+
-    if value.iter().all(|&byte| match byte {
-        b'a'..=b'z'
-        | b'A'..=b'Z'
-        | b'0'..=b'9'
-        | b'.'
-        | b'_'
-        | b'/'
-        | b'+'
-        | b'-' => true,
-        _ => false,
+    if value.iter().all(|&byte| {
+        matches!(
+            byte,
+            b'a'..=b'z'
+            | b'A'..=b'Z'
+            | b'0'..=b'9'
+            | b'.'
+            | b'_'
+            | b'/'
+            | b'+'
+            | b'-'
+        )
     }) {
         value.to_owned()
     } else {
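
The `matches!` macro (stable since Rust 1.42, per the dropped TODO) collapses a `match` that only answers `true`/`false` into one expression, which reads much better inside the `all` closure. Extracted as a standalone predicate:

    fn shell_safe_byte(byte: u8) -> bool {
        matches!(
            byte,
            b'a'..=b'z' | b'A'..=b'Z' | b'0'..=b'9'
                | b'.' | b'_' | b'/' | b'+' | b'-'
        )
    }

    // b"path/to-file_1.txt".iter().all(|&b| shell_safe_byte(b)) == true
    // shell_safe_byte(b' ') == false (such a value needs quoting)
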
--- a/rust/hg-core/src/utils/hg_path.rs	Thu Nov 24 10:34:34 2022 +0100
+++ b/rust/hg-core/src/utils/hg_path.rs	Fri Nov 25 15:14:40 2022 +0100
@@ -8,7 +8,6 @@
 use crate::utils::SliceExt;
 use std::borrow::Borrow;
 use std::borrow::Cow;
-use std::convert::TryFrom;
 use std::ffi::{OsStr, OsString};
 use std::fmt;
 use std::ops::Deref;
--- a/rust/hg-cpython/Cargo.toml	Thu Nov 24 10:34:34 2022 +0100
+++ b/rust/hg-cpython/Cargo.toml	Fri Nov 25 15:14:40 2022 +0100
@@ -2,18 +2,18 @@
 name = "hg-cpython"
 version = "0.1.0"
 authors = ["Georges Racinet <gracinet@anybox.fr>"]
-edition = "2018"
+edition = "2021"
 
 [lib]
 name='rusthg'
 crate-type = ["cdylib"]
 
 [dependencies]
-cpython = { version = "0.7.0", features = ["extension-module"] }
-crossbeam-channel = "0.5.2"
+cpython = { version = "0.7.1", features = ["extension-module"] }
+crossbeam-channel = "0.5.6"
 hg-core = { path = "../hg-core"}
-libc = "0.2.119"
-log = "0.4.14"
-env_logger = "0.9.0"
+libc = "0.2.137"
+log = "0.4.17"
+env_logger = "0.9.3"
 stable_deref_trait = "1.2.0"
 vcsgraph = "0.2.0"
--- a/rust/hg-cpython/src/conversion.rs	Thu Nov 24 10:34:34 2022 +0100
+++ b/rust/hg-cpython/src/conversion.rs	Fri Nov 25 15:14:40 2022 +0100
@@ -10,7 +10,6 @@
 
 use cpython::{ObjectProtocol, PyObject, PyResult, Python};
 use hg::Revision;
-use std::iter::FromIterator;
 
 /// Utility function to convert a Python iterable into various collections
 ///
--- a/rust/hg-cpython/src/dirstate/dirstate_map.rs	Thu Nov 24 10:34:34 2022 +0100
+++ b/rust/hg-cpython/src/dirstate/dirstate_map.rs	Fri Nov 25 15:14:40 2022 +0100
@@ -9,7 +9,6 @@
 //! `hg-core` package.
 
 use std::cell::{RefCell, RefMut};
-use std::convert::TryInto;
 
 use cpython::{
     exc, PyBool, PyBytes, PyClone, PyDict, PyErr, PyList, PyNone, PyObject,
--- a/rust/hg-cpython/src/utils.rs	Thu Nov 24 10:34:34 2022 +0100
+++ b/rust/hg-cpython/src/utils.rs	Fri Nov 25 15:14:40 2022 +0100
@@ -1,7 +1,6 @@
 use cpython::exc::ValueError;
 use cpython::{PyBytes, PyDict, PyErr, PyObject, PyResult, PyTuple, Python};
 use hg::revlog::Node;
-use std::convert::TryFrom;
 
 #[allow(unused)]
 pub fn print_python_trace(py: Python) -> PyResult<PyObject> {
--- a/rust/rhg/Cargo.toml	Thu Nov 24 10:34:34 2022 +0100
+++ b/rust/rhg/Cargo.toml	Fri Nov 25 15:14:40 2022 +0100
@@ -5,21 +5,21 @@
     "Antoine Cezar <antoine.cezar@octobus.net>",
     "Raphaël Gomès <raphael.gomes@octobus.net>",
 ]
-edition = "2018"
+edition = "2021"
 
 [dependencies]
 atty = "0.2.14"
 hg-core = { path = "../hg-core"}
-chrono = "0.4.19"
-clap = "2.34.0"
+chrono = "0.4.23"
+clap = { version = "4.0.24", features = ["cargo"] }
 derive_more = "0.99.17"
-home = "0.5.3"
+home = "0.5.4"
 lazy_static = "1.4.0"
-log = "0.4.14"
+log = "0.4.17"
 micro-timer = "0.4.0"
-regex = "1.5.5"
-env_logger = "0.9.0"
+regex = "1.7.0"
+env_logger = "0.9.3"
 format-bytes = "0.3.0"
 users = "0.11.0"
-which = "4.2.5"
-rayon = "1.5.1"
+which = "4.3.0"
+rayon = "1.5.3"
--- a/rust/rhg/src/commands/cat.rs	Thu Nov 24 10:34:34 2022 +0100
+++ b/rust/rhg/src/commands/cat.rs	Fri Nov 25 15:14:40 2022 +0100
@@ -4,28 +4,28 @@
 use hg::operations::cat;
 use hg::utils::hg_path::HgPathBuf;
 use micro_timer::timed;
-use std::convert::TryFrom;
+use std::ffi::OsString;
+use std::os::unix::prelude::OsStrExt;
 
 pub const HELP_TEXT: &str = "
 Output the current or given revision of files
 ";
 
-pub fn args() -> clap::App<'static, 'static> {
-    clap::SubCommand::with_name("cat")
+pub fn args() -> clap::Command {
+    clap::command!("cat")
         .arg(
-            Arg::with_name("rev")
+            Arg::new("rev")
                 .help("search the repository as it is in REV")
-                .short("-r")
-                .long("--rev")
-                .value_name("REV")
-                .takes_value(true),
+                .short('r')
+                .long("rev")
+                .value_name("REV"),
         )
         .arg(
-            clap::Arg::with_name("files")
+            clap::Arg::new("files")
                 .required(true)
-                .multiple(true)
-                .empty_values(false)
+                .num_args(1..)
                 .value_name("FILE")
+                .value_parser(clap::value_parser!(std::ffi::OsString))
                 .help("Files to output"),
         )
         .about(HELP_TEXT)
@@ -42,11 +42,15 @@
         ));
     }
 
-    let rev = invocation.subcommand_args.value_of("rev");
-    let file_args = match invocation.subcommand_args.values_of("files") {
-        Some(files) => files.collect(),
-        None => vec![],
-    };
+    let rev = invocation.subcommand_args.get_one::<String>("rev");
+    let file_args =
+        match invocation.subcommand_args.get_many::<OsString>("files") {
+            Some(files) => files
+                .filter(|s| !s.is_empty())
+                .map(|s| s.as_os_str())
+                .collect(),
+            None => vec![],
+        };
 
     let repo = invocation.repo?;
     let cwd = hg::utils::current_dir()?;
@@ -54,8 +58,8 @@
     let working_directory = cwd.join(working_directory); // Make it absolute
 
     let mut files = vec![];
-    for file in file_args.iter() {
-        if file.starts_with("set:") {
+    for file in file_args {
+        if file.as_bytes().starts_with(b"set:") {
             let message = "fileset";
             return Err(CommandError::unsupported(message));
         }
@@ -63,7 +67,7 @@
         let normalized = cwd.join(&file);
         // TODO: actually normalize `..` path segments etc?
         let dotted = normalized.components().any(|c| c.as_os_str() == "..");
-        if file == &"." || dotted {
+        if file.as_bytes() == b"." || dotted {
             let message = "`..` or `.` path segment";
             return Err(CommandError::unsupported(message));
         }
@@ -75,7 +79,7 @@
             .map_err(|_| {
                 CommandError::abort(format!(
                     "abort: {} not under root '{}'\n(consider using '--cwd {}')",
-                    file,
+                    String::from_utf8_lossy(file.as_bytes()),
                     working_directory.display(),
                     relative_path.display(),
                 ))
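
The hunk above follows the usual clap 2-to-4 migration: `Arg::new` replaces `Arg::with_name`, `short` takes a `char` without the dash, options accept a value by default (no `takes_value`), `multiple(true)`/`empty_values(false)` become `num_args(1..)` plus an explicit `value_parser`, and lookups are typed through `get_one`/`get_many`. A minimal sketch of the same shape, assuming the clap 4 dependency declared in the `rhg` Cargo.toml hunk above:

    fn cat_args() -> clap::Command {
        clap::Command::new("cat")
            .arg(
                clap::Arg::new("rev")
                    .short('r')
                    .long("rev")
                    .value_name("REV"),
            )
            .arg(
                clap::Arg::new("files")
                    .required(true)
                    .num_args(1..)
                    .value_parser(clap::value_parser!(std::ffi::OsString)),
            )
    }

    // let m = cat_args().get_matches_from(["cat", "-r", "tip", "a", "b"]);
    // m.get_one::<String>("rev")                -> Some("tip")
    // m.get_many::<std::ffi::OsString>("files") -> two values
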
--- a/rust/rhg/src/commands/config.rs	Thu Nov 24 10:34:34 2022 +0100
+++ b/rust/rhg/src/commands/config.rs	Fri Nov 25 15:14:40 2022 +0100
@@ -8,14 +8,13 @@
 With one argument of the form section.name, print just the value of that config item.
 ";
 
-pub fn args() -> clap::App<'static, 'static> {
-    clap::SubCommand::with_name("config")
+pub fn args() -> clap::Command {
+    clap::command!("config")
         .arg(
-            Arg::with_name("name")
+            Arg::new("name")
                 .help("the section.name to print")
                 .value_name("NAME")
-                .required(true)
-                .takes_value(true),
+                .required(true),
         )
         .about(HELP_TEXT)
 }
@@ -23,7 +22,7 @@
 pub fn run(invocation: &crate::CliInvocation) -> Result<(), CommandError> {
     let (section, name) = invocation
         .subcommand_args
-        .value_of("name")
+        .get_one::<String>("name")
         .expect("missing required CLI argument")
         .as_bytes()
         .split_2(b'.')
--- a/rust/rhg/src/commands/debugdata.rs	Thu Nov 24 10:34:34 2022 +0100
+++ b/rust/rhg/src/commands/debugdata.rs	Fri Nov 25 15:14:40 2022 +0100
@@ -8,27 +8,27 @@
 Dump the contents of a data file revision
 ";
 
-pub fn args() -> clap::App<'static, 'static> {
-    clap::SubCommand::with_name("debugdata")
+pub fn args() -> clap::Command {
+    clap::command!("debugdata")
         .arg(
-            Arg::with_name("changelog")
+            Arg::new("changelog")
                 .help("open changelog")
-                .short("-c")
-                .long("--changelog"),
+                .short('c')
+                .action(clap::ArgAction::SetTrue),
         )
         .arg(
-            Arg::with_name("manifest")
+            Arg::new("manifest")
                 .help("open manifest")
-                .short("-m")
-                .long("--manifest"),
+                .short('m')
+                .action(clap::ArgAction::SetTrue),
         )
         .group(
-            ArgGroup::with_name("")
+            ArgGroup::new("revlog")
                 .args(&["changelog", "manifest"])
                 .required(true),
         )
         .arg(
-            Arg::with_name("rev")
+            Arg::new("rev")
                 .help("revision")
                 .required(true)
                 .value_name("REV"),
@@ -40,19 +40,21 @@
 pub fn run(invocation: &crate::CliInvocation) -> Result<(), CommandError> {
     let args = invocation.subcommand_args;
     let rev = args
-        .value_of("rev")
+        .get_one::<String>("rev")
         .expect("rev should be a required argument");
-    let kind =
-        match (args.is_present("changelog"), args.is_present("manifest")) {
-            (true, false) => DebugDataKind::Changelog,
-            (false, true) => DebugDataKind::Manifest,
-            (true, true) => {
-                unreachable!("Should not happen since options are exclusive")
-            }
-            (false, false) => {
-                unreachable!("Should not happen since options are required")
-            }
-        };
+    let kind = match (
+        args.get_one::<bool>("changelog").unwrap(),
+        args.get_one::<bool>("manifest").unwrap(),
+    ) {
+        (true, false) => DebugDataKind::Changelog,
+        (false, true) => DebugDataKind::Manifest,
+        (true, true) => {
+            unreachable!("Should not happen since options are exclusive")
+        }
+        (false, false) => {
+            unreachable!("Should not happen since options are required")
+        }
+    };
 
     let repo = invocation.repo?;
     if repo.has_narrow() {
@@ -60,7 +62,7 @@
             "support for ellipsis nodes is missing and repo has narrow enabled",
         ));
     }
-    let data = debug_data(repo, rev, kind).map_err(|e| (e, rev))?;
+    let data = debug_data(repo, rev, kind).map_err(|e| (e, rev.as_ref()))?;
 
     let mut stdout = invocation.ui.stdout_buffer();
     stdout.write_all(&data)?;
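
Two clap 4 details above are worth noting: pure flags now need an explicit `ArgAction::SetTrue` (and are read back with `get_flag`/`get_one::<bool>`), and the previously anonymous `ArgGroup` gets a proper name. The required group still makes `-c` and `-m` mutually exclusive with exactly one demanded. A compact sketch:

    fn debugdata_args() -> clap::Command {
        clap::Command::new("debugdata")
            .arg(
                clap::Arg::new("changelog")
                    .short('c')
                    .action(clap::ArgAction::SetTrue),
            )
            .arg(
                clap::Arg::new("manifest")
                    .short('m')
                    .action(clap::ArgAction::SetTrue),
            )
            .group(
                clap::ArgGroup::new("revlog")
                    .args(["changelog", "manifest"])
                    .required(true), // exactly one of -c / -m
            )
    }

    // debugdata_args().get_matches_from(["debugdata", "-c"]).get_flag("changelog") == true
    // passing both "-c -m", or neither, is a usage error rejected by clap
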
--- a/rust/rhg/src/commands/debugignorerhg.rs	Thu Nov 24 10:34:34 2022 +0100
+++ b/rust/rhg/src/commands/debugignorerhg.rs	Fri Nov 25 15:14:40 2022 +0100
@@ -1,5 +1,4 @@
 use crate::error::CommandError;
-use clap::SubCommand;
 use hg;
 use hg::matchers::get_ignore_matcher;
 use hg::StatusError;
@@ -13,8 +12,8 @@
 Some options might be missing; check the list below.
 ";
 
-pub fn args() -> clap::App<'static, 'static> {
-    SubCommand::with_name("debugignorerhg").about(HELP_TEXT)
+pub fn args() -> clap::Command {
+    clap::command!("debugignorerhg").about(HELP_TEXT)
 }
 
 pub fn run(invocation: &crate::CliInvocation) -> Result<(), CommandError> {
--- a/rust/rhg/src/commands/debugrequirements.rs	Thu Nov 24 10:34:34 2022 +0100
+++ b/rust/rhg/src/commands/debugrequirements.rs	Fri Nov 25 15:14:40 2022 +0100
@@ -4,8 +4,8 @@
 Print the current repo requirements.
 ";
 
-pub fn args() -> clap::App<'static, 'static> {
-    clap::SubCommand::with_name("debugrequirements").about(HELP_TEXT)
+pub fn args() -> clap::Command {
+    clap::command!("debugrequirements").about(HELP_TEXT)
 }
 
 pub fn run(invocation: &crate::CliInvocation) -> Result<(), CommandError> {
--- a/rust/rhg/src/commands/debugrhgsparse.rs	Thu Nov 24 10:34:34 2022 +0100
+++ b/rust/rhg/src/commands/debugrhgsparse.rs	Fri Nov 25 15:14:40 2022 +0100
@@ -1,19 +1,21 @@
-use std::os::unix::prelude::OsStrExt;
+use std::{
+    ffi::{OsStr, OsString},
+    os::unix::prelude::OsStrExt,
+};
 
 use crate::error::CommandError;
-use clap::SubCommand;
 use hg::{self, utils::hg_path::HgPath};
 
 pub const HELP_TEXT: &str = "";
 
-pub fn args() -> clap::App<'static, 'static> {
-    SubCommand::with_name("debugrhgsparse")
+pub fn args() -> clap::Command {
+    clap::command!("debugrhgsparse")
         .arg(
-            clap::Arg::with_name("files")
+            clap::Arg::new("files")
+                .value_name("FILES")
                 .required(true)
-                .multiple(true)
-                .empty_values(false)
-                .value_name("FILES")
+                .num_args(1..)
+                .value_parser(clap::value_parser!(std::ffi::OsString))
                 .help("Files to check against sparse profile"),
         )
         .about(HELP_TEXT)
@@ -23,8 +25,12 @@
     let repo = invocation.repo?;
 
     let (matcher, _warnings) = hg::sparse::matcher(&repo).unwrap();
-    let files = invocation.subcommand_args.values_of_os("files");
+    let files = invocation.subcommand_args.get_many::<OsString>("files");
     if let Some(files) = files {
+        let files: Vec<&OsStr> = files
+            .filter(|s| !s.is_empty())
+            .map(|s| s.as_os_str())
+            .collect();
         for file in files {
             invocation.ui.write_stdout(b"matches: ")?;
             invocation.ui.write_stdout(
--- a/rust/rhg/src/commands/files.rs	Thu Nov 24 10:34:34 2022 +0100
+++ b/rust/rhg/src/commands/files.rs	Fri Nov 25 15:14:40 2022 +0100
@@ -14,15 +14,14 @@
 Returns 0 on success.
 ";
 
-pub fn args() -> clap::App<'static, 'static> {
-    clap::SubCommand::with_name("files")
+pub fn args() -> clap::Command {
+    clap::command!("files")
         .arg(
-            Arg::with_name("rev")
+            Arg::new("rev")
                 .help("search the repository as it is in REV")
-                .short("-r")
-                .long("--revision")
-                .value_name("REV")
-                .takes_value(true),
+                .short('r')
+                .long("revision")
+                .value_name("REV"),
         )
         .about(HELP_TEXT)
 }
@@ -35,7 +34,7 @@
         ));
     }
 
-    let rev = invocation.subcommand_args.value_of("rev");
+    let rev = invocation.subcommand_args.get_one::<String>("rev");
 
     let repo = invocation.repo?;
 
@@ -57,7 +56,8 @@
                 "rhg files -r <rev> is not supported in narrow clones",
             ));
         }
-        let files = list_rev_tracked_files(repo, rev).map_err(|e| (e, rev))?;
+        let files = list_rev_tracked_files(repo, rev)
+            .map_err(|e| (e, rev.as_ref()))?;
         display_files(invocation.ui, repo, files.iter())
     } else {
         // The dirstate always reflects the sparse narrowspec, so if
--- a/rust/rhg/src/commands/root.rs	Thu Nov 24 10:34:34 2022 +0100
+++ b/rust/rhg/src/commands/root.rs	Fri Nov 25 15:14:40 2022 +0100
@@ -9,8 +9,8 @@
 Returns 0 on success.
 ";
 
-pub fn args() -> clap::App<'static, 'static> {
-    clap::SubCommand::with_name("root").about(HELP_TEXT)
+pub fn args() -> clap::Command {
+    clap::command!("root").about(HELP_TEXT)
 }
 
 pub fn run(invocation: &crate::CliInvocation) -> Result<(), CommandError> {
--- a/rust/rhg/src/commands/status.rs	Thu Nov 24 10:34:34 2022 +0100
+++ b/rust/rhg/src/commands/status.rs	Fri Nov 25 15:14:40 2022 +0100
@@ -8,7 +8,7 @@
 use crate::error::CommandError;
 use crate::ui::Ui;
 use crate::utils::path_utils::RelativizePaths;
-use clap::{Arg, SubCommand};
+use clap::Arg;
 use format_bytes::format_bytes;
 use hg::config::Config;
 use hg::dirstate::has_exec_bit;
@@ -41,75 +41,86 @@
 Some options might be missing; check the list below.
 ";
 
-pub fn args() -> clap::App<'static, 'static> {
-    SubCommand::with_name("status")
+pub fn args() -> clap::Command {
+    clap::command!("status")
         .alias("st")
         .about(HELP_TEXT)
         .arg(
-            Arg::with_name("all")
+            Arg::new("all")
                 .help("show status of all files")
-                .short("-A")
-                .long("--all"),
+                .short('A')
+                .action(clap::ArgAction::SetTrue)
+                .long("all"),
         )
         .arg(
-            Arg::with_name("modified")
+            Arg::new("modified")
                 .help("show only modified files")
-                .short("-m")
-                .long("--modified"),
+                .short('m')
+                .action(clap::ArgAction::SetTrue)
+                .long("modified"),
         )
         .arg(
-            Arg::with_name("added")
+            Arg::new("added")
                 .help("show only added files")
-                .short("-a")
-                .long("--added"),
+                .short('a')
+                .action(clap::ArgAction::SetTrue)
+                .long("added"),
         )
         .arg(
-            Arg::with_name("removed")
+            Arg::new("removed")
                 .help("show only removed files")
-                .short("-r")
-                .long("--removed"),
+                .short('r')
+                .action(clap::ArgAction::SetTrue)
+                .long("removed"),
         )
         .arg(
-            Arg::with_name("clean")
+            Arg::new("clean")
                 .help("show only clean files")
-                .short("-c")
-                .long("--clean"),
+                .short('c')
+                .action(clap::ArgAction::SetTrue)
+                .long("clean"),
         )
         .arg(
-            Arg::with_name("deleted")
+            Arg::new("deleted")
                 .help("show only deleted files")
-                .short("-d")
-                .long("--deleted"),
+                .short('d')
+                .action(clap::ArgAction::SetTrue)
+                .long("deleted"),
         )
         .arg(
-            Arg::with_name("unknown")
+            Arg::new("unknown")
                 .help("show only unknown (not tracked) files")
-                .short("-u")
-                .long("--unknown"),
+                .short('u')
+                .action(clap::ArgAction::SetTrue)
+                .long("unknown"),
         )
         .arg(
-            Arg::with_name("ignored")
+            Arg::new("ignored")
                 .help("show only ignored files")
-                .short("-i")
-                .long("--ignored"),
+                .short('i')
+                .action(clap::ArgAction::SetTrue)
+                .long("ignored"),
         )
         .arg(
-            Arg::with_name("copies")
+            Arg::new("copies")
                 .help("show source of copied files (DEFAULT: ui.statuscopies)")
-                .short("-C")
-                .long("--copies"),
+                .short('C')
+                .action(clap::ArgAction::SetTrue)
+                .long("copies"),
         )
         .arg(
-            Arg::with_name("no-status")
+            Arg::new("no-status")
                 .help("hide status prefix")
-                .short("-n")
-                .long("--no-status"),
+                .short('n')
+                .action(clap::ArgAction::SetTrue)
+                .long("no-status"),
         )
         .arg(
-            Arg::with_name("verbose")
+            Arg::new("verbose")
                 .help("enable additional output")
-                .short("-v")
-                .long("--verbose"),
+                .short('v')
+                .action(clap::ArgAction::SetTrue)
+                .long("verbose"),
         )
 }
 
@@ -200,25 +211,25 @@
     let config = invocation.config;
     let args = invocation.subcommand_args;
 
-    let verbose = !args.is_present("print0")
-        && (args.is_present("verbose")
-            || config.get_bool(b"ui", b"verbose")?
-            || config.get_bool(b"commands", b"status.verbose")?);
+    // TODO add `!args.get_flag("print0") &&` when we support `print0`
+    let verbose = args.get_flag("verbose")
+        || config.get_bool(b"ui", b"verbose")?
+        || config.get_bool(b"commands", b"status.verbose")?;
 
-    let all = args.is_present("all");
+    let all = args.get_flag("all");
     let display_states = if all {
         // TODO when implementing `--quiet`: it excludes clean files
         // from `--all`
         ALL_DISPLAY_STATES
     } else {
         let requested = DisplayStates {
-            modified: args.is_present("modified"),
-            added: args.is_present("added"),
-            removed: args.is_present("removed"),
-            clean: args.is_present("clean"),
-            deleted: args.is_present("deleted"),
-            unknown: args.is_present("unknown"),
-            ignored: args.is_present("ignored"),
+            modified: args.get_flag("modified"),
+            added: args.get_flag("added"),
+            removed: args.get_flag("removed"),
+            clean: args.get_flag("clean"),
+            deleted: args.get_flag("deleted"),
+            unknown: args.get_flag("unknown"),
+            ignored: args.get_flag("ignored"),
         };
         if requested.is_empty() {
             DEFAULT_DISPLAY_STATES
@@ -226,9 +237,9 @@
             requested
         }
     };
-    let no_status = args.is_present("no-status");
+    let no_status = args.get_flag("no-status");
     let list_copies = all
-        || args.is_present("copies")
+        || args.get_flag("copies")
         || config.get_bool(b"ui", b"statuscopies")?;
 
     let repo = invocation.repo?;
--- a/rust/rhg/src/main.rs	Thu Nov 24 10:34:34 2022 +0100
+++ b/rust/rhg/src/main.rs	Fri Nov 25 15:14:40 2022 +0100
@@ -1,10 +1,7 @@
 extern crate log;
 use crate::error::CommandError;
 use crate::ui::{local_to_utf8, Ui};
-use clap::App;
-use clap::AppSettings;
-use clap::Arg;
-use clap::ArgMatches;
+use clap::{command, Arg, ArgMatches};
 use format_bytes::{format_bytes, join};
 use hg::config::{Config, ConfigSource, PlainInfo};
 use hg::repo::{Repo, RepoError};
@@ -35,55 +32,47 @@
 ) -> Result<(), CommandError> {
     check_unsupported(config, repo)?;
 
-    let app = App::new("rhg")
-        .global_setting(AppSettings::AllowInvalidUtf8)
-        .global_setting(AppSettings::DisableVersion)
-        .setting(AppSettings::SubcommandRequired)
-        .setting(AppSettings::VersionlessSubcommands)
+    let app = command!()
+        .subcommand_required(true)
         .arg(
-            Arg::with_name("repository")
+            Arg::new("repository")
                 .help("repository root directory")
-                .short("-R")
-                .long("--repository")
+                .short('R')
                 .value_name("REPO")
-                .takes_value(true)
                 // Both ok: `hg -R ./foo log` or `hg log -R ./foo`
                 .global(true),
         )
         .arg(
-            Arg::with_name("config")
+            Arg::new("config")
                 .help("set/override config option (use 'section.name=value')")
-                .long("--config")
                 .value_name("CONFIG")
-                .takes_value(true)
                 .global(true)
+                .long("config")
                 // Ok: `--config section.key1=val --config section.key2=val2`
-                .multiple(true)
                 // Not ok: `--config section.key1=val section.key2=val2`
-                .number_of_values(1),
+                .action(clap::ArgAction::Append),
         )
         .arg(
-            Arg::with_name("cwd")
+            Arg::new("cwd")
                 .help("change working directory")
-                .long("--cwd")
                 .value_name("DIR")
-                .takes_value(true)
+                .long("cwd")
                 .global(true),
         )
         .arg(
-            Arg::with_name("color")
+            Arg::new("color")
                 .help("when to colorize (boolean, always, auto, never, or debug)")
-                .long("--color")
                 .value_name("TYPE")
-                .takes_value(true)
+                .long("color")
                 .global(true),
         )
         .version("0.0.1");
     let app = add_subcommand_args(app);
 
-    let matches = app.clone().get_matches_from_safe(argv.iter())?;
+    let matches = app.clone().try_get_matches_from(argv.iter())?;
 
-    let (subcommand_name, subcommand_matches) = matches.subcommand();
+    let (subcommand_name, subcommand_args) =
+        matches.subcommand().expect("subcommand required");
 
     // Mercurial allows users to define "defaults" for commands, fallback
     // if a default is detected for the current command
@@ -104,9 +93,7 @@
         }
     }
     let run = subcommand_run_fn(subcommand_name)
-        .expect("unknown subcommand name from clap despite AppSettings::SubcommandRequired");
-    let subcommand_args = subcommand_matches
-        .expect("no subcommand arguments from clap despite AppSettings::SubcommandRequired");
+        .expect("unknown subcommand name from clap despite Command::subcommand_required");
 
     let invocation = CliInvocation {
         ui,
@@ -535,7 +522,7 @@
             )+
         }
 
-        fn add_subcommand_args<'a, 'b>(app: App<'a, 'b>) -> App<'a, 'b> {
+        fn add_subcommand_args(app: clap::Command) -> clap::Command {
             app
             $(
                 .subcommand(commands::$command::args())
@@ -569,7 +556,7 @@
 
 pub struct CliInvocation<'a> {
     ui: &'a Ui,
-    subcommand_args: &'a ArgMatches<'a>,
+    subcommand_args: &'a ArgMatches,
     config: &'a Config,
     /// References inside `Result` is a bit peculiar but allow
     /// `invocation.repo?` to work out with `&CliInvocation` since this
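
The top-level dispatch mirrors the rest of the clap 4 migration: `subcommand_required(true)` replaces `AppSettings::SubcommandRequired`, `try_get_matches_from` replaces `get_matches_from_safe`, and `ArgMatches::subcommand` now returns `Option<(&str, &ArgMatches)>`, so a single `expect` covers what used to take two. (The `command!()` macro pulls the binary name and version from Cargo.toml and requires clap's "cargo" feature, which the `rhg` Cargo.toml hunk above enables.) A reduced sketch of the dispatch:

    fn dispatch(argv: &[&str]) -> Result<String, clap::Error> {
        let app = clap::Command::new("rhg")
            .subcommand_required(true)
            .subcommand(clap::Command::new("root"))
            .subcommand(clap::Command::new("files"));
        let matches = app.try_get_matches_from(argv.iter().copied())?;
        let (name, _subcommand_args) =
            matches.subcommand().expect("subcommand required");
        Ok(name.to_owned())
    }

    // dispatch(&["rhg", "root"]) == Ok("root".to_owned())
    // dispatch(&["rhg"]) is Err(_): a subcommand is required
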
--- a/setup.py	Thu Nov 24 10:34:34 2022 +0100
+++ b/setup.py	Fri Nov 25 15:14:40 2022 +0100
@@ -1607,6 +1607,10 @@
     'mercurial.helptext.internals': [
         '*.txt',
     ],
+    'mercurial.thirdparty.attr': [
+        '*.pyi',
+        'py.typed',
+    ],
 }
 
 
--- a/tests/test-bundle.t	Thu Nov 24 10:34:34 2022 +0100
+++ b/tests/test-bundle.t	Fri Nov 25 15:14:40 2022 +0100
@@ -1039,6 +1039,24 @@
   $ hg bundle -a --config devel.bundle.delta=full ./full.hg
   3 changesets found
 
+
+Test the debug statistics when building a bundle
+------------------------------------------------
+
+  $ hg bundle -a ./default.hg --config debug.bundling-stats=yes
+  3 changesets found
+  DEBUG-BUNDLING: revisions:                9
+  DEBUG-BUNDLING:   changelog:              3
+  DEBUG-BUNDLING:   manifest:               3
+  DEBUG-BUNDLING:   files:                  3 (for 3 revlogs)
+  DEBUG-BUNDLING: deltas:
+  DEBUG-BUNDLING:   from-storage:           2 (100% of available 2)
+  DEBUG-BUNDLING:   computed:               7
+  DEBUG-BUNDLING:     full:                 7 (100% of native 7)
+  DEBUG-BUNDLING:       changelog:          3 (100% of native 3)
+  DEBUG-BUNDLING:       manifests:          1 (100% of native 1)
+  DEBUG-BUNDLING:       files:              3 (100% of native 3)
+
 Test the debug output when applying delta
 -----------------------------------------
 
@@ -1048,18 +1066,62 @@
   > --config storage.revlog.reuse-external-delta=no \
   > --config storage.revlog.reuse-external-delta-parent=no
   adding changesets
-  DBG-DELTAS: CHANGELOG:   rev=0: search-rounds=0 try-count=0 - delta-type=full   snap-depth=0 - p1-chain-length=-1 p2-chain-length=-1 - duration=* (glob)
-  DBG-DELTAS: CHANGELOG:   rev=1: search-rounds=0 try-count=0 - delta-type=full   snap-depth=0 - p1-chain-length=0 p2-chain-length=-1 - duration=* (glob)
-  DBG-DELTAS: CHANGELOG:   rev=2: search-rounds=0 try-count=0 - delta-type=full   snap-depth=0 - p1-chain-length=0 p2-chain-length=-1 - duration=* (glob)
+  DBG-DELTAS: CHANGELOG:   rev=0: delta-base=0 is-cached=0 - search-rounds=0 try-count=0 - delta-type=full   snap-depth=0 - p1-chain-length=-1 p2-chain-length=-1 - duration=* (glob)
+  DBG-DELTAS: CHANGELOG:   rev=1: delta-base=1 is-cached=0 - search-rounds=0 try-count=0 - delta-type=full   snap-depth=0 - p1-chain-length=0 p2-chain-length=-1 - duration=* (glob)
+  DBG-DELTAS: CHANGELOG:   rev=2: delta-base=2 is-cached=0 - search-rounds=0 try-count=0 - delta-type=full   snap-depth=0 - p1-chain-length=0 p2-chain-length=-1 - duration=* (glob)
   adding manifests
-  DBG-DELTAS: MANIFESTLOG: rev=0: search-rounds=0 try-count=0 - delta-type=full   snap-depth=0 - p1-chain-length=-1 p2-chain-length=-1 - duration=* (glob)
-  DBG-DELTAS: MANIFESTLOG: rev=1: search-rounds=1 try-count=1 - delta-type=delta  snap-depth=0 - p1-chain-length=0 p2-chain-length=-1 - duration=* (glob)
-  DBG-DELTAS: MANIFESTLOG: rev=2: search-rounds=1 try-count=1 - delta-type=delta  snap-depth=0 - p1-chain-length=1 p2-chain-length=-1 - duration=* (glob)
+  DBG-DELTAS: MANIFESTLOG: rev=0: delta-base=0 is-cached=0 - search-rounds=0 try-count=0 - delta-type=full   snap-depth=0 - p1-chain-length=-1 p2-chain-length=-1 - duration=* (glob)
+  DBG-DELTAS: MANIFESTLOG: rev=1: delta-base=0 is-cached=1 - search-rounds=1 try-count=1 - delta-type=delta  snap-depth=0 - p1-chain-length=0 p2-chain-length=-1 - duration=* (glob)
+  DBG-DELTAS: MANIFESTLOG: rev=2: delta-base=1 is-cached=1 - search-rounds=1 try-count=1 - delta-type=delta  snap-depth=0 - p1-chain-length=1 p2-chain-length=-1 - duration=* (glob)
   adding file changes
-  DBG-DELTAS: FILELOG:a:   rev=0: search-rounds=0 try-count=0 - delta-type=full   snap-depth=0 - p1-chain-length=-1 p2-chain-length=-1 - duration=* (glob)
-  DBG-DELTAS: FILELOG:b:   rev=0: search-rounds=0 try-count=0 - delta-type=full   snap-depth=0 - p1-chain-length=-1 p2-chain-length=-1 - duration=* (glob)
-  DBG-DELTAS: FILELOG:c:   rev=0: search-rounds=0 try-count=0 - delta-type=full   snap-depth=0 - p1-chain-length=-1 p2-chain-length=-1 - duration=* (glob)
+  DBG-DELTAS: FILELOG:a:   rev=0: delta-base=0 is-cached=0 - search-rounds=0 try-count=0 - delta-type=full   snap-depth=0 - p1-chain-length=-1 p2-chain-length=-1 - duration=* (glob)
+  DBG-DELTAS: FILELOG:b:   rev=0: delta-base=0 is-cached=0 - search-rounds=0 try-count=0 - delta-type=full   snap-depth=0 - p1-chain-length=-1 p2-chain-length=-1 - duration=* (glob)
+  DBG-DELTAS: FILELOG:c:   rev=0: delta-base=0 is-cached=0 - search-rounds=0 try-count=0 - delta-type=full   snap-depth=0 - p1-chain-length=-1 p2-chain-length=-1 - duration=* (glob)
   added 3 changesets with 3 changes to 3 files
   new changesets 4fe08cd4693e:4652c276ac4f (3 drafts)
   (run 'hg update' to get a working copy)
 
+
+Test the debug statistics when applying a bundle
+------------------------------------------------
+
+  $ hg init bar
+  $ hg init bar
+  $ hg -R bar unbundle ./default.hg --config debug.unbundling-stats=yes
+  adding changesets
+  adding manifests
+  adding file changes
+  DEBUG-UNBUNDLING: revisions:                9
+  DEBUG-UNBUNDLING:   changelog:              3             ( 33%)
+  DEBUG-UNBUNDLING:   manifests:              3             ( 33%)
+  DEBUG-UNBUNDLING:   files:                  3             ( 33%)
+  DEBUG-UNBUNDLING: total-time:      ?????????????? seconds (glob)
+  DEBUG-UNBUNDLING:   changelog:     ?????????????? seconds (???%) (glob)
+  DEBUG-UNBUNDLING:   manifests:     ?????????????? seconds (???%) (glob)
+  DEBUG-UNBUNDLING:   files:         ?????????????? seconds (???%) (glob)
+  DEBUG-UNBUNDLING: type-count:
+  DEBUG-UNBUNDLING:   changelog:
+  DEBUG-UNBUNDLING:     full:                 3
+  DEBUG-UNBUNDLING:       cached:             0             (  0%)
+  DEBUG-UNBUNDLING:   manifests:
+  DEBUG-UNBUNDLING:     full:                 1
+  DEBUG-UNBUNDLING:       cached:             0             (  0%)
+  DEBUG-UNBUNDLING:     delta:                2
+  DEBUG-UNBUNDLING:       cached:             2             (100%)
+  DEBUG-UNBUNDLING:   files:
+  DEBUG-UNBUNDLING:     full:                 3
+  DEBUG-UNBUNDLING:       cached:             0             (  0%)
+  DEBUG-UNBUNDLING: type-time:
+  DEBUG-UNBUNDLING:   changelog:
+  DEBUG-UNBUNDLING:     full:        ?????????????? seconds (???% of total) (glob)
+  DEBUG-UNBUNDLING:       cached:    ?????????????? seconds (???% of total) (glob)
+  DEBUG-UNBUNDLING:   manifests:
+  DEBUG-UNBUNDLING:     full:        ?????????????? seconds (???% of total) (glob)
+  DEBUG-UNBUNDLING:       cached:    ?????????????? seconds (???% of total) (glob)
+  DEBUG-UNBUNDLING:     delta:       ?????????????? seconds (???% of total) (glob)
+  DEBUG-UNBUNDLING:       cached:    ?????????????? seconds (???% of total) (glob)
+  DEBUG-UNBUNDLING:   files:
+  DEBUG-UNBUNDLING:     full:        ?????????????? seconds (???% of total) (glob)
+  DEBUG-UNBUNDLING:       cached:    ?????????????? seconds (???% of total) (glob)
+  added 3 changesets with 3 changes to 3 files
+  new changesets 4fe08cd4693e:4652c276ac4f (3 drafts)
+  (run 'hg update' to get a working copy)
--- a/tests/test-bundle2-exchange.t	Thu Nov 24 10:34:34 2022 +0100
+++ b/tests/test-bundle2-exchange.t	Fri Nov 25 15:14:40 2022 +0100
@@ -739,12 +739,10 @@
   $ hg -R main push ssh://user@dummy/other -r e7ec4e813ba6
   pushing to ssh://user@dummy/other
   searching for changes
-  remote: Fail early! (no-py3 chg !)
   remote: adding changesets
   remote: adding manifests
   remote: adding file changes
   remote: Fail early! (py3 !)
-  remote: Fail early! (no-py3 no-chg !)
   remote: transaction abort!
   remote: Cleaning up the mess...
   remote: rollback completed
--- a/tests/test-completion.t	Thu Nov 24 10:34:34 2022 +0100
+++ b/tests/test-completion.t	Fri Nov 25 15:14:40 2022 +0100
@@ -268,7 +268,7 @@
   config: untrusted, exp-all-known, edit, local, source, shared, non-shared, global, template
   continue: dry-run
   copy: forget, after, at-rev, force, include, exclude, dry-run
-  debug-delta-find: changelog, manifest, dir, template
+  debug-delta-find: changelog, manifest, dir, template, source
   debug-repair-issue6528: to-report, from-report, paranoid, dry-run
   debug-revlog-index: changelog, manifest, dir, template
   debugancestor: 
--- a/tests/test-convert-filemap.t	Thu Nov 24 10:34:34 2022 +0100
+++ b/tests/test-convert-filemap.t	Fri Nov 25 15:14:40 2022 +0100
@@ -292,12 +292,12 @@
   $ rm -rf source/.hg/store/data/dir/file4
 #endif
   $ hg -q convert --filemap renames.fmap --datesort source dummydest
-  abort: data/dir/file3@e96dce0bc6a217656a3a410e5e6bec2c4f42bf7c: no match found (reporevlogstore !)
+  abort: dir/file3@e96dce0bc6a217656a3a410e5e6bec2c4f42bf7c: no match found (reporevlogstore !)
   abort: data/dir/file3/index@e96dce0bc6a2: no node (reposimplestore !)
   [50]
   $ hg -q convert --filemap renames.fmap --datesort --config convert.hg.ignoreerrors=1 source renames.repo
-  ignoring: data/dir/file3@e96dce0bc6a217656a3a410e5e6bec2c4f42bf7c: no match found (reporevlogstore !)
-  ignoring: data/dir/file4@6edd55f559cdce67132b12ca09e09cee08b60442: no match found (reporevlogstore !)
+  ignoring: dir/file3@e96dce0bc6a217656a3a410e5e6bec2c4f42bf7c: no match found (reporevlogstore !)
+  ignoring: dir/file4@6edd55f559cdce67132b12ca09e09cee08b60442: no match found (reporevlogstore !)
   ignoring: data/dir/file3/index@e96dce0bc6a2: no node (reposimplestore !)
   ignoring: data/dir/file4/index@6edd55f559cd: no node (reposimplestore !)
   $ hg up -q -R renames.repo
--- a/tests/test-convert-hg-source.t	Thu Nov 24 10:34:34 2022 +0100
+++ b/tests/test-convert-hg-source.t	Fri Nov 25 15:14:40 2022 +0100
@@ -182,7 +182,7 @@
   sorting...
   converting...
   4 init
-  ignoring: data/b@1e88685f5ddec574a34c70af492f95b6debc8741: no match found (reporevlogstore !)
+  ignoring: b@1e88685f5ddec574a34c70af492f95b6debc8741: no match found (reporevlogstore !)
   ignoring: data/b/index@1e88685f5dde: no node (reposimplestore !)
   3 changeall
   2 changebagain
--- a/tests/test-debugcommands.t	Thu Nov 24 10:34:34 2022 +0100
+++ b/tests/test-debugcommands.t	Fri Nov 25 15:14:40 2022 +0100
@@ -39,6 +39,9 @@
   chunks size   : 191
       0x75 (u)  : 191 (100.00%)
   
+  
+  total-stored-content: 188 bytes
+  
   avg chain length  :  0
   max chain length  :  0
   max chain reach   : 67
@@ -74,6 +77,9 @@
       empty     :  0 ( 0.00%)
       0x75 (u)  : 88 (100.00%)
   
+  
+  total-stored-content: 86 bytes
+  
   avg chain length  :  0
   max chain length  :  0
   max chain reach   : 44
@@ -107,6 +113,9 @@
   chunks size   : 3
       0x75 (u)  : 3 (100.00%)
   
+  
+  total-stored-content: 2 bytes
+  
   avg chain length  : 0
   max chain length  : 0
   max chain reach   : 3
--- a/tests/test-demandimport.py	Thu Nov 24 10:34:34 2022 +0100
+++ b/tests/test-demandimport.py	Fri Nov 25 15:14:40 2022 +0100
@@ -234,3 +234,11 @@
 zipfileimp = __import__('ftplib', globals(), locals(), ['unknownattr'])
 assert f(zipfileimp) == "<module 'ftplib' from '?'>", f(zipfileimp)
 assert not util.safehasattr(zipfileimp, 'unknownattr')
+
+
+# test deactivation for issue6725
+del sys.modules['telnetlib']
+with demandimport.deactivated():
+    import telnetlib
+assert telnetlib.__loader__ == telnetlib.__spec__.loader
+assert telnetlib.__loader__.get_resource_reader
--- a/tests/test-install.t	Thu Nov 24 10:34:34 2022 +0100
+++ b/tests/test-install.t	Fri Nov 25 15:14:40 2022 +0100
@@ -233,42 +233,3 @@
   checking username (test)
   no problems detected
 #endif
-
-#if virtualenv no-py3 network-io no-pyoxidizer
-
-Note: --no-site-packages is the default for all versions enabled by hghave
-
-  $ "$PYTHON" -m virtualenv installenv >> pip.log
-  DEPRECATION: Python 2.7 will reach the end of its life on January 1st, 2020. Please upgrade your Python as Python 2.7 won't be maintained after that date. A future version of pip will drop support for Python 2.7. (?)
-  DEPRECATION: Python 2.7 will reach the end of its life on January 1st, 2020. Please upgrade your Python as Python 2.7 won't be maintained after that date. A future version of pip will drop support for Python 2.7. More details about Python 2 support in pip, can be found at https://pip.pypa.io/en/latest/development/release-process/#python-2-support (?)
-
-Note: we use this weird path to run pip and hg to avoid platform differences,
-since it's bin on most platforms but Scripts on Windows.
-  $ ./installenv/*/pip install $TESTDIR/.. >> pip.log
-  DEPRECATION: Python 2.7 will reach the end of its life on January 1st, 2020. Please upgrade your Python as Python 2.7 won't be maintained after that date. A future version of pip will drop support for Python 2.7. (?)
-  DEPRECATION: Python 2.7 will reach the end of its life on January 1st, 2020. Please upgrade your Python as Python 2.7 won't be maintained after that date. A future version of pip will drop support for Python 2.7. More details about Python 2 support in pip, can be found at https://pip.pypa.io/en/latest/development/release-process/#python-2-support (?)
-  DEPRECATION: Python 2.7 reached the end of its life on January 1st, 2020. Please upgrade your Python as Python 2.7 is no longer maintained. pip 21.0 will drop support for Python 2.7 in January 2021. More details about Python 2 support in pip can be found at https://pip.pypa.io/en/latest/development/release-process/#python-2-support pip 21.0 will remove support for this functionality. (?)
-  $ ./installenv/*/hg debuginstall || cat pip.log
-  checking encoding (ascii)...
-  checking Python executable (*) (glob)
-  checking Python implementation (*) (glob)
-  checking Python version (2.*) (glob)
-  checking Python lib (*)... (glob)
-  checking Python security support (*) (glob)
-    TLS 1.2 not supported by Python install; network connections lack modern security (?)
-    SNI not supported by Python install; may have connectivity issues with some servers (?)
-  checking Rust extensions \((installed|missing)\) (re)
-  checking Mercurial version (*) (glob)
-  checking Mercurial custom build (*) (glob)
-  checking module policy (*) (glob)
-  checking installed modules (*/mercurial)... (glob)
-  checking registered compression engines (*) (glob)
-  checking available compression engines (*) (glob)
-  checking available compression engines for wire protocol (*) (glob)
-  checking "re2" regexp engine \((available|missing)\) (re)
-  checking templates ($TESTTMP/installenv/*/site-packages/mercurial/templates)... (glob)
-  checking default template ($TESTTMP/installenv/*/site-packages/mercurial/templates/map-cmdline.default) (glob)
-  checking commit editor... (*) (glob)
-  checking username (test)
-  no problems detected
-#endif
--- a/tests/test-lfs.t	Thu Nov 24 10:34:34 2022 +0100
+++ b/tests/test-lfs.t	Fri Nov 25 15:14:40 2022 +0100
@@ -787,8 +787,8 @@
   checking manifests
   crosschecking files in changesets and manifests
   checking files
-   l@1: unpacking 46a2f24864bc: integrity check failed on data/l:0
-   large@0: unpacking 2c531e0992ff: integrity check failed on data/large:0
+   l@1: unpacking 46a2f24864bc: integrity check failed on l:0
+   large@0: unpacking 2c531e0992ff: integrity check failed on large:0
   checked 5 changesets with 10 changes to 4 files
   2 integrity errors encountered!
   (first damaged changeset appears to be 0)
@@ -897,9 +897,9 @@
   checking manifests
   crosschecking files in changesets and manifests
   checking files
-   l@1: unpacking 46a2f24864bc: integrity check failed on data/l:0
+   l@1: unpacking 46a2f24864bc: integrity check failed on l:0
   lfs: found 22f66a3fc0b9bf3f012c814303995ec07099b3a9ce02a7af84b5970811074a3b in the local lfs store
-   large@0: unpacking 2c531e0992ff: integrity check failed on data/large:0
+   large@0: unpacking 2c531e0992ff: integrity check failed on large:0
   lfs: found 89b6070915a3d573ff3599d1cda305bc5e38549b15c4847ab034169da66e1ca8 in the local lfs store
   lfs: found b1a6ea88da0017a0e77db139a54618986e9a2489bee24af9fe596de9daac498c in the local lfs store
   checked 5 changesets with 10 changes to 4 files
@@ -941,8 +941,8 @@
   checking manifests
   crosschecking files in changesets and manifests
   checking files
-   l@1: unpacking 46a2f24864bc: integrity check failed on data/l:0
-   large@0: unpacking 2c531e0992ff: integrity check failed on data/large:0
+   l@1: unpacking 46a2f24864bc: integrity check failed on l:0
+   large@0: unpacking 2c531e0992ff: integrity check failed on large:0
   checked 5 changesets with 10 changes to 4 files
   2 integrity errors encountered!
   (first damaged changeset appears to be 0)
@@ -967,9 +967,9 @@
   checking manifests
   crosschecking files in changesets and manifests
   checking files
-   l@1: unpacking 46a2f24864bc: integrity check failed on data/l:0
+   l@1: unpacking 46a2f24864bc: integrity check failed on l:0
   lfs: found 22f66a3fc0b9bf3f012c814303995ec07099b3a9ce02a7af84b5970811074a3b in the local lfs store
-   large@0: unpacking 2c531e0992ff: integrity check failed on data/large:0
+   large@0: unpacking 2c531e0992ff: integrity check failed on large:0
   lfs: found 89b6070915a3d573ff3599d1cda305bc5e38549b15c4847ab034169da66e1ca8 in the local lfs store
   lfs: found b1a6ea88da0017a0e77db139a54618986e9a2489bee24af9fe596de9daac498c in the local lfs store
   checked 5 changesets with 10 changes to 4 files
@@ -987,7 +987,7 @@
 Accessing a corrupt file will complain
 
   $ hg --cwd fromcorrupt2 cat -r 0 large
-  abort: integrity check failed on data/large:0
+  abort: integrity check failed on large:0
   [50]
 
 lfs -> normal -> lfs round trip conversions are possible.  The 'none()'
--- a/tests/test-narrow-exchange.t	Thu Nov 24 10:34:34 2022 +0100
+++ b/tests/test-narrow-exchange.t	Fri Nov 25 15:14:40 2022 +0100
@@ -218,8 +218,8 @@
   remote: adding manifests
   remote: adding file changes
   remote: added 1 changesets with 0 changes to 0 files (no-lfs-on !)
-  remote: error: pretxnchangegroup.lfs hook raised an exception: data/inside2/f@f59b4e0218355383d2789196f1092abcf2262b0c: no match found (lfs-on !)
+  remote: error: pretxnchangegroup.lfs hook raised an exception: inside2/f@f59b4e0218355383d2789196f1092abcf2262b0c: no match found (lfs-on !)
   remote: transaction abort! (lfs-on !)
   remote: rollback completed (lfs-on !)
-  remote: abort: data/inside2/f@f59b4e0218355383d2789196f1092abcf2262b0c: no match found (lfs-on !)
+  remote: abort: inside2/f@f59b4e0218355383d2789196f1092abcf2262b0c: no match found (lfs-on !)
   abort: stream ended unexpectedly (got 0 bytes, expected 4) (lfs-on !)
--- a/tests/test-pull-bundle.t	Thu Nov 24 10:34:34 2022 +0100
+++ b/tests/test-pull-bundle.t	Fri Nov 25 15:14:40 2022 +0100
@@ -33,8 +33,6 @@
 
   $ cd repo
   $ cat <<EOF > .hg/hgrc
-  > [server]
-  > pullbundle = True
   > [experimental]
   > evolution = True
   > [extensions]
--- a/tests/test-rhg.t	Thu Nov 24 10:34:34 2022 +0100
+++ b/tests/test-rhg.t	Fri Nov 25 15:14:40 2022 +0100
@@ -4,12 +4,11 @@
 
 Unimplemented command
   $ $NO_FALLBACK rhg unimplemented-command
-  unsupported feature: error: Found argument 'unimplemented-command' which wasn't expected, or isn't valid in this context
+  unsupported feature: error: The subcommand 'unimplemented-command' wasn't recognized
   
-  USAGE:
-      rhg [OPTIONS] <SUBCOMMAND>
+  Usage: rhg [OPTIONS] <COMMAND>
   
-  For more information try --help
+  For more information try '--help'
   
   [252]
   $ rhg unimplemented-command --config rhg.on-unsupported=abort-silent
@@ -159,10 +158,11 @@
   $ $NO_FALLBACK rhg cat original --exclude="*.rs"
   unsupported feature: error: Found argument '--exclude' which wasn't expected, or isn't valid in this context
   
-  USAGE:
-      rhg cat [OPTIONS] <FILE>...
+    If you tried to supply '--exclude' as a value rather than a flag, use '-- --exclude'
   
-  For more information try --help
+  Usage: rhg cat <FILE>...
+  
+  For more information try '--help'
   
   [252]
   $ rhg cat original --exclude="*.rs"
@@ -190,10 +190,11 @@
   Blocking recursive fallback. The 'rhg.fallback-executable = rhg' config points to `rhg` itself.
   unsupported feature: error: Found argument '--exclude' which wasn't expected, or isn't valid in this context
   
-  USAGE:
-      rhg cat [OPTIONS] <FILE>...
+    If you tried to supply '--exclude' as a value rather than a flag, use '-- --exclude'
   
-  For more information try --help
+  Usage: rhg cat <FILE>...
+  
+  For more information try '--help'
   
   [252]
 
--- a/tests/test-shelve.t	Thu Nov 24 10:34:34 2022 +0100
+++ b/tests/test-shelve.t	Fri Nov 25 15:14:40 2022 +0100
@@ -1600,6 +1600,7 @@
   $ rm -r .hg/shelve*
 
 #if phasebased
+  $ cp $HGRCPATH $TESTTMP/hgrc-saved
   $ cat <<EOF >> $HGRCPATH
   > [shelve]
   > store = strip
@@ -1628,3 +1629,32 @@
 #if stripbased
   $ hg log --hidden --template '{user}\n'
 #endif
+
+clean up
+
+#if phasebased
+  $ mv $TESTTMP/hgrc-saved $HGRCPATH
+#endif
+
+changed files should be reachable in all shelves
+
+create an extension that emits changed files
+
+  $ cat > shelve-changed-files.py << EOF
+  > """Command to emit changed files for a shelf"""
+  > 
+  > from mercurial import registrar, shelve
+  > 
+  > cmdtable = {}
+  > command = registrar.command(cmdtable)
+  > 
+  > 
+  > @command(b'shelve-changed-files')
+  > def shelve_changed_files(ui, repo, name):
+  >     shelf = shelve.ShelfDir(repo).get(name)
+  >     for file in shelf.changed_files(ui, repo):
+  >         ui.write(file + b'\n')
+  > EOF
+
+  $ hg --config extensions.shelve-changed-files=shelve-changed-files.py shelve-changed-files default
+  somefile.py
--- a/tests/test-sparse-revlog.t	Thu Nov 24 10:34:34 2022 +0100
+++ b/tests/test-sparse-revlog.t	Fri Nov 25 15:14:40 2022 +0100
@@ -105,11 +105,11 @@
                      delta :        0 (100.00%)
       snapshot  :      383 ( 7.66%)
         lvl-0   :              3 ( 0.06%)
-        lvl-1   :             18 ( 0.36%)
-        lvl-2   :             62 ( 1.24%)
-        lvl-3   :            108 ( 2.16%)
-        lvl-4   :            191 ( 3.82%)
-        lvl-5   :              1 ( 0.02%)
+        lvl-1   :             18 ( 0.36%)  non-ancestor-bases:        9 (50.00%)
+        lvl-2   :             62 ( 1.24%)  non-ancestor-bases:       58 (93.55%)
+        lvl-3   :            108 ( 2.16%)  non-ancestor-bases:      108 (100.00%)
+        lvl-4   :            191 ( 3.82%)  non-ancestor-bases:      180 (94.24%)
+        lvl-5   :              1 ( 0.02%)  non-ancestor-bases:        1 (100.00%)
       deltas    :     4618 (92.34%)
   revision size : 58616973
       snapshot  :  9247844 (15.78%)
@@ -126,6 +126,9 @@
   chunks size   : 58616973
       0x28      : 58616973 (100.00%)
   
+  
+  total-stored-content: 1 732 705 361 bytes
+  
   avg chain length  :        9
   max chain length  :       15
   max chain reach   : 27366701
@@ -144,9 +147,11 @@
   deltas against prev  : 3906 (84.58%)
       where prev = p1  : 3906     (100.00%)
       where prev = p2  :    0     ( 0.00%)
-      other            :    0     ( 0.00%)
+      other-ancestor   :    0     ( 0.00%)
+      unrelated        :    0     ( 0.00%)
   deltas against p1    :  649 (14.05%)
   deltas against p2    :   63 ( 1.36%)
+  deltas against ancs  :    0 ( 0.00%)
   deltas against other :    0 ( 0.00%)
 
 
@@ -159,7 +164,7 @@
      4971    4970      -1       3        5     4930    snap      19179     346472     427596   1.23414  15994877  15567281   36.40652     427596     179288   1.00000        5
   $ hg debug-delta-find SPARSE-REVLOG-TEST-FILE 4971
   DBG-DELTAS-SEARCH: SEARCH rev=4971
-  DBG-DELTAS-SEARCH: ROUND #1 - 2 candidates - search-down
+  DBG-DELTAS-SEARCH: ROUND #1 - 1 candidates - search-down
   DBG-DELTAS-SEARCH:   CANDIDATE: rev=4962
   DBG-DELTAS-SEARCH:     type=snapshot-4
   DBG-DELTAS-SEARCH:     size=18296
@@ -167,11 +172,43 @@
   DBG-DELTAS-SEARCH:     uncompressed-delta-size=30377
   DBG-DELTAS-SEARCH:     delta-search-time=* (glob)
   DBG-DELTAS-SEARCH:     DELTA: length=16872 (BAD)
-  DBG-DELTAS-SEARCH:   CANDIDATE: rev=4971
+  DBG-DELTAS-SEARCH: ROUND #2 - 1 candidates - search-down
+  DBG-DELTAS-SEARCH:   CANDIDATE: rev=4930
+  DBG-DELTAS-SEARCH:     type=snapshot-3
+  DBG-DELTAS-SEARCH:     size=39228
+  DBG-DELTAS-SEARCH:     base=4799
+  DBG-DELTAS-SEARCH:     uncompressed-delta-size=33050
+  DBG-DELTAS-SEARCH:     delta-search-time=* (glob)
+  DBG-DELTAS-SEARCH:     DELTA: length=19179 (GOOD)
+  DBG-DELTAS-SEARCH: ROUND #3 - 1 candidates - refine-down
+  DBG-DELTAS-SEARCH:   CONTENDER: rev=4930 - length=19179
+  DBG-DELTAS-SEARCH:   CANDIDATE: rev=4799
+  DBG-DELTAS-SEARCH:     type=snapshot-2
+  DBG-DELTAS-SEARCH:     size=50213
+  DBG-DELTAS-SEARCH:     base=4623
+  DBG-DELTAS-SEARCH:     uncompressed-delta-size=82661
+  DBG-DELTAS-SEARCH:     delta-search-time=* (glob)
+  DBG-DELTAS-SEARCH:     DELTA: length=49132 (BAD)
+  DBG-DELTAS: FILELOG:SPARSE-REVLOG-TEST-FILE: rev=4971: delta-base=4930 is-cached=0 - search-rounds=3 try-count=3 - delta-type=snapshot snap-depth=4 - p1-chain-length=15 p2-chain-length=-1 - duration=* (glob)
+
+  $ cat << EOF >>.hg/hgrc
+  > [storage]
+  > revlog.optimize-delta-parent-choice = no
+  > revlog.reuse-external-delta = yes
+  > EOF
+
+  $ hg debug-delta-find SPARSE-REVLOG-TEST-FILE 4971 --quiet
+  DBG-DELTAS: FILELOG:SPARSE-REVLOG-TEST-FILE: rev=4971: delta-base=4930 is-cached=0 - search-rounds=3 try-count=3 - delta-type=snapshot snap-depth=4 - p1-chain-length=15 p2-chain-length=-1 - duration=* (glob)
+  $ hg debug-delta-find SPARSE-REVLOG-TEST-FILE 4971 --source full
+  DBG-DELTAS-SEARCH: SEARCH rev=4971
+  DBG-DELTAS-SEARCH: ROUND #1 - 1 candidates - search-down
+  DBG-DELTAS-SEARCH:   CANDIDATE: rev=4962
   DBG-DELTAS-SEARCH:     type=snapshot-4
-  DBG-DELTAS-SEARCH:     size=19179
+  DBG-DELTAS-SEARCH:     size=18296
   DBG-DELTAS-SEARCH:     base=4930
-  DBG-DELTAS-SEARCH:     TOO-HIGH
+  DBG-DELTAS-SEARCH:     uncompressed-delta-size=30377
+  DBG-DELTAS-SEARCH:     delta-search-time=* (glob)
+  DBG-DELTAS-SEARCH:     DELTA: length=16872 (BAD)
   DBG-DELTAS-SEARCH: ROUND #2 - 1 candidates - search-down
   DBG-DELTAS-SEARCH:   CANDIDATE: rev=4930
   DBG-DELTAS-SEARCH:     type=snapshot-3
@@ -189,6 +226,101 @@
   DBG-DELTAS-SEARCH:     uncompressed-delta-size=82661
   DBG-DELTAS-SEARCH:     delta-search-time=* (glob)
   DBG-DELTAS-SEARCH:     DELTA: length=49132 (BAD)
-  DBG-DELTAS: FILELOG:SPARSE-REVLOG-TEST-FILE: rev=4971: search-rounds=3 try-count=3 - delta-type=snapshot snap-depth=4 - p1-chain-length=15 p2-chain-length=-1 - duration=* (glob)
+  DBG-DELTAS: FILELOG:SPARSE-REVLOG-TEST-FILE: rev=4971: delta-base=4930 is-cached=0 - search-rounds=3 try-count=3 - delta-type=snapshot snap-depth=4 - p1-chain-length=15 p2-chain-length=-1 - duration=* (glob)
+  $ hg debug-delta-find SPARSE-REVLOG-TEST-FILE 4971 --source storage
+  DBG-DELTAS-SEARCH: SEARCH rev=4971
+  DBG-DELTAS-SEARCH: ROUND #1 - 1 candidates - cached-delta
+  DBG-DELTAS-SEARCH:   CANDIDATE: rev=4930
+  DBG-DELTAS-SEARCH:     type=snapshot-3
+  DBG-DELTAS-SEARCH:     size=39228
+  DBG-DELTAS-SEARCH:     base=4799
+  DBG-DELTAS-SEARCH:     uncompressed-delta-size=33050
+  DBG-DELTAS-SEARCH:     delta-search-time=* (glob)
+  DBG-DELTAS-SEARCH:     DELTA: length=19179 (GOOD)
+  DBG-DELTAS: FILELOG:SPARSE-REVLOG-TEST-FILE: rev=4971: delta-base=4930 is-cached=1 - search-rounds=1 try-count=1 - delta-type=snapshot snap-depth=4 - p1-chain-length=15 p2-chain-length=-1 - duration=* (glob)
+  $ hg debug-delta-find SPARSE-REVLOG-TEST-FILE 4971 --source p1
+  DBG-DELTAS-SEARCH: SEARCH rev=4971
+  DBG-DELTAS-SEARCH: ROUND #1 - 1 candidates - search-down
+  DBG-DELTAS-SEARCH:   CANDIDATE: rev=4962
+  DBG-DELTAS-SEARCH:     type=snapshot-4
+  DBG-DELTAS-SEARCH:     size=18296
+  DBG-DELTAS-SEARCH:     base=4930
+  DBG-DELTAS-SEARCH:     uncompressed-delta-size=30377
+  DBG-DELTAS-SEARCH:     delta-search-time=* (glob)
+  DBG-DELTAS-SEARCH:     DELTA: length=16872 (BAD)
+  DBG-DELTAS-SEARCH: ROUND #2 - 1 candidates - search-down
+  DBG-DELTAS-SEARCH:   CANDIDATE: rev=4930
+  DBG-DELTAS-SEARCH:     type=snapshot-3
+  DBG-DELTAS-SEARCH:     size=39228
+  DBG-DELTAS-SEARCH:     base=4799
+  DBG-DELTAS-SEARCH:     uncompressed-delta-size=33050
+  DBG-DELTAS-SEARCH:     delta-search-time=* (glob)
+  DBG-DELTAS-SEARCH:     DELTA: length=19179 (GOOD)
+  DBG-DELTAS-SEARCH: ROUND #3 - 1 candidates - refine-down
+  DBG-DELTAS-SEARCH:   CONTENDER: rev=4930 - length=19179
+  DBG-DELTAS-SEARCH:   CANDIDATE: rev=4799
+  DBG-DELTAS-SEARCH:     type=snapshot-2
+  DBG-DELTAS-SEARCH:     size=50213
+  DBG-DELTAS-SEARCH:     base=4623
+  DBG-DELTAS-SEARCH:     uncompressed-delta-size=82661
+  DBG-DELTAS-SEARCH:     delta-search-time=* (glob)
+  DBG-DELTAS-SEARCH:     DELTA: length=49132 (BAD)
+  DBG-DELTAS: FILELOG:SPARSE-REVLOG-TEST-FILE: rev=4971: delta-base=4930 is-cached=0 - search-rounds=3 try-count=3 - delta-type=snapshot snap-depth=4 - p1-chain-length=15 p2-chain-length=-1 - duration=* (glob)
+  $ hg debug-delta-find SPARSE-REVLOG-TEST-FILE 4971 --source p2
+  DBG-DELTAS-SEARCH: SEARCH rev=4971
+  DBG-DELTAS-SEARCH: ROUND #1 - 1 candidates - search-down
+  DBG-DELTAS-SEARCH:   CANDIDATE: rev=4962
+  DBG-DELTAS-SEARCH:     type=snapshot-4
+  DBG-DELTAS-SEARCH:     size=18296
+  DBG-DELTAS-SEARCH:     base=4930
+  DBG-DELTAS-SEARCH:     uncompressed-delta-size=30377
+  DBG-DELTAS-SEARCH:     delta-search-time=* (glob)
+  DBG-DELTAS-SEARCH:     DELTA: length=16872 (BAD)
+  DBG-DELTAS-SEARCH: ROUND #2 - 1 candidates - search-down
+  DBG-DELTAS-SEARCH:   CANDIDATE: rev=4930
+  DBG-DELTAS-SEARCH:     type=snapshot-3
+  DBG-DELTAS-SEARCH:     size=39228
+  DBG-DELTAS-SEARCH:     base=4799
+  DBG-DELTAS-SEARCH:     uncompressed-delta-size=33050
+  DBG-DELTAS-SEARCH:     delta-search-time=* (glob)
+  DBG-DELTAS-SEARCH:     DELTA: length=19179 (GOOD)
+  DBG-DELTAS-SEARCH: ROUND #3 - 1 candidates - refine-down
+  DBG-DELTAS-SEARCH:   CONTENDER: rev=4930 - length=19179
+  DBG-DELTAS-SEARCH:   CANDIDATE: rev=4799
+  DBG-DELTAS-SEARCH:     type=snapshot-2
+  DBG-DELTAS-SEARCH:     size=50213
+  DBG-DELTAS-SEARCH:     base=4623
+  DBG-DELTAS-SEARCH:     uncompressed-delta-size=82661
+  DBG-DELTAS-SEARCH:     delta-search-time=* (glob)
+  DBG-DELTAS-SEARCH:     DELTA: length=49132 (BAD)
+  DBG-DELTAS: FILELOG:SPARSE-REVLOG-TEST-FILE: rev=4971: delta-base=4930 is-cached=0 - search-rounds=3 try-count=3 - delta-type=snapshot snap-depth=4 - p1-chain-length=15 p2-chain-length=-1 - duration=* (glob)
+  $ hg debug-delta-find SPARSE-REVLOG-TEST-FILE 4971 --source prev
+  DBG-DELTAS-SEARCH: SEARCH rev=4971
+  DBG-DELTAS-SEARCH: ROUND #1 - 1 candidates - search-down
+  DBG-DELTAS-SEARCH:   CANDIDATE: rev=4962
+  DBG-DELTAS-SEARCH:     type=snapshot-4
+  DBG-DELTAS-SEARCH:     size=18296
+  DBG-DELTAS-SEARCH:     base=4930
+  DBG-DELTAS-SEARCH:     uncompressed-delta-size=30377
+  DBG-DELTAS-SEARCH:     delta-search-time=* (glob)
+  DBG-DELTAS-SEARCH:     DELTA: length=16872 (BAD)
+  DBG-DELTAS-SEARCH: ROUND #2 - 1 candidates - search-down
+  DBG-DELTAS-SEARCH:   CANDIDATE: rev=4930
+  DBG-DELTAS-SEARCH:     type=snapshot-3
+  DBG-DELTAS-SEARCH:     size=39228
+  DBG-DELTAS-SEARCH:     base=4799
+  DBG-DELTAS-SEARCH:     uncompressed-delta-size=33050
+  DBG-DELTAS-SEARCH:     delta-search-time=* (glob)
+  DBG-DELTAS-SEARCH:     DELTA: length=19179 (GOOD)
+  DBG-DELTAS-SEARCH: ROUND #3 - 1 candidates - refine-down
+  DBG-DELTAS-SEARCH:   CONTENDER: rev=4930 - length=19179
+  DBG-DELTAS-SEARCH:   CANDIDATE: rev=4799
+  DBG-DELTAS-SEARCH:     type=snapshot-2
+  DBG-DELTAS-SEARCH:     size=50213
+  DBG-DELTAS-SEARCH:     base=4623
+  DBG-DELTAS-SEARCH:     uncompressed-delta-size=82661
+  DBG-DELTAS-SEARCH:     delta-search-time=* (glob)
+  DBG-DELTAS-SEARCH:     DELTA: length=49132 (BAD)
+  DBG-DELTAS: FILELOG:SPARSE-REVLOG-TEST-FILE: rev=4971: delta-base=4930 is-cached=0 - search-rounds=3 try-count=3 - delta-type=snapshot snap-depth=4 - p1-chain-length=15 p2-chain-length=-1 - duration=* (glob)
 
   $ cd ..
--- a/tests/test-ssh-bundle1.t	Thu Nov 24 10:34:34 2022 +0100
+++ b/tests/test-ssh-bundle1.t	Fri Nov 25 15:14:40 2022 +0100
@@ -293,9 +293,7 @@
   remote: adding manifests
   remote: adding file changes
   remote: added 1 changesets with 1 changes to 1 files (py3 !)
-  remote: added 1 changesets with 1 changes to 1 files (no-py3 no-chg !)
   remote: KABOOM
-  remote: added 1 changesets with 1 changes to 1 files (no-py3 chg !)
   $ hg -R ../remote heads
   changeset:   5:1383141674ec
   tag:         tip
@@ -463,9 +461,7 @@
   remote: adding manifests
   remote: adding file changes
   remote: added 1 changesets with 1 changes to 1 files (py3 !)
-  remote: added 1 changesets with 1 changes to 1 files (no-py3 no-chg !)
   remote: KABOOM
-  remote: added 1 changesets with 1 changes to 1 files (no-py3 chg !)
   local stdout
 
 debug output
--- a/tests/test-ssh.t	Thu Nov 24 10:34:34 2022 +0100
+++ b/tests/test-ssh.t	Fri Nov 25 15:14:40 2022 +0100
@@ -290,10 +290,8 @@
   remote: adding manifests
   remote: adding file changes
   remote: added 1 changesets with 1 changes to 1 files (py3 !)
-  remote: added 1 changesets with 1 changes to 1 files (no-py3 no-chg !)
   remote: KABOOM
   remote: KABOOM IN PROCESS
-  remote: added 1 changesets with 1 changes to 1 files (no-py3 chg !)
   $ hg -R ../remote heads
   changeset:   5:1383141674ec
   tag:         tip
@@ -515,10 +513,8 @@
   remote: adding manifests
   remote: adding file changes
   remote: added 1 changesets with 1 changes to 1 files (py3 !)
-  remote: added 1 changesets with 1 changes to 1 files (no-py3 no-chg !)
   remote: KABOOM
   remote: KABOOM IN PROCESS
-  remote: added 1 changesets with 1 changes to 1 files (no-py3 chg !)
   local stdout
 
 debug output
--- a/tests/test-util.py	Thu Nov 24 10:34:34 2022 +0100
+++ b/tests/test-util.py	Fri Nov 25 15:14:40 2022 +0100
@@ -50,7 +50,7 @@
 
 # attr.s default factory for util.timedstats.start binds the timer we
 # need to mock out.
-_start_default = (util.timedcmstats.start.default, 'factory')
+_start_default = (util.timedcmstats.__attrs_attrs__.start.default, 'factory')
 
 
 @contextlib.contextmanager
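
Editor's note: test-util.py now reaches the attr default through __attrs_attrs__, since plain class-attribute access to per-field metadata is not something newer attrs releases guarantee. A hedged sketch of the same lookup on a toy class; Timed is illustrative only, and attr.fields() is the documented spelling of the identical access:

    import attr

    @attr.s
    class Timed:
        start = attr.ib(default=attr.Factory(list))

    # Both spellings reach the same Attribute object, whose default is
    # a Factory wrapper around the callable.
    assert Timed.__attrs_attrs__.start is attr.fields(Timed).start
    assert Timed.__attrs_attrs__.start.default.factory is list
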
--- a/tests/test-verify.t	Thu Nov 24 10:34:34 2022 +0100
+++ b/tests/test-verify.t	Fri Nov 25 15:14:40 2022 +0100
@@ -297,7 +297,7 @@
   checking manifests
   crosschecking files in changesets and manifests
   checking files
-   a@1: broken revlog! (index data/a is corrupted)
+   a@1: broken revlog! (index a is corrupted)
   warning: orphan data file 'data/a.i'
   checked 2 changesets with 0 changes to 1 files
   1 warnings encountered!
@@ -350,7 +350,7 @@
   checking manifests
   crosschecking files in changesets and manifests
   checking files
-   base64@0: unpacking 794cee7777cb: integrity check failed on data/base64:0
+   base64@0: unpacking 794cee7777cb: integrity check failed on base64:0
   checked 1 changesets with 1 changes to 1 files
   1 integrity errors encountered!
   (first damaged changeset appears to be 0)