changeset 50819:3ccef7902070

branching: merge stable into default
author Raphaël Gomès <rgomes@octobus.net>
date Mon, 07 Aug 2023 11:08:00 +0200
parents 5c3d07950bac (diff) 04d5cde28a7f (current diff)
children b922c767b214
files contrib/perf.py mercurial/extensions.py tests/test-transaction-rollback-on-revlog-split.t
diffstat 94 files changed, 5407 insertions(+), 7460 deletions(-)
--- a/contrib/byteify-strings.py	Mon Aug 07 11:05:43 2023 +0200
+++ b/contrib/byteify-strings.py	Mon Aug 07 11:08:00 2023 +0200
@@ -212,18 +212,14 @@
             fn = t.string
 
             # *attr() builtins don't accept byte strings to 2nd argument.
-            if (
-                fn
-                in (
-                    'getattr',
-                    'setattr',
-                    'hasattr',
-                    'safehasattr',
-                    'wrapfunction',
-                    'wrapclass',
-                    'addattr',
-                )
-                and (opts['allow-attr-methods'] or not _isop(i - 1, '.'))
+            if fn in (
+                'getattr',
+                'setattr',
+                'hasattr',
+                'safehasattr',
+                'wrapfunction',
+                'wrapclass',
+                'addattr',
             ):
                 arg1idx = _findargnofcall(1)
                 if arg1idx is not None:
@@ -312,12 +308,6 @@
         help='rewrite iteritems() and itervalues()',
     ),
     ap.add_argument(
-        '--allow-attr-methods',
-        action='store_true',
-        default=False,
-        help='also handle attr*() when they are methods',
-    ),
-    ap.add_argument(
         '--treat-as-kwargs',
         nargs="+",
         default=[],
@@ -328,7 +318,6 @@
     opts = {
         'dictiter': args.dictiter,
         'treat-as-kwargs': set(args.treat_as_kwargs),
-        'allow-attr-methods': args.allow_attr_methods,
     }
     for fname in args.files:
         fname = os.path.realpath(fname)
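The hunk above drops the --allow-attr-methods switch: the str-preserving treatment of the second argument of the *attr() helpers now applies whether they are called as plain functions or as methods, so the extra opt-in flag is no longer needed. A minimal, self-contained sketch of the underlying rationale (illustrative only, not taken from the script itself):

class _Demo:
    name = 1

# getattr() and friends only accept native str attribute names on Python 3,
# which is why byteify-strings must leave their second argument as a str
# while converting other string literals to bytes literals.
assert getattr(_Demo, 'name') == 1
try:
    getattr(_Demo, b'name')
except TypeError:
    pass  # bytes attribute names are rejected
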
--- a/contrib/check-pytype.sh	Mon Aug 07 11:05:43 2023 +0200
+++ b/contrib/check-pytype.sh	Mon Aug 07 11:08:00 2023 +0200
@@ -26,7 +26,6 @@
 # hgext/githelp.py              # [attribute-error] [wrong-arg-types]
 # hgext/hgk.py                  # [attribute-error]
 # hgext/histedit.py             # [attribute-error], [wrong-arg-types]
-# hgext/infinitepush            # using bytes for str literal; scheduled for removal
 # hgext/keyword.py              # [attribute-error]
 # hgext/largefiles/storefactory.py  # [attribute-error]
 # hgext/lfs/__init__.py         # [attribute-error]
@@ -88,7 +87,6 @@
     -x hgext/githelp.py \
     -x hgext/hgk.py \
     -x hgext/histedit.py \
-    -x hgext/infinitepush \
     -x hgext/keyword.py \
     -x hgext/largefiles/storefactory.py \
     -x hgext/lfs/__init__.py \
--- a/contrib/import-checker.py	Mon Aug 07 11:05:43 2023 +0200
+++ b/contrib/import-checker.py	Mon Aug 07 11:08:00 2023 +0200
@@ -45,6 +45,7 @@
     'mercurial.thirdparty',
     'mercurial.thirdparty.attr',
     'mercurial.thirdparty.jaraco.collections',
+    'mercurial.thirdparty.tomli',
     'mercurial.thirdparty.zope',
     'mercurial.thirdparty.zope.interface',
     'typing',
--- a/contrib/merge-lists/Cargo.lock	Mon Aug 07 11:05:43 2023 +0200
+++ b/contrib/merge-lists/Cargo.lock	Mon Aug 07 11:08:00 2023 +0200
@@ -12,6 +12,55 @@
 ]
 
 [[package]]
+name = "anstream"
+version = "0.3.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0ca84f3628370c59db74ee214b3263d58f9aadd9b4fe7e711fd87dc452b7f163"
+dependencies = [
+ "anstyle",
+ "anstyle-parse",
+ "anstyle-query",
+ "anstyle-wincon",
+ "colorchoice",
+ "is-terminal",
+ "utf8parse",
+]
+
+[[package]]
+name = "anstyle"
+version = "1.0.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3a30da5c5f2d5e72842e00bcb57657162cdabef0931f40e2deb9b4140440cecd"
+
+[[package]]
+name = "anstyle-parse"
+version = "0.2.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "938874ff5980b03a87c5524b3ae5b59cf99b1d6bc836848df7bc5ada9643c333"
+dependencies = [
+ "utf8parse",
+]
+
+[[package]]
+name = "anstyle-query"
+version = "1.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5ca11d4be1bab0c8bc8734a9aa7bf4ee8316d462a08c6ac5052f888fef5b494b"
+dependencies = [
+ "windows-sys",
+]
+
+[[package]]
+name = "anstyle-wincon"
+version = "1.0.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "180abfa45703aebe0093f79badacc01b8fd4ea2e35118747e5811127f926e188"
+dependencies = [
+ "anstyle",
+ "windows-sys",
+]
+
+[[package]]
 name = "assert_cmd"
 version = "2.0.4"
 source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -26,17 +75,6 @@
 ]
 
 [[package]]
-name = "atty"
-version = "0.2.14"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d9b39be18770d11421cdb1b9947a45dd3f37e93092cbf377614828a319d5fee8"
-dependencies = [
- "hermit-abi",
- "libc",
- "winapi",
-]
-
-[[package]]
 name = "autocfg"
 version = "1.1.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -44,9 +82,9 @@
 
 [[package]]
 name = "bitflags"
-version = "1.3.2"
+version = "2.3.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a"
+checksum = "630be753d4e58660abd17930c71b647fe46c27ea6b63cc59e1e3851406972e42"
 
 [[package]]
 name = "bstr"
@@ -60,36 +98,59 @@
 ]
 
 [[package]]
+name = "cc"
+version = "1.0.79"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "50d30906286121d95be3d479533b458f87493b30a4b5f79a607db8f5d11aa91f"
+
+[[package]]
 name = "clap"
-version = "3.1.6"
+version = "4.3.17"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d8c93436c21e4698bacadf42917db28b23017027a4deccb35dbe47a7e7840123"
+checksum = "5b0827b011f6f8ab38590295339817b0d26f344aa4932c3ced71b45b0c54b4a9"
 dependencies = [
- "atty",
- "bitflags",
+ "clap_builder",
  "clap_derive",
- "indexmap",
- "lazy_static",
- "os_str_bytes",
+ "once_cell",
+]
+
+[[package]]
+name = "clap_builder"
+version = "4.3.17"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9441b403be87be858db6a23edb493e7f694761acdc3343d5a0fcaafd304cbc9e"
+dependencies = [
+ "anstream",
+ "anstyle",
+ "clap_lex",
  "strsim",
- "termcolor",
- "textwrap",
 ]
 
 [[package]]
 name = "clap_derive"
-version = "3.1.4"
+version = "4.3.12"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "da95d038ede1a964ce99f49cbe27a7fb538d1da595e4b4f70b8c8f338d17bf16"
+checksum = "54a9bb5758fc5dfe728d1019941681eccaf0cf8a4189b692a0ee2f2ecf90a050"
 dependencies = [
  "heck",
- "proc-macro-error",
  "proc-macro2",
  "quote",
- "syn",
+ "syn 2.0.27",
 ]
 
 [[package]]
+name = "clap_lex"
+version = "0.5.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2da6da31387c7e4ef160ffab6d5e7f00c42626fe39aea70a7b0f1773f7dd6c1b"
+
+[[package]]
+name = "colorchoice"
+version = "1.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "acbf1af155f9b9ef647e42cdc158db4b64a1b61f743629225fde6f3e0be2a7c7"
+
+[[package]]
 name = "console"
 version = "0.15.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -127,6 +188,27 @@
 checksum = "a357d28ed41a50f9c765dbfe56cbc04a64e53e5fc58ba79fbc34c10ef3df831f"
 
 [[package]]
+name = "errno"
+version = "0.3.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4bcfec3a70f97c962c307b2d2c56e358cf1d00b558d74262b5f929ee8cc7e73a"
+dependencies = [
+ "errno-dragonfly",
+ "libc",
+ "windows-sys",
+]
+
+[[package]]
+name = "errno-dragonfly"
+version = "0.1.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "aa68f1b12764fab894d2755d2518754e71b4fd80ecfb822714a1206c2aab39bf"
+dependencies = [
+ "cc",
+ "libc",
+]
+
+[[package]]
 name = "fuchsia-cprng"
 version = "0.1.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -146,12 +228,9 @@
 
 [[package]]
 name = "hermit-abi"
-version = "0.1.19"
+version = "0.3.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "62b467343b94ba476dcb2500d242dadbb39557df889310ac77c5d99100aaac33"
-dependencies = [
- "libc",
-]
+checksum = "443144c8cdadd93ebf52ddb4056d257f5b52c04d3c804e657d19eb73fc33668b"
 
 [[package]]
 name = "indexmap"
@@ -178,6 +257,17 @@
 ]
 
 [[package]]
+name = "is-terminal"
+version = "0.4.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "cb0889898416213fab133e1d33a0e5858a48177452750691bde3666d0fdbaf8b"
+dependencies = [
+ "hermit-abi",
+ "rustix",
+ "windows-sys",
+]
+
+[[package]]
 name = "itertools"
 version = "0.10.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -200,9 +290,9 @@
 
 [[package]]
 name = "libc"
-version = "0.2.119"
+version = "0.2.147"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1bf2e165bb3457c8e098ea76f3e3bc9db55f87aa90d52d0e6be741470916aaa4"
+checksum = "b4668fb0ea861c1df094127ac5f1da3409a82116a4ba74fca2e58ef927159bb3"
 
 [[package]]
 name = "linked-hash-map"
@@ -211,6 +301,12 @@
 checksum = "7fb9b38af92608140b86b693604b9ffcc5824240a484d1ecd4795bacb2fe88f3"
 
 [[package]]
+name = "linux-raw-sys"
+version = "0.4.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "09fc20d2ca12cb9f044c93e3bd6d32d523e6e2ec3db4f7b2939cd99026ecd3f0"
+
+[[package]]
 name = "memchr"
 version = "2.4.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -231,18 +327,9 @@
 
 [[package]]
 name = "once_cell"
-version = "1.10.0"
+version = "1.18.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "87f3e037eac156d1775da914196f0f37741a274155e34a0b7e427c35d2a2ecb9"
-
-[[package]]
-name = "os_str_bytes"
-version = "6.0.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8e22443d1643a904602595ba1cd8f7d896afe56d26712531c5ff73a15b2fbf64"
-dependencies = [
- "memchr",
-]
+checksum = "dd8b5dd2ae5ed71462c540258bedcb51965123ad7e7ccf4b9a8cafaa4a63576d"
 
 [[package]]
 name = "predicates"
@@ -272,43 +359,19 @@
 ]
 
 [[package]]
-name = "proc-macro-error"
-version = "1.0.4"
+name = "proc-macro2"
+version = "1.0.66"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "da25490ff9892aab3fcf7c36f08cfb902dd3e71ca0f9f9517bea02a73a5ce38c"
+checksum = "18fb31db3f9bddb2ea821cde30a9f70117e3f119938b5ee630b7403aa6e2ead9"
 dependencies = [
- "proc-macro-error-attr",
- "proc-macro2",
- "quote",
- "syn",
- "version_check",
-]
-
-[[package]]
-name = "proc-macro-error-attr"
-version = "1.0.4"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a1be40180e52ecc98ad80b184934baf3d0d29f979574e439af5a55274b35f869"
-dependencies = [
- "proc-macro2",
- "quote",
- "version_check",
-]
-
-[[package]]
-name = "proc-macro2"
-version = "1.0.36"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c7342d5883fbccae1cc37a2353b09c87c9b0f3afd73f5fb9bba687a1f733b029"
-dependencies = [
- "unicode-xid",
+ "unicode-ident",
 ]
 
 [[package]]
 name = "quote"
-version = "1.0.15"
+version = "1.0.31"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "864d3e96a899863136fc6e99f3d7cae289dafe43bf2c5ac19b70df7210c0a145"
+checksum = "5fe8a65d69dd0808184ebb5f836ab526bb259db23c657efa38711b1072ee47f0"
 dependencies = [
  "proc-macro2",
 ]
@@ -383,6 +446,19 @@
 ]
 
 [[package]]
+name = "rustix"
+version = "0.38.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0a962918ea88d644592894bc6dc55acc6c0956488adcebbfb6e273506b7fd6e5"
+dependencies = [
+ "bitflags",
+ "errno",
+ "libc",
+ "linux-raw-sys",
+ "windows-sys",
+]
+
+[[package]]
 name = "ryu"
 version = "1.0.9"
 source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -405,7 +481,7 @@
 dependencies = [
  "proc-macro2",
  "quote",
- "syn",
+ "syn 1.0.87",
 ]
 
 [[package]]
@@ -458,6 +534,17 @@
 ]
 
 [[package]]
+name = "syn"
+version = "2.0.27"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b60f673f44a8255b9c8c657daf66a596d435f2da81a555b06dc644d080ba45e0"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "unicode-ident",
+]
+
+[[package]]
 name = "tempdir"
 version = "0.3.7"
 source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -468,15 +555,6 @@
 ]
 
 [[package]]
-name = "termcolor"
-version = "1.1.3"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "bab24d30b911b2376f3a13cc2cd443142f0c81dda04c118693e35b3835757755"
-dependencies = [
- "winapi-util",
-]
-
-[[package]]
 name = "terminal_size"
 version = "0.1.17"
 source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -493,10 +571,10 @@
 checksum = "507e9898683b6c43a9aa55b64259b721b52ba226e0f3779137e50ad114a4c90b"
 
 [[package]]
-name = "textwrap"
-version = "0.15.0"
+name = "unicode-ident"
+version = "1.0.11"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b1141d4d61095b28419e22cb0bbf02755f5e54e0526f97f1e3d1d160e60885fb"
+checksum = "301abaae475aa91687eb82514b328ab47a211a533026cb25fc3e519b86adfc3c"
 
 [[package]]
 name = "unicode-xid"
@@ -505,10 +583,10 @@
 checksum = "8ccb82d61f80a663efe1f787a51b16b5a51e3314d6ac365b08639f52387b33f3"
 
 [[package]]
-name = "version_check"
-version = "0.9.4"
+name = "utf8parse"
+version = "0.2.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f"
+checksum = "711b9620af191e0cdc7468a8d14e709c3dcdb115b36f838e601583af800a370a"
 
 [[package]]
 name = "wait-timeout"
@@ -536,21 +614,78 @@
 checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6"
 
 [[package]]
-name = "winapi-util"
-version = "0.1.5"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "70ec6ce85bb158151cae5e5c87f95a8e97d2c0c4b001223f33a334e3ce5de178"
-dependencies = [
- "winapi",
-]
-
-[[package]]
 name = "winapi-x86_64-pc-windows-gnu"
 version = "0.4.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f"
 
 [[package]]
+name = "windows-sys"
+version = "0.48.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "677d2418bec65e3338edb076e806bc1ec15693c5d0104683f2efe857f61056a9"
+dependencies = [
+ "windows-targets",
+]
+
+[[package]]
+name = "windows-targets"
+version = "0.48.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "05d4b17490f70499f20b9e791dcf6a299785ce8af4d709018206dc5b4953e95f"
+dependencies = [
+ "windows_aarch64_gnullvm",
+ "windows_aarch64_msvc",
+ "windows_i686_gnu",
+ "windows_i686_msvc",
+ "windows_x86_64_gnu",
+ "windows_x86_64_gnullvm",
+ "windows_x86_64_msvc",
+]
+
+[[package]]
+name = "windows_aarch64_gnullvm"
+version = "0.48.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "91ae572e1b79dba883e0d315474df7305d12f569b400fcf90581b06062f7e1bc"
+
+[[package]]
+name = "windows_aarch64_msvc"
+version = "0.48.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b2ef27e0d7bdfcfc7b868b317c1d32c641a6fe4629c171b8928c7b08d98d7cf3"
+
+[[package]]
+name = "windows_i686_gnu"
+version = "0.48.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "622a1962a7db830d6fd0a69683c80a18fda201879f0f447f065a3b7467daa241"
+
+[[package]]
+name = "windows_i686_msvc"
+version = "0.48.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4542c6e364ce21bf45d69fdd2a8e455fa38d316158cfd43b3ac1c5b1b19f8e00"
+
+[[package]]
+name = "windows_x86_64_gnu"
+version = "0.48.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ca2b8a661f7628cbd23440e50b05d705db3686f894fc9580820623656af974b1"
+
+[[package]]
+name = "windows_x86_64_gnullvm"
+version = "0.48.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7896dbc1f41e08872e9d5e8f8baa8fdd2677f29468c4e156210174edc7f7b953"
+
+[[package]]
+name = "windows_x86_64_msvc"
+version = "0.48.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1a515f5799fe4961cb532f983ce2b23082366b898e52ffbce459c86f67c8378a"
+
+[[package]]
 name = "yaml-rust"
 version = "0.4.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
--- a/contrib/merge-lists/Cargo.toml	Mon Aug 07 11:05:43 2023 +0200
+++ b/contrib/merge-lists/Cargo.toml	Mon Aug 07 11:08:00 2023 +0200
@@ -10,7 +10,7 @@
 rust-version = "1.59"
 
 [dependencies]
-clap = { version = "3.1.6", features = ["derive"] }
+clap = { version = "4.3.17", features = ["derive"] }
 itertools = "0.10.3"
 regex = "1.5.5"
 similar = { version="2.1.0", features = ["bytes"] }
--- a/contrib/perf.py	Mon Aug 07 11:05:43 2023 +0200
+++ b/contrib/perf.py	Mon Aug 07 11:08:00 2023 +0200
@@ -456,7 +456,7 @@
         return functools.partial(stub_timer, fm), fm
 
     # experimental config: perf.all-timing
-    displayall = ui.configbool(b"perf", b"all-timing", False)
+    displayall = ui.configbool(b"perf", b"all-timing", True)
 
     # experimental config: perf.run-limits
     limitspec = ui.configlist(b"perf", b"run-limits", [])
@@ -3359,7 +3359,7 @@
 
     # get a formatter
     fm = ui.formatter(b'perf', opts)
-    displayall = ui.configbool(b"perf", b"all-timing", False)
+    displayall = ui.configbool(b"perf", b"all-timing", True)
 
     # print individual details if requested
     if opts['details']:
@@ -4421,7 +4421,8 @@
                 )
             return orig(repo, cmd, file_, opts)
 
-        extensions.wrapfunction(cmdutil, b'openrevlog', openrevlog)
+        name = _sysstr(b'openrevlog')
+        extensions.wrapfunction(cmdutil, name, openrevlog)
 
 
 @command(
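Several hunks in this changeset (perf.py above and the hgext/ files below) converge on the same pattern: extensions.wrapfunction() is now given the attribute name as a native str rather than bytes, with perf.py going through its _sysstr() helper to stay compatible with older Mercurial versions. A minimal sketch of that calling convention, modeled on the hgext/git change later in this diff and using a hypothetical wrapper name (assumes a Mercurial environment on the import path):

from mercurial import extensions, localrepo

def _makestore_wrapped(orig, *args, **kwargs):
    # 'orig' is the original localrepo.makestore; delegate to it unchanged
    return orig(*args, **kwargs)

def extsetup(ui):
    # the attribute name is a str literal, matching the calls rewritten above
    extensions.wrapfunction(localrepo, 'makestore', _makestore_wrapped)
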
--- a/contrib/python3-whitelist	Mon Aug 07 11:05:43 2023 +0200
+++ b/contrib/python3-whitelist	Mon Aug 07 11:08:00 2023 +0200
@@ -337,9 +337,6 @@
 test-import.t
 test-imports-checker.t
 test-incoming-outgoing.t
-test-infinitepush-bundlestore.t
-test-infinitepush-ci.t
-test-infinitepush.t
 test-inherit-mode.t
 test-init.t
 test-install.t
--- a/hgext/beautifygraph.py	Mon Aug 07 11:05:43 2023 +0200
+++ b/hgext/beautifygraph.py	Mon Aug 07 11:08:00 2023 +0200
@@ -103,5 +103,5 @@
         )
         return
 
-    extensions.wrapfunction(graphmod, b'outputgraph', outputprettygraph)
-    extensions.wrapfunction(templatekw, b'getgraphnode', getprettygraphnode)
+    extensions.wrapfunction(graphmod, 'outputgraph', outputprettygraph)
+    extensions.wrapfunction(templatekw, 'getgraphnode', getprettygraphnode)
--- a/hgext/blackbox.py	Mon Aug 07 11:05:43 2023 +0200
+++ b/hgext/blackbox.py	Mon Aug 07 11:08:00 2023 +0200
@@ -67,41 +67,6 @@
 cmdtable = {}
 command = registrar.command(cmdtable)
 
-configtable = {}
-configitem = registrar.configitem(configtable)
-
-configitem(
-    b'blackbox',
-    b'dirty',
-    default=False,
-)
-configitem(
-    b'blackbox',
-    b'maxsize',
-    default=b'1 MB',
-)
-configitem(
-    b'blackbox',
-    b'logsource',
-    default=False,
-)
-configitem(
-    b'blackbox',
-    b'maxfiles',
-    default=7,
-)
-configitem(
-    b'blackbox',
-    b'track',
-    default=lambda: [b'*'],
-)
-configitem(
-    b'blackbox',
-    b'ignore',
-    default=lambda: [b'chgserver', b'cmdserver', b'extension'],
-)
-configitem(b'blackbox', b'date-format', default=b'')
-
 _lastlogger = loggingutil.proxylogger()
 
 
--- a/hgext/bookflow.py	Mon Aug 07 11:05:43 2023 +0200
+++ b/hgext/bookflow.py	Mon Aug 07 11:08:00 2023 +0200
@@ -117,8 +117,8 @@
 
 
 def uisetup(ui):
-    extensions.wrapfunction(bookmarks, b'update', bookmarks_update)
-    extensions.wrapfunction(bookmarks, b'addbookmarks', bookmarks_addbookmarks)
+    extensions.wrapfunction(bookmarks, 'update', bookmarks_update)
+    extensions.wrapfunction(bookmarks, 'addbookmarks', bookmarks_addbookmarks)
     extensions.wrapcommand(commands.table, b'commit', commands_commit)
     extensions.wrapcommand(commands.table, b'pull', commands_pull)
     if not ui.configbool(MY_NAME, b'enable-branches'):
--- a/hgext/clonebundles.py	Mon Aug 07 11:05:43 2023 +0200
+++ b/hgext/clonebundles.py	Mon Aug 07 11:08:00 2023 +0200
@@ -349,7 +349,7 @@
 
 
 def extsetup(ui):
-    extensions.wrapfunction(wireprotov1server, b'_capabilities', capabilities)
+    extensions.wrapfunction(wireprotov1server, '_capabilities', capabilities)
 
 
 # logic for bundle auto-generation
--- a/hgext/fastannotate/protocol.py	Mon Aug 07 11:05:43 2023 +0200
+++ b/hgext/fastannotate/protocol.py	Mon Aug 07 11:08:00 2023 +0200
@@ -101,7 +101,7 @@
 
 def serveruisetup(ui):
     _registerwireprotocommand()
-    extensions.wrapfunction(wireprotov1server, b'_capabilities', _capabilities)
+    extensions.wrapfunction(wireprotov1server, '_capabilities', _capabilities)
 
 
 # client-side
--- a/hgext/fastannotate/support.py	Mon Aug 07 11:05:43 2023 +0200
+++ b/hgext/fastannotate/support.py	Mon Aug 07 11:08:00 2023 +0200
@@ -129,8 +129,8 @@
 
 
 def replacehgwebannotate():
-    extensions.wrapfunction(hgweb.webutil, b'annotate', _hgwebannotate)
+    extensions.wrapfunction(hgweb.webutil, 'annotate', _hgwebannotate)
 
 
 def replacefctxannotate():
-    extensions.wrapfunction(hgcontext.basefilectx, b'annotate', _fctxannotate)
+    extensions.wrapfunction(hgcontext.basefilectx, 'annotate', _fctxannotate)
--- a/hgext/fsmonitor/__init__.py	Mon Aug 07 11:05:43 2023 +0200
+++ b/hgext/fsmonitor/__init__.py	Mon Aug 07 11:08:00 2023 +0200
@@ -755,9 +755,9 @@
     )
     if pycompat.isdarwin:
         # An assist for avoiding the dangling-symlink fsevents bug
-        extensions.wrapfunction(os, b'symlink', wrapsymlink)
+        extensions.wrapfunction(os, 'symlink', wrapsymlink)
 
-    extensions.wrapfunction(merge, b'_update', wrapupdate)
+    extensions.wrapfunction(merge, '_update', wrapupdate)
 
 
 def wrapsymlink(orig, source, link_name):
--- a/hgext/git/__init__.py	Mon Aug 07 11:05:43 2023 +0200
+++ b/hgext/git/__init__.py	Mon Aug 07 11:08:00 2023 +0200
@@ -342,8 +342,8 @@
 
 
 def extsetup(ui):
-    extensions.wrapfunction(localrepo, b'makestore', _makestore)
-    extensions.wrapfunction(localrepo, b'makefilestorage', _makefilestorage)
+    extensions.wrapfunction(localrepo, 'makestore', _makestore)
+    extensions.wrapfunction(localrepo, 'makefilestorage', _makefilestorage)
     # Inject --git flag for `hg init`
     entry = extensions.wrapcommand(commands.table, b'init', init)
     entry[1].extend(
--- a/hgext/git/dirstate.py	Mon Aug 07 11:05:43 2023 +0200
+++ b/hgext/git/dirstate.py	Mon Aug 07 11:08:00 2023 +0200
@@ -47,7 +47,7 @@
     return result, warnings
 
 
-extensions.wrapfunction(matchmod, b'readpatternfile', readpatternfile)
+extensions.wrapfunction(matchmod, 'readpatternfile', readpatternfile)
 
 
 _STATUS_MAP = {}
--- a/hgext/highlight/__init__.py	Mon Aug 07 11:05:43 2023 +0200
+++ b/hgext/highlight/__init__.py	Mon Aug 07 11:08:00 2023 +0200
@@ -101,8 +101,8 @@
 def extsetup(ui):
     # monkeypatch in the new version
     extensions.wrapfunction(
-        webcommands, b'_filerevision', filerevision_highlight
+        webcommands, '_filerevision', filerevision_highlight
     )
-    extensions.wrapfunction(webcommands, b'annotate', annotate_highlight)
+    extensions.wrapfunction(webcommands, 'annotate', annotate_highlight)
     webcommands.highlightcss = generate_css
     webcommands.__all__.append(b'highlightcss')
--- a/hgext/histedit.py	Mon Aug 07 11:05:43 2023 +0200
+++ b/hgext/histedit.py	Mon Aug 07 11:08:00 2023 +0200
@@ -2652,7 +2652,7 @@
     return orig(ui, repo, nodelist, *args, **kwargs)
 
 
-extensions.wrapfunction(repair, b'strip', stripwrapper)
+extensions.wrapfunction(repair, 'strip', stripwrapper)
 
 
 def summaryhook(ui, repo):
--- a/hgext/infinitepush/README	Mon Aug 07 11:05:43 2023 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,23 +0,0 @@
-## What is it?
-
-This extension adds ability to save certain pushes to a remote blob store
-as bundles and to serve commits from remote blob store.
-The revisions are stored on disk or in everstore.
-The metadata are stored in sql or on disk.
-
-## Config options
-
-infinitepush.branchpattern: pattern to detect a scratchbranch, example
-                            're:scratch/.+'
-
-infinitepush.indextype: disk or sql for the metadata
-infinitepush.reponame: only relevant for sql metadata backend, reponame to put in
-                       sql
-
-infinitepush.indexpath: only relevant for ondisk metadata backend, the path to
-                        store the index on disk. If not set will be under .hg
-                        in a folder named filebundlestore
-
-infinitepush.storepath: only relevant for ondisk metadata backend, the path to
-                        store the bundles. If not set, it will be
-                        .hg/filebundlestore
--- a/hgext/infinitepush/__init__.py	Mon Aug 07 11:05:43 2023 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,1413 +0,0 @@
-# Infinite push
-#
-# Copyright 2016 Facebook, Inc.
-#
-# This software may be used and distributed according to the terms of the
-# GNU General Public License version 2 or any later version.
-""" store some pushes in a remote blob store on the server (EXPERIMENTAL)
-
-IMPORTANT: if you use this extension, please contact
-mercurial-devel@mercurial-scm.org ASAP. This extension is believed to
-be unused and barring learning of users of this functionality, we will
-delete this code at the end of 2020.
-
-    [infinitepush]
-    # Server-side and client-side option. Pattern of the infinitepush bookmark
-    branchpattern = PATTERN
-
-    # Server or client
-    server = False
-
-    # Server-side option. Possible values: 'disk' or 'sql'. Fails if not set
-    indextype = disk
-
-    # Server-side option. Used only if indextype=sql.
-    # Format: 'IP:PORT:DB_NAME:USER:PASSWORD'
-    sqlhost = IP:PORT:DB_NAME:USER:PASSWORD
-
-    # Server-side option. Used only if indextype=disk.
-    # Filesystem path to the index store
-    indexpath = PATH
-
-    # Server-side option. Possible values: 'disk' or 'external'
-    # Fails if not set
-    storetype = disk
-
-    # Server-side option.
-    # Path to the binary that will save bundle to the bundlestore
-    # Formatted cmd line will be passed to it (see `put_args`)
-    put_binary = put
-
-    # Serser-side option. Used only if storetype=external.
-    # Format cmd-line string for put binary. Placeholder: {filename}
-    put_args = {filename}
-
-    # Server-side option.
-    # Path to the binary that get bundle from the bundlestore.
-    # Formatted cmd line will be passed to it (see `get_args`)
-    get_binary = get
-
-    # Serser-side option. Used only if storetype=external.
-    # Format cmd-line string for get binary. Placeholders: {filename} {handle}
-    get_args = {filename} {handle}
-
-    # Server-side option
-    logfile = FIlE
-
-    # Server-side option
-    loglevel = DEBUG
-
-    # Server-side option. Used only if indextype=sql.
-    # Sets mysql wait_timeout option.
-    waittimeout = 300
-
-    # Server-side option. Used only if indextype=sql.
-    # Sets mysql innodb_lock_wait_timeout option.
-    locktimeout = 120
-
-    # Server-side option. Used only if indextype=sql.
-    # Name of the repository
-    reponame = ''
-
-    # Client-side option. Used by --list-remote option. List of remote scratch
-    # patterns to list if no patterns are specified.
-    defaultremotepatterns = ['*']
-
-    # Instructs infinitepush to forward all received bundle2 parts to the
-    # bundle for storage. Defaults to False.
-    storeallparts = True
-
-    # routes each incoming push to the bundlestore. defaults to False
-    pushtobundlestore = True
-
-    [remotenames]
-    # Client-side option
-    # This option should be set only if remotenames extension is enabled.
-    # Whether remote bookmarks are tracked by remotenames extension.
-    bookmarks = True
-"""
-
-
-import collections
-import contextlib
-import functools
-import logging
-import os
-import random
-import re
-import socket
-import subprocess
-import time
-
-from mercurial.node import (
-    bin,
-    hex,
-)
-
-from mercurial.i18n import _
-
-from mercurial.pycompat import (
-    getattr,
-    open,
-)
-
-from mercurial.utils import (
-    procutil,
-    stringutil,
-    urlutil,
-)
-
-from mercurial import (
-    bundle2,
-    changegroup,
-    commands,
-    discovery,
-    encoding,
-    error,
-    exchange,
-    extensions,
-    hg,
-    localrepo,
-    phases,
-    pushkey,
-    pycompat,
-    registrar,
-    util,
-    wireprototypes,
-    wireprotov1peer,
-    wireprotov1server,
-)
-
-from . import (
-    bundleparts,
-    common,
-)
-
-# Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
-# extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
-# be specifying the version(s) of Mercurial they are tested with, or
-# leave the attribute unspecified.
-testedwith = b'ships-with-hg-core'
-
-configtable = {}
-configitem = registrar.configitem(configtable)
-
-configitem(
-    b'infinitepush',
-    b'deprecation-message',
-    default=True,
-)
-
-configitem(
-    b'infinitepush',
-    b'deprecation-abort',
-    default=True,
-)
-
-configitem(
-    b'infinitepush',
-    b'server',
-    default=False,
-)
-configitem(
-    b'infinitepush',
-    b'storetype',
-    default=b'',
-)
-configitem(
-    b'infinitepush',
-    b'indextype',
-    default=b'',
-)
-configitem(
-    b'infinitepush',
-    b'indexpath',
-    default=b'',
-)
-configitem(
-    b'infinitepush',
-    b'storeallparts',
-    default=False,
-)
-configitem(
-    b'infinitepush',
-    b'reponame',
-    default=b'',
-)
-configitem(
-    b'scratchbranch',
-    b'storepath',
-    default=b'',
-)
-configitem(
-    b'infinitepush',
-    b'branchpattern',
-    default=b'',
-)
-configitem(
-    b'infinitepush',
-    b'pushtobundlestore',
-    default=False,
-)
-configitem(
-    b'experimental',
-    b'server-bundlestore-bookmark',
-    default=b'',
-)
-configitem(
-    b'experimental',
-    b'infinitepush-scratchpush',
-    default=False,
-)
-
-experimental = b'experimental'
-configbookmark = b'server-bundlestore-bookmark'
-configscratchpush = b'infinitepush-scratchpush'
-
-scratchbranchparttype = bundleparts.scratchbranchparttype
-revsetpredicate = registrar.revsetpredicate()
-templatekeyword = registrar.templatekeyword()
-_scratchbranchmatcher = lambda x: False
-_maybehash = re.compile('^[a-f0-9]+$').search
-
-
-def _buildexternalbundlestore(ui):
-    put_args = ui.configlist(b'infinitepush', b'put_args', [])
-    put_binary = ui.config(b'infinitepush', b'put_binary')
-    if not put_binary:
-        raise error.Abort(b'put binary is not specified')
-    get_args = ui.configlist(b'infinitepush', b'get_args', [])
-    get_binary = ui.config(b'infinitepush', b'get_binary')
-    if not get_binary:
-        raise error.Abort(b'get binary is not specified')
-    from . import store
-
-    return store.externalbundlestore(put_binary, put_args, get_binary, get_args)
-
-
-def _buildsqlindex(ui):
-    sqlhost = ui.config(b'infinitepush', b'sqlhost')
-    if not sqlhost:
-        raise error.Abort(_(b'please set infinitepush.sqlhost'))
-    host, port, db, user, password = sqlhost.split(b':')
-    reponame = ui.config(b'infinitepush', b'reponame')
-    if not reponame:
-        raise error.Abort(_(b'please set infinitepush.reponame'))
-
-    logfile = ui.config(b'infinitepush', b'logfile', b'')
-    waittimeout = ui.configint(b'infinitepush', b'waittimeout', 300)
-    locktimeout = ui.configint(b'infinitepush', b'locktimeout', 120)
-    from . import sqlindexapi
-
-    return sqlindexapi.sqlindexapi(
-        reponame,
-        host,
-        port,
-        db,
-        user,
-        password,
-        logfile,
-        _getloglevel(ui),
-        waittimeout=waittimeout,
-        locktimeout=locktimeout,
-    )
-
-
-def _getloglevel(ui):
-    loglevel = ui.config(b'infinitepush', b'loglevel', b'DEBUG')
-    numeric_loglevel = getattr(logging, loglevel.upper(), None)
-    if not isinstance(numeric_loglevel, int):
-        raise error.Abort(_(b'invalid log level %s') % loglevel)
-    return numeric_loglevel
-
-
-def _tryhoist(ui, remotebookmark):
-    """returns a bookmarks with hoisted part removed
-
-    Remotenames extension has a 'hoist' config that allows to use remote
-    bookmarks without specifying remote path. For example, 'hg update master'
-    works as well as 'hg update remote/master'. We want to allow the same in
-    infinitepush.
-    """
-
-    if common.isremotebooksenabled(ui):
-        hoist = ui.config(b'remotenames', b'hoistedpeer') + b'/'
-        if remotebookmark.startswith(hoist):
-            return remotebookmark[len(hoist) :]
-    return remotebookmark
-
-
-class bundlestore:
-    def __init__(self, repo):
-        self._repo = repo
-        storetype = self._repo.ui.config(b'infinitepush', b'storetype')
-        if storetype == b'disk':
-            from . import store
-
-            self.store = store.filebundlestore(self._repo.ui, self._repo)
-        elif storetype == b'external':
-            self.store = _buildexternalbundlestore(self._repo.ui)
-        else:
-            raise error.Abort(
-                _(b'unknown infinitepush store type specified %s') % storetype
-            )
-
-        indextype = self._repo.ui.config(b'infinitepush', b'indextype')
-        if indextype == b'disk':
-            from . import fileindexapi
-
-            self.index = fileindexapi.fileindexapi(self._repo)
-        elif indextype == b'sql':
-            self.index = _buildsqlindex(self._repo.ui)
-        else:
-            raise error.Abort(
-                _(b'unknown infinitepush index type specified %s') % indextype
-            )
-
-
-def _isserver(ui):
-    return ui.configbool(b'infinitepush', b'server')
-
-
-WARNING_MSG = b"""IMPORTANT: if you use this extension, please contact
-mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-unused and barring learning of users of this functionality, we drop this
-extension in Mercurial 6.6.
-"""
-
-
-def reposetup(ui, repo):
-    if ui.configbool(b'infinitepush', b'deprecation-message'):
-        ui.write_err(WARNING_MSG)
-    if ui.configbool(b'infinitepush', b'deprecation-abort'):
-        msg = b"USING EXTENSION INFINITE PUSH DESPITE PENDING DROP"
-        hint = b"contact mercurial-devel@mercurial-scm.org"
-        raise error.Abort(msg, hint=hint)
-    if _isserver(ui) and repo.local():
-        repo.bundlestore = bundlestore(repo)
-
-
-def extsetup(ui):
-    commonsetup(ui)
-    if _isserver(ui):
-        serverextsetup(ui)
-    else:
-        clientextsetup(ui)
-
-
-def uipopulate(ui):
-    if not ui.hasconfig(b"experimental", b"changegroup3"):
-        ui.setconfig(b"experimental", b"changegroup3", False, b"infinitepush")
-
-
-def commonsetup(ui):
-    wireprotov1server.commands[b'listkeyspatterns'] = (
-        wireprotolistkeyspatterns,
-        b'namespace patterns',
-    )
-    scratchbranchpat = ui.config(b'infinitepush', b'branchpattern')
-    if scratchbranchpat:
-        global _scratchbranchmatcher
-        kind, pat, _scratchbranchmatcher = stringutil.stringmatcher(
-            scratchbranchpat
-        )
-
-
-def serverextsetup(ui):
-    origpushkeyhandler = bundle2.parthandlermapping[b'pushkey']
-
-    def newpushkeyhandler(*args, **kwargs):
-        bundle2pushkey(origpushkeyhandler, *args, **kwargs)
-
-    newpushkeyhandler.params = origpushkeyhandler.params
-    bundle2.parthandlermapping[b'pushkey'] = newpushkeyhandler
-
-    orighandlephasehandler = bundle2.parthandlermapping[b'phase-heads']
-    newphaseheadshandler = lambda *args, **kwargs: bundle2handlephases(
-        orighandlephasehandler, *args, **kwargs
-    )
-    newphaseheadshandler.params = orighandlephasehandler.params
-    bundle2.parthandlermapping[b'phase-heads'] = newphaseheadshandler
-
-    extensions.wrapfunction(
-        localrepo.localrepository, b'listkeys', localrepolistkeys
-    )
-    wireprotov1server.commands[b'lookup'] = (
-        _lookupwrap(wireprotov1server.commands[b'lookup'][0]),
-        b'key',
-    )
-    extensions.wrapfunction(exchange, b'getbundlechunks', getbundlechunks)
-
-    extensions.wrapfunction(bundle2, b'processparts', processparts)
-
-
-def clientextsetup(ui):
-    entry = extensions.wrapcommand(commands.table, b'push', _push)
-
-    entry[1].append(
-        (
-            b'',
-            b'bundle-store',
-            None,
-            _(b'force push to go to bundle store (EXPERIMENTAL)'),
-        )
-    )
-
-    extensions.wrapcommand(commands.table, b'pull', _pull)
-
-    extensions.wrapfunction(discovery, b'checkheads', _checkheads)
-
-    wireprotov1peer.wirepeer.listkeyspatterns = listkeyspatterns
-
-    partorder = exchange.b2partsgenorder
-    index = partorder.index(b'changeset')
-    partorder.insert(
-        index, partorder.pop(partorder.index(scratchbranchparttype))
-    )
-
-
-def _checkheads(orig, pushop):
-    if pushop.ui.configbool(experimental, configscratchpush, False):
-        return
-    return orig(pushop)
-
-
-def wireprotolistkeyspatterns(repo, proto, namespace, patterns):
-    patterns = wireprototypes.decodelist(patterns)
-    d = repo.listkeys(encoding.tolocal(namespace), patterns).items()
-    return pushkey.encodekeys(d)
-
-
-def localrepolistkeys(orig, self, namespace, patterns=None):
-    if namespace == b'bookmarks' and patterns:
-        index = self.bundlestore.index
-        results = {}
-        bookmarks = orig(self, namespace)
-        for pattern in patterns:
-            results.update(index.getbookmarks(pattern))
-            if pattern.endswith(b'*'):
-                pattern = b're:^' + pattern[:-1] + b'.*'
-            kind, pat, matcher = stringutil.stringmatcher(pattern)
-            for bookmark, node in bookmarks.items():
-                if matcher(bookmark):
-                    results[bookmark] = node
-        return results
-    else:
-        return orig(self, namespace)
-
-
-@wireprotov1peer.batchable
-def listkeyspatterns(self, namespace, patterns):
-    if not self.capable(b'pushkey'):
-        return {}, None
-    self.ui.debug(b'preparing listkeys for "%s"\n' % namespace)
-
-    def decode(d):
-        self.ui.debug(
-            b'received listkey for "%s": %i bytes\n' % (namespace, len(d))
-        )
-        return pushkey.decodekeys(d)
-
-    return {
-        b'namespace': encoding.fromlocal(namespace),
-        b'patterns': wireprototypes.encodelist(patterns),
-    }, decode
-
-
-def _readbundlerevs(bundlerepo):
-    return list(bundlerepo.revs(b'bundle()'))
-
-
-def _includefilelogstobundle(bundlecaps, bundlerepo, bundlerevs, ui):
-    """Tells remotefilelog to include all changed files to the changegroup
-
-    By default remotefilelog doesn't include file content to the changegroup.
-    But we need to include it if we are fetching from bundlestore.
-    """
-    changedfiles = set()
-    cl = bundlerepo.changelog
-    for r in bundlerevs:
-        # [3] means changed files
-        changedfiles.update(cl.read(r)[3])
-    if not changedfiles:
-        return bundlecaps
-
-    changedfiles = b'\0'.join(changedfiles)
-    newcaps = []
-    appended = False
-    for cap in bundlecaps or []:
-        if cap.startswith(b'excludepattern='):
-            newcaps.append(b'\0'.join((cap, changedfiles)))
-            appended = True
-        else:
-            newcaps.append(cap)
-    if not appended:
-        # Not found excludepattern cap. Just append it
-        newcaps.append(b'excludepattern=' + changedfiles)
-
-    return newcaps
-
-
-def _rebundle(bundlerepo, bundleroots, unknownhead):
-    """
-    Bundle may include more revision then user requested. For example,
-    if user asks for revision but bundle also consists its descendants.
-    This function will filter out all revision that user is not requested.
-    """
-    parts = []
-
-    version = b'02'
-    outgoing = discovery.outgoing(
-        bundlerepo, commonheads=bundleroots, ancestorsof=[unknownhead]
-    )
-    cgstream = changegroup.makestream(bundlerepo, outgoing, version, b'pull')
-    cgstream = util.chunkbuffer(cgstream).read()
-    cgpart = bundle2.bundlepart(b'changegroup', data=cgstream)
-    cgpart.addparam(b'version', version)
-    parts.append(cgpart)
-
-    return parts
-
-
-def _getbundleroots(oldrepo, bundlerepo, bundlerevs):
-    cl = bundlerepo.changelog
-    bundleroots = []
-    for rev in bundlerevs:
-        node = cl.node(rev)
-        parents = cl.parents(node)
-        for parent in parents:
-            # include all revs that exist in the main repo
-            # to make sure that bundle may apply client-side
-            if parent in oldrepo:
-                bundleroots.append(parent)
-    return bundleroots
-
-
-def _needsrebundling(head, bundlerepo):
-    bundleheads = list(bundlerepo.revs(b'heads(bundle())'))
-    return not (
-        len(bundleheads) == 1 and bundlerepo[bundleheads[0]].node() == head
-    )
-
-
-def _generateoutputparts(head, bundlerepo, bundleroots, bundlefile):
-    """generates bundle that will be send to the user
-
-    returns tuple with raw bundle string and bundle type
-    """
-    parts = []
-    if not _needsrebundling(head, bundlerepo):
-        with util.posixfile(bundlefile, b"rb") as f:
-            unbundler = exchange.readbundle(bundlerepo.ui, f, bundlefile)
-            if isinstance(unbundler, changegroup.cg1unpacker):
-                part = bundle2.bundlepart(
-                    b'changegroup', data=unbundler._stream.read()
-                )
-                part.addparam(b'version', b'01')
-                parts.append(part)
-            elif isinstance(unbundler, bundle2.unbundle20):
-                haschangegroup = False
-                for part in unbundler.iterparts():
-                    if part.type == b'changegroup':
-                        haschangegroup = True
-                    newpart = bundle2.bundlepart(part.type, data=part.read())
-                    for key, value in part.params.items():
-                        newpart.addparam(key, value)
-                    parts.append(newpart)
-
-                if not haschangegroup:
-                    raise error.Abort(
-                        b'unexpected bundle without changegroup part, '
-                        + b'head: %s' % hex(head),
-                        hint=b'report to administrator',
-                    )
-            else:
-                raise error.Abort(b'unknown bundle type')
-    else:
-        parts = _rebundle(bundlerepo, bundleroots, head)
-
-    return parts
-
-
-def getbundlechunks(orig, repo, source, heads=None, bundlecaps=None, **kwargs):
-    heads = heads or []
-    # newheads are parents of roots of scratch bundles that were requested
-    newphases = {}
-    scratchbundles = []
-    newheads = []
-    scratchheads = []
-    nodestobundle = {}
-    allbundlestocleanup = []
-    try:
-        for head in heads:
-            if not repo.changelog.index.has_node(head):
-                if head not in nodestobundle:
-                    newbundlefile = common.downloadbundle(repo, head)
-                    bundlepath = b"bundle:%s+%s" % (repo.root, newbundlefile)
-                    bundlerepo = hg.repository(repo.ui, bundlepath)
-
-                    allbundlestocleanup.append((bundlerepo, newbundlefile))
-                    bundlerevs = set(_readbundlerevs(bundlerepo))
-                    bundlecaps = _includefilelogstobundle(
-                        bundlecaps, bundlerepo, bundlerevs, repo.ui
-                    )
-                    cl = bundlerepo.changelog
-                    bundleroots = _getbundleroots(repo, bundlerepo, bundlerevs)
-                    for rev in bundlerevs:
-                        node = cl.node(rev)
-                        newphases[hex(node)] = str(phases.draft)
-                        nodestobundle[node] = (
-                            bundlerepo,
-                            bundleroots,
-                            newbundlefile,
-                        )
-
-                scratchbundles.append(
-                    _generateoutputparts(head, *nodestobundle[head])
-                )
-                newheads.extend(bundleroots)
-                scratchheads.append(head)
-    finally:
-        for bundlerepo, bundlefile in allbundlestocleanup:
-            bundlerepo.close()
-            try:
-                os.unlink(bundlefile)
-            except (IOError, OSError):
-                # if we can't cleanup the file then just ignore the error,
-                # no need to fail
-                pass
-
-    pullfrombundlestore = bool(scratchbundles)
-    wrappedchangegrouppart = False
-    wrappedlistkeys = False
-    oldchangegrouppart = exchange.getbundle2partsmapping[b'changegroup']
-    try:
-
-        def _changegrouppart(bundler, *args, **kwargs):
-            # Order is important here. First add non-scratch part
-            # and only then add parts with scratch bundles because
-            # non-scratch part contains parents of roots of scratch bundles.
-            result = oldchangegrouppart(bundler, *args, **kwargs)
-            for bundle in scratchbundles:
-                for part in bundle:
-                    bundler.addpart(part)
-            return result
-
-        exchange.getbundle2partsmapping[b'changegroup'] = _changegrouppart
-        wrappedchangegrouppart = True
-
-        def _listkeys(orig, self, namespace):
-            origvalues = orig(self, namespace)
-            if namespace == b'phases' and pullfrombundlestore:
-                if origvalues.get(b'publishing') == b'True':
-                    # Make repo non-publishing to preserve draft phase
-                    del origvalues[b'publishing']
-                origvalues.update(newphases)
-            return origvalues
-
-        extensions.wrapfunction(
-            localrepo.localrepository, b'listkeys', _listkeys
-        )
-        wrappedlistkeys = True
-        heads = list((set(newheads) | set(heads)) - set(scratchheads))
-        result = orig(
-            repo, source, heads=heads, bundlecaps=bundlecaps, **kwargs
-        )
-    finally:
-        if wrappedchangegrouppart:
-            exchange.getbundle2partsmapping[b'changegroup'] = oldchangegrouppart
-        if wrappedlistkeys:
-            extensions.unwrapfunction(
-                localrepo.localrepository, b'listkeys', _listkeys
-            )
-    return result
-
-
-def _lookupwrap(orig):
-    def _lookup(repo, proto, key):
-        localkey = encoding.tolocal(key)
-
-        if isinstance(localkey, str) and _scratchbranchmatcher(localkey):
-            scratchnode = repo.bundlestore.index.getnode(localkey)
-            if scratchnode:
-                return b"%d %s\n" % (1, scratchnode)
-            else:
-                return b"%d %s\n" % (
-                    0,
-                    b'scratch branch %s not found' % localkey,
-                )
-        else:
-            try:
-                r = hex(repo.lookup(localkey))
-                return b"%d %s\n" % (1, r)
-            except Exception as inst:
-                if repo.bundlestore.index.getbundle(localkey):
-                    return b"%d %s\n" % (1, localkey)
-                else:
-                    r = stringutil.forcebytestr(inst)
-                    return b"%d %s\n" % (0, r)
-
-    return _lookup
-
-
-def _pull(orig, ui, repo, source=b"default", **opts):
-    opts = pycompat.byteskwargs(opts)
-    # Copy paste from `pull` command
-    path = urlutil.get_unique_pull_path_obj(
-        b"infinite-push's pull",
-        ui,
-        source,
-    )
-
-    scratchbookmarks = {}
-    unfi = repo.unfiltered()
-    unknownnodes = []
-    for rev in opts.get(b'rev', []):
-        if rev not in unfi:
-            unknownnodes.append(rev)
-    if opts.get(b'bookmark'):
-        bookmarks = []
-        revs = opts.get(b'rev') or []
-        for bookmark in opts.get(b'bookmark'):
-            if _scratchbranchmatcher(bookmark):
-                # rev is not known yet
-                # it will be fetched with listkeyspatterns next
-                scratchbookmarks[bookmark] = b'REVTOFETCH'
-            else:
-                bookmarks.append(bookmark)
-
-        if scratchbookmarks:
-            other = hg.peer(repo, opts, path)
-            try:
-                fetchedbookmarks = other.listkeyspatterns(
-                    b'bookmarks', patterns=scratchbookmarks
-                )
-                for bookmark in scratchbookmarks:
-                    if bookmark not in fetchedbookmarks:
-                        raise error.Abort(
-                            b'remote bookmark %s not found!' % bookmark
-                        )
-                    scratchbookmarks[bookmark] = fetchedbookmarks[bookmark]
-                    revs.append(fetchedbookmarks[bookmark])
-            finally:
-                other.close()
-        opts[b'bookmark'] = bookmarks
-        opts[b'rev'] = revs
-
-    if scratchbookmarks or unknownnodes:
-        # Set anyincoming to True
-        extensions.wrapfunction(
-            discovery, b'findcommonincoming', _findcommonincoming
-        )
-    try:
-        # Remote scratch bookmarks will be deleted because remotenames doesn't
-        # know about them. Let's save it before pull and restore after
-        remotescratchbookmarks = _readscratchremotebookmarks(ui, repo, path.loc)
-        result = orig(ui, repo, path.loc, **pycompat.strkwargs(opts))
-        # TODO(stash): race condition is possible
-        # if scratch bookmarks was updated right after orig.
-        # But that's unlikely and shouldn't be harmful.
-        if common.isremotebooksenabled(ui):
-            remotescratchbookmarks.update(scratchbookmarks)
-            _saveremotebookmarks(repo, remotescratchbookmarks, path.loc)
-        else:
-            _savelocalbookmarks(repo, scratchbookmarks)
-        return result
-    finally:
-        if scratchbookmarks:
-            extensions.unwrapfunction(discovery, b'findcommonincoming')
-
-
-def _readscratchremotebookmarks(ui, repo, other):
-    if common.isremotebooksenabled(ui):
-        remotenamesext = extensions.find(b'remotenames')
-        remotepath = remotenamesext.activepath(repo.ui, other)
-        result = {}
-        # Let's refresh remotenames to make sure we have it up to date
-        # Seems that `repo.names['remotebookmarks']` may return stale bookmarks
-        # and it results in deleting scratch bookmarks. Our best guess how to
-        # fix it is to use `clearnames()`
-        repo._remotenames.clearnames()
-        for remotebookmark in repo.names[b'remotebookmarks'].listnames(repo):
-            path, bookname = remotenamesext.splitremotename(remotebookmark)
-            if path == remotepath and _scratchbranchmatcher(bookname):
-                nodes = repo.names[b'remotebookmarks'].nodes(
-                    repo, remotebookmark
-                )
-                if nodes:
-                    result[bookname] = hex(nodes[0])
-        return result
-    else:
-        return {}
-
-
-def _saveremotebookmarks(repo, newbookmarks, remote):
-    remotenamesext = extensions.find(b'remotenames')
-    remotepath = remotenamesext.activepath(repo.ui, remote)
-    branches = collections.defaultdict(list)
-    bookmarks = {}
-    remotenames = remotenamesext.readremotenames(repo)
-    for hexnode, nametype, remote, rname in remotenames:
-        if remote != remotepath:
-            continue
-        if nametype == b'bookmarks':
-            if rname in newbookmarks:
-                # It's possible if we have a normal bookmark that matches
-                # scratch branch pattern. In this case just use the current
-                # bookmark node
-                del newbookmarks[rname]
-            bookmarks[rname] = hexnode
-        elif nametype == b'branches':
-            # saveremotenames expects 20 byte binary nodes for branches
-            branches[rname].append(bin(hexnode))
-
-    for bookmark, hexnode in newbookmarks.items():
-        bookmarks[bookmark] = hexnode
-    remotenamesext.saveremotenames(repo, remotepath, branches, bookmarks)
-
-
-def _savelocalbookmarks(repo, bookmarks):
-    if not bookmarks:
-        return
-    with repo.wlock(), repo.lock(), repo.transaction(b'bookmark') as tr:
-        changes = []
-        for scratchbook, node in bookmarks.items():
-            changectx = repo[node]
-            changes.append((scratchbook, changectx.node()))
-        repo._bookmarks.applychanges(repo, tr, changes)
-
-
-def _findcommonincoming(orig, *args, **kwargs):
-    common, inc, remoteheads = orig(*args, **kwargs)
-    return common, True, remoteheads
-
-
-def _push(orig, ui, repo, *dests, **opts):
-    opts = pycompat.byteskwargs(opts)
-    bookmark = opts.get(b'bookmark')
-    # we only support pushing one infinitepush bookmark at once
-    if len(bookmark) == 1:
-        bookmark = bookmark[0]
-    else:
-        bookmark = b''
-
-    oldphasemove = None
-    overrides = {(experimental, configbookmark): bookmark}
-
-    with ui.configoverride(overrides, b'infinitepush'):
-        scratchpush = opts.get(b'bundle_store')
-        if _scratchbranchmatcher(bookmark):
-            scratchpush = True
-            # bundle2 can be sent back after push (for example, bundle2
-            # containing `pushkey` part to update bookmarks)
-            ui.setconfig(experimental, b'bundle2.pushback', True)
-
-        if scratchpush:
-            # this is an infinitepush, we don't want the bookmark to be applied
-            # rather that should be stored in the bundlestore
-            opts[b'bookmark'] = []
-            ui.setconfig(experimental, configscratchpush, True)
-            oldphasemove = extensions.wrapfunction(
-                exchange, b'_localphasemove', _phasemove
-            )
-
-        paths = list(urlutil.get_push_paths(repo, ui, dests))
-        if len(paths) > 1:
-            msg = _(b'cannot push to multiple path with infinitepush')
-            raise error.Abort(msg)
-
-        path = paths[0]
-        destpath = path.loc
-        # Remote scratch bookmarks will be deleted because remotenames doesn't
-        # know about them. Let's save it before push and restore after
-        remotescratchbookmarks = _readscratchremotebookmarks(ui, repo, destpath)
-        result = orig(ui, repo, *dests, **pycompat.strkwargs(opts))
-        if common.isremotebooksenabled(ui):
-            if bookmark and scratchpush:
-                other = hg.peer(repo, opts, path)
-                try:
-                    fetchedbookmarks = other.listkeyspatterns(
-                        b'bookmarks', patterns=[bookmark]
-                    )
-                    remotescratchbookmarks.update(fetchedbookmarks)
-                finally:
-                    other.close()
-            _saveremotebookmarks(repo, remotescratchbookmarks, destpath)
-    if oldphasemove:
-        exchange._localphasemove = oldphasemove
-    return result
-
-
-def _deleteinfinitepushbookmarks(ui, repo, path, names):
-    """Prune remote names by removing the bookmarks we don't want anymore,
-    then writing the result back to disk
-    """
-    remotenamesext = extensions.find(b'remotenames')
-
-    # remotename format is:
-    # (node, nametype ("branches" or "bookmarks"), remote, name)
-    nametype_idx = 1
-    remote_idx = 2
-    name_idx = 3
-    remotenames = [
-        remotename
-        for remotename in remotenamesext.readremotenames(repo)
-        if remotename[remote_idx] == path
-    ]
-    remote_bm_names = [
-        remotename[name_idx]
-        for remotename in remotenames
-        if remotename[nametype_idx] == b"bookmarks"
-    ]
-
-    for name in names:
-        if name not in remote_bm_names:
-            raise error.Abort(
-                _(
-                    b"infinitepush bookmark '{}' does not exist "
-                    b"in path '{}'"
-                ).format(name, path)
-            )
-
-    bookmarks = {}
-    branches = collections.defaultdict(list)
-    for node, nametype, remote, name in remotenames:
-        if nametype == b"bookmarks" and name not in names:
-            bookmarks[name] = node
-        elif nametype == b"branches":
-            # saveremotenames wants binary nodes for branches
-            branches[name].append(bin(node))
-
-    remotenamesext.saveremotenames(repo, path, branches, bookmarks)
-
-
-def _phasemove(orig, pushop, nodes, phase=phases.public):
-    """prevent commits from being marked public
-
-    Since these are going to a scratch branch, they aren't really being
-    published."""
-
-    if phase != phases.public:
-        orig(pushop, nodes, phase)
-
-
-@exchange.b2partsgenerator(scratchbranchparttype)
-def partgen(pushop, bundler):
-    bookmark = pushop.ui.config(experimental, configbookmark)
-    scratchpush = pushop.ui.configbool(experimental, configscratchpush)
-    if b'changesets' in pushop.stepsdone or not scratchpush:
-        return
-
-    if scratchbranchparttype not in bundle2.bundle2caps(pushop.remote):
-        return
-
-    pushop.stepsdone.add(b'changesets')
-    if not pushop.outgoing.missing:
-        pushop.ui.status(_(b'no changes found\n'))
-        pushop.cgresult = 0
-        return
-
-    # This parameter tells the server that the following bundle is an
-    # infinitepush. This lets it switch the part processing to our infinitepush
-    # code path.
-    bundler.addparam(b"infinitepush", b"True")
-
-    scratchparts = bundleparts.getscratchbranchparts(
-        pushop.repo, pushop.remote, pushop.outgoing, pushop.ui, bookmark
-    )
-
-    for scratchpart in scratchparts:
-        bundler.addpart(scratchpart)
-
-    def handlereply(op):
-        # server either succeeds or aborts; no code to read
-        pushop.cgresult = 1
-
-    return handlereply
-
-
-bundle2.capabilities[bundleparts.scratchbranchparttype] = ()
-
-
-def _getrevs(bundle, oldnode, force, bookmark):
-    """extracts and validates the revs to be imported"""
-    revs = [bundle[r] for r in bundle.revs(b'sort(bundle())')]
-
-    # new bookmark
-    if oldnode is None:
-        return revs
-
-    # Fast forward update
-    if oldnode in bundle and list(bundle.set(b'bundle() & %s::', oldnode)):
-        return revs
-
-    return revs
-
-
-@contextlib.contextmanager
-def logservicecall(logger, service, **kwargs):
-    start = time.time()
-    logger(service, eventtype=b'start', **kwargs)
-    try:
-        yield
-        logger(
-            service,
-            eventtype=b'success',
-            elapsedms=(time.time() - start) * 1000,
-            **kwargs
-        )
-    except Exception as e:
-        logger(
-            service,
-            eventtype=b'failure',
-            elapsedms=(time.time() - start) * 1000,
-            errormsg=stringutil.forcebytestr(e),
-            **kwargs
-        )
-        raise
-
-
-def _getorcreateinfinitepushlogger(op):
-    logger = op.records[b'infinitepushlogger']
-    if not logger:
-        ui = op.repo.ui
-        try:
-            username = procutil.getuser()
-        except Exception:
-            username = b'unknown'
-        # Generate random request id to be able to find all logged entries
-        # for the same request. Since requestid is pseudo-generated it may
-        # not be unique, but we assume that (hostname, username, requestid)
-        # is unique.
-        random.seed()
-        requestid = random.randint(0, 2000000000)
-        hostname = socket.gethostname()
-        logger = functools.partial(
-            ui.log,
-            b'infinitepush',
-            user=username,
-            requestid=requestid,
-            hostname=hostname,
-            reponame=ui.config(b'infinitepush', b'reponame'),
-        )
-        op.records.add(b'infinitepushlogger', logger)
-    else:
-        logger = logger[0]
-    return logger
-
-
-def storetobundlestore(orig, repo, op, unbundler):
-    """stores the incoming bundle from the push command in the bundlestore
-    instead of applying it to the revlogs"""
-
-    repo.ui.status(_(b"storing changesets on the bundlestore\n"))
-    bundler = bundle2.bundle20(repo.ui)
-
-    # processing each part and storing it in bundler
-    with bundle2.partiterator(repo, op, unbundler) as parts:
-        for part in parts:
-            bundlepart = None
-            if part.type == b'replycaps':
-                # This configures the current operation to allow reply parts.
-                bundle2._processpart(op, part)
-            else:
-                bundlepart = bundle2.bundlepart(part.type, data=part.read())
-                for key, value in part.params.items():
-                    bundlepart.addparam(key, value)
-
-                # Certain parts require a response
-                if part.type in (b'pushkey', b'changegroup'):
-                    if op.reply is not None:
-                        rpart = op.reply.newpart(b'reply:%s' % part.type)
-                        rpart.addparam(
-                            b'in-reply-to', b'%d' % part.id, mandatory=False
-                        )
-                        rpart.addparam(b'return', b'1', mandatory=False)
-
-            op.records.add(
-                part.type,
-                {
-                    b'return': 1,
-                },
-            )
-            if bundlepart:
-                bundler.addpart(bundlepart)
-
-    # storing the bundle in the bundlestore
-    buf = util.chunkbuffer(bundler.getchunks())
-    fd, bundlefile = pycompat.mkstemp()
-    try:
-        try:
-            fp = os.fdopen(fd, 'wb')
-            fp.write(buf.read())
-        finally:
-            fp.close()
-        storebundle(op, {}, bundlefile)
-    finally:
-        try:
-            os.unlink(bundlefile)
-        except Exception:
-            # we would rather see the original exception
-            pass
-
-
-def processparts(orig, repo, op, unbundler):
-
-    # make sure we don't wrap processparts in case of `hg unbundle`
-    if op.source == b'unbundle':
-        return orig(repo, op, unbundler)
-
-    # this server routes each push to bundle store
-    if repo.ui.configbool(b'infinitepush', b'pushtobundlestore'):
-        return storetobundlestore(orig, repo, op, unbundler)
-
-    if unbundler.params.get(b'infinitepush') != b'True':
-        return orig(repo, op, unbundler)
-
-    handleallparts = repo.ui.configbool(b'infinitepush', b'storeallparts')
-
-    bundler = bundle2.bundle20(repo.ui)
-    cgparams = None
-    with bundle2.partiterator(repo, op, unbundler) as parts:
-        for part in parts:
-            bundlepart = None
-            if part.type == b'replycaps':
-                # This configures the current operation to allow reply parts.
-                bundle2._processpart(op, part)
-            elif part.type == bundleparts.scratchbranchparttype:
-                # Scratch branch parts need to be converted to normal
-                # changegroup parts, and the extra parameters stored for later
-                # when we upload to the store. Eventually those parameters will
-                # be put on the actual bundle instead of this part, then we can
-                # send a vanilla changegroup instead of the scratchbranch part.
-                cgversion = part.params.get(b'cgversion', b'01')
-                bundlepart = bundle2.bundlepart(
-                    b'changegroup', data=part.read()
-                )
-                bundlepart.addparam(b'version', cgversion)
-                cgparams = part.params
-
-                # If we're not dumping all parts into the new bundle, we need to
-                # alert the future pushkey and phase-heads handler to skip
-                # the part.
-                if not handleallparts:
-                    op.records.add(
-                        scratchbranchparttype + b'_skippushkey', True
-                    )
-                    op.records.add(
-                        scratchbranchparttype + b'_skipphaseheads', True
-                    )
-            else:
-                if handleallparts:
-                    # Ideally we would not process any parts, and instead just
-                    # forward them to the bundle for storage, but since this
-                    # differs from previous behavior, we need to put it behind a
-                    # config flag for incremental rollout.
-                    bundlepart = bundle2.bundlepart(part.type, data=part.read())
-                    for key, value in part.params.items():
-                        bundlepart.addparam(key, value)
-
-                    # Certain parts require a response
-                    if part.type == b'pushkey':
-                        if op.reply is not None:
-                            rpart = op.reply.newpart(b'reply:pushkey')
-                            rpart.addparam(
-                                b'in-reply-to', str(part.id), mandatory=False
-                            )
-                            rpart.addparam(b'return', b'1', mandatory=False)
-                else:
-                    bundle2._processpart(op, part)
-
-            if handleallparts:
-                op.records.add(
-                    part.type,
-                    {
-                        b'return': 1,
-                    },
-                )
-            if bundlepart:
-                bundler.addpart(bundlepart)
-
-    # If commits were sent, store them
-    if cgparams:
-        buf = util.chunkbuffer(bundler.getchunks())
-        fd, bundlefile = pycompat.mkstemp()
-        try:
-            try:
-                fp = os.fdopen(fd, 'wb')
-                fp.write(buf.read())
-            finally:
-                fp.close()
-            storebundle(op, cgparams, bundlefile)
-        finally:
-            try:
-                os.unlink(bundlefile)
-            except Exception:
-                # we would rather see the original exception
-                pass
-
-
-def storebundle(op, params, bundlefile):
-    log = _getorcreateinfinitepushlogger(op)
-    parthandlerstart = time.time()
-    log(scratchbranchparttype, eventtype=b'start')
-    index = op.repo.bundlestore.index
-    store = op.repo.bundlestore.store
-    op.records.add(scratchbranchparttype + b'_skippushkey', True)
-
-    bundle = None
-    try:  # guards bundle
-        bundlepath = b"bundle:%s+%s" % (op.repo.root, bundlefile)
-        bundle = hg.repository(op.repo.ui, bundlepath)
-
-        bookmark = params.get(b'bookmark')
-        bookprevnode = params.get(b'bookprevnode', b'')
-        force = params.get(b'force')
-
-        if bookmark:
-            oldnode = index.getnode(bookmark)
-        else:
-            oldnode = None
-        bundleheads = bundle.revs(b'heads(bundle())')
-        if bookmark and len(bundleheads) > 1:
-            raise error.Abort(
-                _(b'cannot push more than one head to a scratch branch')
-            )
-
-        revs = _getrevs(bundle, oldnode, force, bookmark)
-
-        # Notify the user of what is being pushed
-        plural = b's' if len(revs) > 1 else b''
-        op.repo.ui.warn(_(b"pushing %d commit%s:\n") % (len(revs), plural))
-        maxoutput = 10
-        for i in range(0, min(len(revs), maxoutput)):
-            firstline = bundle[revs[i]].description().split(b'\n')[0][:50]
-            op.repo.ui.warn(b"    %s  %s\n" % (revs[i], firstline))
-
-        if len(revs) > maxoutput + 1:
-            op.repo.ui.warn(b"    ...\n")
-            firstline = bundle[revs[-1]].description().split(b'\n')[0][:50]
-            op.repo.ui.warn(b"    %s  %s\n" % (revs[-1], firstline))
-
-        nodesctx = [bundle[rev] for rev in revs]
-        inindex = lambda rev: bool(index.getbundle(bundle[rev].hex()))
-        if bundleheads:
-            newheadscount = sum(not inindex(rev) for rev in bundleheads)
-        else:
-            newheadscount = 0
-        # If there's a bookmark specified, there should be only one head,
-        # so we choose the last node, which will be that head.
-        # If a bug or malicious client allows there to be a bookmark
-        # with multiple heads, we will place the bookmark on the last head.
-        bookmarknode = nodesctx[-1].hex() if nodesctx else None
-        key = None
-        if newheadscount:
-            with open(bundlefile, b'rb') as f:
-                bundledata = f.read()
-                with logservicecall(
-                    log, b'bundlestore', bundlesize=len(bundledata)
-                ):
-                    bundlesizelimit = 100 * 1024 * 1024  # 100 MB
-                    if len(bundledata) > bundlesizelimit:
-                        error_msg = (
-                            b'bundle is too big: %d bytes. '
-                            + b'max allowed size is 100 MB'
-                        )
-                        raise error.Abort(error_msg % (len(bundledata),))
-                    key = store.write(bundledata)
-
-        with logservicecall(log, b'index', newheadscount=newheadscount), index:
-            if key:
-                index.addbundle(key, nodesctx)
-            if bookmark:
-                index.addbookmark(bookmark, bookmarknode)
-                _maybeaddpushbackpart(
-                    op, bookmark, bookmarknode, bookprevnode, params
-                )
-        log(
-            scratchbranchparttype,
-            eventtype=b'success',
-            elapsedms=(time.time() - parthandlerstart) * 1000,
-        )
-
-    except Exception as e:
-        log(
-            scratchbranchparttype,
-            eventtype=b'failure',
-            elapsedms=(time.time() - parthandlerstart) * 1000,
-            errormsg=stringutil.forcebytestr(e),
-        )
-        raise
-    finally:
-        if bundle:
-            bundle.close()
-
-
-@bundle2.parthandler(
-    scratchbranchparttype,
-    (
-        b'bookmark',
-        b'bookprevnode',
-        b'force',
-        b'pushbackbookmarks',
-        b'cgversion',
-    ),
-)
-def bundle2scratchbranch(op, part):
-    '''unbundle a bundle2 part containing a changegroup to store'''
-
-    bundler = bundle2.bundle20(op.repo.ui)
-    cgversion = part.params.get(b'cgversion', b'01')
-    cgpart = bundle2.bundlepart(b'changegroup', data=part.read())
-    cgpart.addparam(b'version', cgversion)
-    bundler.addpart(cgpart)
-    buf = util.chunkbuffer(bundler.getchunks())
-
-    fd, bundlefile = pycompat.mkstemp()
-    try:
-        try:
-            fp = os.fdopen(fd, 'wb')
-            fp.write(buf.read())
-        finally:
-            fp.close()
-        storebundle(op, part.params, bundlefile)
-    finally:
-        try:
-            os.unlink(bundlefile)
-        except FileNotFoundError:
-            pass
-
-    return 1
-
-
-def _maybeaddpushbackpart(op, bookmark, newnode, oldnode, params):
-    if params.get(b'pushbackbookmarks'):
-        if op.reply and b'pushback' in op.reply.capabilities:
-            params = {
-                b'namespace': b'bookmarks',
-                b'key': bookmark,
-                b'new': newnode,
-                b'old': oldnode,
-            }
-            op.reply.newpart(b'pushkey', mandatoryparams=params.items())
-
-
-def bundle2pushkey(orig, op, part):
-    """Wrapper of bundle2.handlepushkey()
-
-    The only goal is to skip calling the original function if flag is set.
-    It's set if infinitepush push is happening.
-    """
-    if op.records[scratchbranchparttype + b'_skippushkey']:
-        if op.reply is not None:
-            rpart = op.reply.newpart(b'reply:pushkey')
-            rpart.addparam(b'in-reply-to', str(part.id), mandatory=False)
-            rpart.addparam(b'return', b'1', mandatory=False)
-        return 1
-
-    return orig(op, part)
-
-
-def bundle2handlephases(orig, op, part):
-    """Wrapper of bundle2.handlephases()
-
-    The only goal is to skip calling the original function if flag is set.
-    It's set if infinitepush push is happening.
-    """
-
-    if op.records[scratchbranchparttype + b'_skipphaseheads']:
-        return
-
-    return orig(op, part)
-
-
-def _asyncsavemetadata(root, nodes):
-    """starts a separate process that fills metadata for the nodes
-
-    This function creates a separate process and doesn't wait for its
-    completion. This was done to avoid slowing down pushes
-    """
-
-    maxnodes = 50
-    if len(nodes) > maxnodes:
-        return
-    nodesargs = []
-    for node in nodes:
-        nodesargs.append(b'--node')
-        nodesargs.append(node)
-    with open(os.devnull, b'w+b') as devnull:
-        cmdline = [
-            util.hgexecutable(),
-            b'debugfillinfinitepushmetadata',
-            b'-R',
-            root,
-        ] + nodesargs
-        # Process will run in background. We don't care about the return code
-        subprocess.Popen(
-            pycompat.rapply(procutil.tonativestr, cmdline),
-            close_fds=True,
-            shell=False,
-            stdin=devnull,
-            stdout=devnull,
-            stderr=devnull,
-        )
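
The removed module assembles a bundle2 and writes it to a temporary file in three
separate places (storetobundlestore, processparts and bundle2scratchbranch) before
handing the path to storebundle(). A minimal self-contained sketch of that recurring
pattern, using a hypothetical helper name and only the mercurial APIs already used
above:

    import os

    from mercurial import pycompat, util


    def writebundletotempfile(bundler):
        # Serialize the bundle20's chunks into a temporary file and return its
        # path; the caller unlinks the file once storebundle() has consumed it.
        buf = util.chunkbuffer(bundler.getchunks())
        fd, bundlefile = pycompat.mkstemp()
        try:
            with os.fdopen(fd, 'wb') as fp:
                fp.write(buf.read())
        except Exception:
            os.unlink(bundlefile)
            raise
        return bundlefile
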
--- a/hgext/infinitepush/bundleparts.py	Mon Aug 07 11:05:43 2023 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,126 +0,0 @@
-# Copyright 2017 Facebook, Inc.
-#
-# This software may be used and distributed according to the terms of the
-# GNU General Public License version 2 or any later version.
-
-
-from mercurial.i18n import _
-from mercurial.node import hex
-
-from mercurial import (
-    bundle2,
-    changegroup,
-    error,
-    extensions,
-    revsetlang,
-    util,
-)
-
-from . import common
-
-isremotebooksenabled = common.isremotebooksenabled
-
-scratchbranchparttype = b'b2x:infinitepush'
-
-
-def getscratchbranchparts(repo, peer, outgoing, ui, bookmark):
-    if not outgoing.missing:
-        raise error.Abort(_(b'no commits to push'))
-
-    if scratchbranchparttype not in bundle2.bundle2caps(peer):
-        raise error.Abort(
-            _(b'no server support for %r') % scratchbranchparttype
-        )
-
-    _validaterevset(
-        repo, revsetlang.formatspec(b'%ln', outgoing.missing), bookmark
-    )
-
-    supportedversions = changegroup.supportedoutgoingversions(repo)
-    # Explicitly avoid using '01' changegroup version in infinitepush to
-    # support general delta
-    supportedversions.discard(b'01')
-    cgversion = min(supportedversions)
-    _handlelfs(repo, outgoing.missing)
-    cg = changegroup.makestream(repo, outgoing, cgversion, b'push')
-
-    params = {}
-    params[b'cgversion'] = cgversion
-    if bookmark:
-        params[b'bookmark'] = bookmark
-        # 'bookprevnode' is necessary for the pushkey reply part
-        params[b'bookprevnode'] = b''
-        bookmarks = repo._bookmarks
-        if bookmark in bookmarks:
-            params[b'bookprevnode'] = hex(bookmarks[bookmark])
-
-    # Do not send pushback bundle2 part with bookmarks if remotenames extension
-    # is enabled. It will be handled manually in `_push()`
-    if not isremotebooksenabled(ui):
-        params[b'pushbackbookmarks'] = b'1'
-
-    parts = []
-
-    # .upper() marks this as a mandatory part: server will abort if there's no
-    #  handler
-    parts.append(
-        bundle2.bundlepart(
-            scratchbranchparttype.upper(),
-            advisoryparams=params.items(),
-            data=cg,
-        )
-    )
-
-    return parts
-
-
-def _validaterevset(repo, revset, bookmark):
-    """Abort if the revs to be pushed aren't valid for a scratch branch."""
-    if not repo.revs(revset):
-        raise error.Abort(_(b'nothing to push'))
-    if bookmark:
-        # Allow bundle with many heads only if no bookmark is specified
-        heads = repo.revs(b'heads(%r)', revset)
-        if len(heads) > 1:
-            raise error.Abort(
-                _(b'cannot push more than one head to a scratch branch')
-            )
-
-
-def _handlelfs(repo, missing):
-    """Special case if lfs is enabled
-
-    If lfs is enabled then we need to call prepush hook
-    to make sure large files are uploaded to lfs
-    """
-    try:
-        lfsmod = extensions.find(b'lfs')
-        lfsmod.wrapper.uploadblobsfromrevs(repo, missing)
-    except KeyError:
-        # Ignore if lfs extension is not enabled
-        return
-
-
-class copiedpart:
-    """a copy of unbundlepart content that can be consumed later"""
-
-    def __init__(self, part):
-        # copy "public properties"
-        self.type = part.type
-        self.id = part.id
-        self.mandatory = part.mandatory
-        self.mandatoryparams = part.mandatoryparams
-        self.advisoryparams = part.advisoryparams
-        self.params = part.params
-        self.mandatorykeys = part.mandatorykeys
-        # copy the buffer
-        self._io = util.stringio(part.read())
-
-    def consume(self):
-        return
-
-    def read(self, size=None):
-        if size is None:
-            return self._io.read()
-        else:
-            return self._io.read(size)
--- a/hgext/infinitepush/common.py	Mon Aug 07 11:05:43 2023 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,51 +0,0 @@
-# Copyright 2017 Facebook, Inc.
-#
-# This software may be used and distributed according to the terms of the
-# GNU General Public License version 2 or any later version.
-
-
-import os
-
-from mercurial.node import hex
-
-from mercurial import (
-    error,
-    extensions,
-    pycompat,
-)
-
-
-def isremotebooksenabled(ui):
-    return b'remotenames' in extensions._extensions and ui.configbool(
-        b'remotenames', b'bookmarks'
-    )
-
-
-def downloadbundle(repo, unknownbinhead):
-    index = repo.bundlestore.index
-    store = repo.bundlestore.store
-    bundleid = index.getbundle(hex(unknownbinhead))
-    if bundleid is None:
-        raise error.Abort(b'%s head is not known' % hex(unknownbinhead))
-    bundleraw = store.read(bundleid)
-    return _makebundlefromraw(bundleraw)
-
-
-def _makebundlefromraw(data):
-    fp = None
-    fd, bundlefile = pycompat.mkstemp()
-    try:  # guards bundlefile
-        try:  # guards fp
-            fp = os.fdopen(fd, 'wb')
-            fp.write(data)
-        finally:
-            fp.close()
-    except Exception:
-        try:
-            os.unlink(bundlefile)
-        except Exception:
-            # we would rather see the original exception
-            pass
-        raise
-
-    return bundlefile
--- a/hgext/infinitepush/fileindexapi.py	Mon Aug 07 11:05:43 2023 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,110 +0,0 @@
-# Infinite push
-#
-# Copyright 2016 Facebook, Inc.
-#
-# This software may be used and distributed according to the terms of the
-# GNU General Public License version 2 or any later version.
-"""
-    [infinitepush]
-    # Server-side option. Used only if indextype=disk.
-    # Filesystem path to the index store
-    indexpath = PATH
-"""
-
-
-import os
-
-from mercurial import util
-
-from mercurial.utils import stringutil
-
-from . import indexapi
-
-
-class fileindexapi(indexapi.indexapi):
-    def __init__(self, repo):
-        super(fileindexapi, self).__init__()
-        self._repo = repo
-        root = repo.ui.config(b'infinitepush', b'indexpath')
-        if not root:
-            root = os.path.join(b'scratchbranches', b'index')
-
-        self._nodemap = os.path.join(root, b'nodemap')
-        self._bookmarkmap = os.path.join(root, b'bookmarkmap')
-        self._metadatamap = os.path.join(root, b'nodemetadatamap')
-        self._lock = None
-
-    def __enter__(self):
-        self._lock = self._repo.wlock()
-        return self
-
-    def __exit__(self, exc_type, exc_val, exc_tb):
-        if self._lock:
-            self._lock.__exit__(exc_type, exc_val, exc_tb)
-
-    def addbundle(self, bundleid, nodesctx):
-        for node in nodesctx:
-            nodepath = os.path.join(self._nodemap, node.hex())
-            self._write(nodepath, bundleid)
-
-    def addbookmark(self, bookmark, node):
-        bookmarkpath = os.path.join(self._bookmarkmap, bookmark)
-        self._write(bookmarkpath, node)
-
-    def addmanybookmarks(self, bookmarks):
-        for bookmark, node in bookmarks.items():
-            self.addbookmark(bookmark, node)
-
-    def deletebookmarks(self, patterns):
-        for pattern in patterns:
-            for bookmark, _ in self._listbookmarks(pattern):
-                bookmarkpath = os.path.join(self._bookmarkmap, bookmark)
-                self._delete(bookmarkpath)
-
-    def getbundle(self, node):
-        nodepath = os.path.join(self._nodemap, node)
-        return self._read(nodepath)
-
-    def getnode(self, bookmark):
-        bookmarkpath = os.path.join(self._bookmarkmap, bookmark)
-        return self._read(bookmarkpath)
-
-    def getbookmarks(self, query):
-        return dict(self._listbookmarks(query))
-
-    def saveoptionaljsonmetadata(self, node, jsonmetadata):
-        vfs = self._repo.vfs
-        vfs.write(os.path.join(self._metadatamap, node), jsonmetadata)
-
-    def _listbookmarks(self, pattern):
-        if pattern.endswith(b'*'):
-            pattern = b're:^' + pattern[:-1] + b'.*'
-        kind, pat, matcher = stringutil.stringmatcher(pattern)
-        prefixlen = len(self._bookmarkmap) + 1
-        for dirpath, _, books in self._repo.vfs.walk(self._bookmarkmap):
-            for book in books:
-                bookmark = os.path.join(dirpath, book)[prefixlen:]
-                bookmark = util.pconvert(bookmark)
-                if not matcher(bookmark):
-                    continue
-                yield bookmark, self._read(os.path.join(dirpath, book))
-
-    def _write(self, path, value):
-        vfs = self._repo.vfs
-        dirname = vfs.dirname(path)
-        if not vfs.exists(dirname):
-            vfs.makedirs(dirname)
-
-        vfs.write(path, value)
-
-    def _read(self, path):
-        vfs = self._repo.vfs
-        if not vfs.exists(path):
-            return None
-        return vfs.read(path)
-
-    def _delete(self, path):
-        vfs = self._repo.vfs
-        if not vfs.exists(path):
-            return
-        return vfs.unlink(path)
--- a/hgext/infinitepush/indexapi.py	Mon Aug 07 11:05:43 2023 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,69 +0,0 @@
-# Infinite push
-#
-# Copyright 2016 Facebook, Inc.
-#
-# This software may be used and distributed according to the terms of the
-# GNU General Public License version 2 or any later version.
-
-
-class indexapi:
-    """Class that manages access to infinitepush index.
-
-    This class is a context manager, and all write operations (like
-    deletebookmarks, addbookmark, etc.) should use a `with` statement:
-
-      with index:
-          index.deletebookmarks(...)
-          ...
-    """
-
-    def __init__(self):
-        """Initializes the metadata store connection."""
-
-    def close(self):
-        """Cleans up the metadata store connection."""
-
-    def __enter__(self):
-        return self
-
-    def __exit__(self, exc_type, exc_val, exc_tb):
-        pass
-
-    def addbundle(self, bundleid, nodesctx):
-        """Takes a bundleid and a list of node contexts for each node
-        in that bundle and records that."""
-        raise NotImplementedError()
-
-    def addbookmark(self, bookmark, node):
-        """Takes a bookmark name and hash, and records mapping in the metadata
-        store."""
-        raise NotImplementedError()
-
-    def addmanybookmarks(self, bookmarks):
-        """Takes a dict with mapping from bookmark to hash and records mapping
-        in the metadata store."""
-        raise NotImplementedError()
-
-    def deletebookmarks(self, patterns):
-        """Accepts list of bookmarks and deletes them."""
-        raise NotImplementedError()
-
-    def getbundle(self, node):
-        """Returns the bundleid for the bundle that contains the given node."""
-        raise NotImplementedError()
-
-    def getnode(self, bookmark):
-        """Returns the node for the given bookmark. None if it doesn't exist."""
-        raise NotImplementedError()
-
-    def getbookmarks(self, query):
-        """Returns bookmarks that match the query"""
-        raise NotImplementedError()
-
-    def saveoptionaljsonmetadata(self, node, jsonmetadata):
-        """Saves optional metadata for a given node"""
-        raise NotImplementedError()
-
-
-class indexexception(Exception):
-    pass
--- a/hgext/infinitepush/schema.sql	Mon Aug 07 11:05:43 2023 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,33 +0,0 @@
-CREATE TABLE `bookmarkstonode` (
-  `node` varbinary(64) NOT NULL,
-  `bookmark` varbinary(512) NOT NULL,
-  `reponame` varbinary(255) NOT NULL,
-  PRIMARY KEY (`reponame`,`bookmark`)
-) ENGINE=InnoDB DEFAULT CHARSET=utf8;
-
-CREATE TABLE `bundles` (
-  `bundle` varbinary(512) NOT NULL,
-  `reponame` varbinary(255) NOT NULL,
-  PRIMARY KEY (`bundle`,`reponame`)
-) ENGINE=InnoDB DEFAULT CHARSET=utf8;
-
-CREATE TABLE `nodestobundle` (
-  `node` varbinary(64) NOT NULL,
-  `bundle` varbinary(512) NOT NULL,
-  `reponame` varbinary(255) NOT NULL,
-  PRIMARY KEY (`node`,`reponame`)
-) ENGINE=InnoDB DEFAULT CHARSET=utf8;
-
-CREATE TABLE `nodesmetadata` (
-  `node` varbinary(64) NOT NULL,
-  `message` mediumblob NOT NULL,
-  `p1` varbinary(64) NOT NULL,
-  `p2` varbinary(64) DEFAULT NULL,
-  `author` varbinary(255) NOT NULL,
-  `committer` varbinary(255) DEFAULT NULL,
-  `author_date` bigint(20) NOT NULL,
-  `committer_date` bigint(20) DEFAULT NULL,
-  `reponame` varbinary(255) NOT NULL,
-  `optional_json_metadata` mediumblob,
-  PRIMARY KEY (`reponame`,`node`)
-) ENGINE=InnoDB DEFAULT CHARSET=utf8;
--- a/hgext/infinitepush/sqlindexapi.py	Mon Aug 07 11:05:43 2023 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,295 +0,0 @@
-# Infinite push
-#
-# Copyright 2016 Facebook, Inc.
-#
-# This software may be used and distributed according to the terms of the
-# GNU General Public License version 2 or any later version.
-
-
-import logging
-import os
-import time
-
-import warnings
-import mysql.connector
-
-from . import indexapi
-
-
-def _convertbookmarkpattern(pattern):
-    pattern = pattern.replace(b'_', b'\\_')
-    pattern = pattern.replace(b'%', b'\\%')
-    if pattern.endswith(b'*'):
-        pattern = pattern[:-1] + b'%'
-    return pattern
-
-
-class sqlindexapi(indexapi.indexapi):
-    """
-    Sql backend for infinitepush index. See schema.sql
-    """
-
-    def __init__(
-        self,
-        reponame,
-        host,
-        port,
-        database,
-        user,
-        password,
-        logfile,
-        loglevel,
-        waittimeout=300,
-        locktimeout=120,
-    ):
-        super(sqlindexapi, self).__init__()
-        self.reponame = reponame
-        self.sqlargs = {
-            b'host': host,
-            b'port': port,
-            b'database': database,
-            b'user': user,
-            b'password': password,
-        }
-        self.sqlconn = None
-        self.sqlcursor = None
-        if not logfile:
-            logfile = os.devnull
-        logging.basicConfig(filename=logfile)
-        self.log = logging.getLogger()
-        self.log.setLevel(loglevel)
-        self._connected = False
-        self._waittimeout = waittimeout
-        self._locktimeout = locktimeout
-
-    def sqlconnect(self):
-        if self.sqlconn:
-            raise indexapi.indexexception(b"SQL connection already open")
-        if self.sqlcursor:
-            raise indexapi.indexexception(
-                b"SQL cursor already open without connection"
-            )
-        retry = 3
-        while True:
-            try:
-                self.sqlconn = mysql.connector.connect(**self.sqlargs)
-
-                # Code is copy-pasted from hgsql. Bug fixes need to be
-                # back-ported!
-                # The default behavior is to return byte arrays, but we
-                # need strings. This custom converter returns strings.
-                self.sqlconn.set_converter_class(CustomConverter)
-                self.sqlconn.autocommit = False
-                break
-            except mysql.connector.errors.Error:
-                # mysql can be flakey occasionally, so do some minimal
-                # retrying.
-                retry -= 1
-                if retry == 0:
-                    raise
-                time.sleep(0.2)
-
-        waittimeout = self.sqlconn.converter.escape(b'%s' % self._waittimeout)
-
-        self.sqlcursor = self.sqlconn.cursor()
-        self.sqlcursor.execute(b"SET wait_timeout=%s" % waittimeout)
-        self.sqlcursor.execute(
-            b"SET innodb_lock_wait_timeout=%s" % self._locktimeout
-        )
-        self._connected = True
-
-    def close(self):
-        """Cleans up the metadata store connection."""
-        with warnings.catch_warnings():
-            warnings.simplefilter(b"ignore")
-            self.sqlcursor.close()
-            self.sqlconn.close()
-        self.sqlcursor = None
-        self.sqlconn = None
-
-    def __enter__(self):
-        if not self._connected:
-            self.sqlconnect()
-        return self
-
-    def __exit__(self, exc_type, exc_val, exc_tb):
-        if exc_type is None:
-            self.sqlconn.commit()
-        else:
-            self.sqlconn.rollback()
-
-    def addbundle(self, bundleid, nodesctx):
-        if not self._connected:
-            self.sqlconnect()
-        self.log.info(b"ADD BUNDLE %r %r" % (self.reponame, bundleid))
-        self.sqlcursor.execute(
-            b"INSERT INTO bundles(bundle, reponame) VALUES (%s, %s)",
-            params=(bundleid, self.reponame),
-        )
-        for ctx in nodesctx:
-            self.sqlcursor.execute(
-                b"INSERT INTO nodestobundle(node, bundle, reponame) "
-                b"VALUES (%s, %s, %s) ON DUPLICATE KEY UPDATE "
-                b"bundle=VALUES(bundle)",
-                params=(ctx.hex(), bundleid, self.reponame),
-            )
-
-            extra = ctx.extra()
-            author_name = ctx.user()
-            committer_name = extra.get(b'committer', ctx.user())
-            author_date = int(ctx.date()[0])
-            committer_date = int(extra.get(b'committer_date', author_date))
-            self.sqlcursor.execute(
-                b"INSERT IGNORE INTO nodesmetadata(node, message, p1, p2, "
-                b"author, committer, author_date, committer_date, "
-                b"reponame) VALUES "
-                b"(%s, %s, %s, %s, %s, %s, %s, %s, %s)",
-                params=(
-                    ctx.hex(),
-                    ctx.description(),
-                    ctx.p1().hex(),
-                    ctx.p2().hex(),
-                    author_name,
-                    committer_name,
-                    author_date,
-                    committer_date,
-                    self.reponame,
-                ),
-            )
-
-    def addbookmark(self, bookmark, node):
-        """Takes a bookmark name and hash, and records mapping in the metadata
-        store."""
-        if not self._connected:
-            self.sqlconnect()
-        self.log.info(
-            b"ADD BOOKMARKS %r bookmark: %r node: %r"
-            % (self.reponame, bookmark, node)
-        )
-        self.sqlcursor.execute(
-            b"INSERT INTO bookmarkstonode(bookmark, node, reponame) "
-            b"VALUES (%s, %s, %s) ON DUPLICATE KEY UPDATE node=VALUES(node)",
-            params=(bookmark, node, self.reponame),
-        )
-
-    def addmanybookmarks(self, bookmarks):
-        if not self._connected:
-            self.sqlconnect()
-        args = []
-        values = []
-        for bookmark, node in bookmarks.items():
-            args.append(b'(%s, %s, %s)')
-            values.extend((bookmark, node, self.reponame))
-        args = b','.join(args)
-
-        self.sqlcursor.execute(
-            b"INSERT INTO bookmarkstonode(bookmark, node, reponame) "
-            b"VALUES %s ON DUPLICATE KEY UPDATE node=VALUES(node)" % args,
-            params=values,
-        )
-
-    def deletebookmarks(self, patterns):
-        """Accepts a list of bookmark patterns and deletes the matching bookmarks.
-        """
-        if not self._connected:
-            self.sqlconnect()
-        self.log.info(b"DELETE BOOKMARKS: %s" % patterns)
-        for pattern in patterns:
-            pattern = _convertbookmarkpattern(pattern)
-            self.sqlcursor.execute(
-                b"DELETE from bookmarkstonode WHERE bookmark LIKE (%s) "
-                b"and reponame = %s",
-                params=(pattern, self.reponame),
-            )
-
-    def getbundle(self, node):
-        """Returns the bundleid for the bundle that contains the given node."""
-        if not self._connected:
-            self.sqlconnect()
-        self.log.info(b"GET BUNDLE %r %r" % (self.reponame, node))
-        self.sqlcursor.execute(
-            b"SELECT bundle from nodestobundle "
-            b"WHERE node = %s AND reponame = %s",
-            params=(node, self.reponame),
-        )
-        result = self.sqlcursor.fetchall()
-        if len(result) != 1 or len(result[0]) != 1:
-            self.log.info(b"No matching node")
-            return None
-        bundle = result[0][0]
-        self.log.info(b"Found bundle %r" % bundle)
-        return bundle
-
-    def getnode(self, bookmark):
-        """Returns the node for the given bookmark. None if it doesn't exist."""
-        if not self._connected:
-            self.sqlconnect()
-        self.log.info(
-            b"GET NODE reponame: %r bookmark: %r" % (self.reponame, bookmark)
-        )
-        self.sqlcursor.execute(
-            b"SELECT node from bookmarkstonode WHERE "
-            b"bookmark = %s AND reponame = %s",
-            params=(bookmark, self.reponame),
-        )
-        result = self.sqlcursor.fetchall()
-        if len(result) != 1 or len(result[0]) != 1:
-            self.log.info(b"No matching bookmark")
-            return None
-        node = result[0][0]
-        self.log.info(b"Found node %r" % node)
-        return node
-
-    def getbookmarks(self, query):
-        if not self._connected:
-            self.sqlconnect()
-        self.log.info(
-            b"QUERY BOOKMARKS reponame: %r query: %r" % (self.reponame, query)
-        )
-        query = _convertbookmarkpattern(query)
-        self.sqlcursor.execute(
-            b"SELECT bookmark, node from bookmarkstonode WHERE "
-            b"reponame = %s AND bookmark LIKE %s",
-            params=(self.reponame, query),
-        )
-        result = self.sqlcursor.fetchall()
-        bookmarks = {}
-        for row in result:
-            if len(row) != 2:
-                self.log.info(b"Bad row returned: %s" % row)
-                continue
-            bookmarks[row[0]] = row[1]
-        return bookmarks
-
-    def saveoptionaljsonmetadata(self, node, jsonmetadata):
-        if not self._connected:
-            self.sqlconnect()
-        self.log.info(
-            (
-                b"INSERT METADATA, QUERY BOOKMARKS reponame: %r "
-                + b"node: %r, jsonmetadata: %s"
-            )
-            % (self.reponame, node, jsonmetadata)
-        )
-
-        self.sqlcursor.execute(
-            b"UPDATE nodesmetadata SET optional_json_metadata=%s WHERE "
-            b"reponame=%s AND node=%s",
-            params=(jsonmetadata, self.reponame, node),
-        )
-
-
-class CustomConverter(mysql.connector.conversion.MySQLConverter):
-    """Ensure that all values being returned are returned as Python strings
-    (versus the default byte arrays)."""
-
-    def _STRING_to_python(self, value, dsc=None):
-        return str(value)
-
-    def _VAR_STRING_to_python(self, value, dsc=None):
-        return str(value)
-
-    def _BLOB_to_python(self, value, dsc=None):
-        return str(value)
--- a/hgext/infinitepush/store.py	Mon Aug 07 11:05:43 2023 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,194 +0,0 @@
-# This software may be used and distributed according to the terms of the
-# GNU General Public License version 2 or any later version.
-
-# based on bundleheads extension by Gregory Szorc <gps@mozilla.com>
-
-
-import abc
-import os
-import subprocess
-
-from mercurial.node import hex
-from mercurial.pycompat import open
-from mercurial import pycompat
-from mercurial.utils import (
-    hashutil,
-    procutil,
-)
-
-
-class BundleWriteException(Exception):
-    pass
-
-
-class BundleReadException(Exception):
-    pass
-
-
-class abstractbundlestore:  # pytype: disable=ignored-metaclass
-    """Defines the interface for bundle stores.
-
-    A bundle store is an entity that stores raw bundle data. It is a simple
-    key-value store. However, the keys are chosen by the store. The keys can
-    be any Python object understood by the corresponding bundle index (see
-    ``abstractbundleindex`` below).
-    """
-
-    __metaclass__ = abc.ABCMeta
-
-    @abc.abstractmethod
-    def write(self, data):
-        """Write bundle data to the store.
-
-        This function receives the raw data to be written as bytes.
-        Throws BundleWriteException
-        The key of the written data MUST be returned.
-        """
-
-    @abc.abstractmethod
-    def read(self, key):
-        """Obtain bundle data for a key.
-
-        Returns None if the bundle isn't known.
-        Throws BundleReadException
-        The returned object should be a file object supporting read()
-        and close().
-        """
-
-
-class filebundlestore:
-    """bundle store in filesystem
-
-    meant for storing bundles somewhere on disk and on network filesystems
-    """
-
-    def __init__(self, ui, repo):
-        self.ui = ui
-        self.repo = repo
-        self.storepath = ui.configpath(b'scratchbranch', b'storepath')
-        if not self.storepath:
-            self.storepath = self.repo.vfs.join(
-                b"scratchbranches", b"filebundlestore"
-            )
-        if not os.path.exists(self.storepath):
-            os.makedirs(self.storepath)
-
-    def _dirpath(self, hashvalue):
-        """First two bytes of the hash are the name of the upper
-        level directory, next two bytes are the name of the
-        next level directory"""
-        return os.path.join(self.storepath, hashvalue[0:2], hashvalue[2:4])
-
-    def _filepath(self, filename):
-        return os.path.join(self._dirpath(filename), filename)
-
-    def write(self, data):
-        filename = hex(hashutil.sha1(data).digest())
-        dirpath = self._dirpath(filename)
-
-        if not os.path.exists(dirpath):
-            os.makedirs(dirpath)
-
-        with open(self._filepath(filename), b'wb') as f:
-            f.write(data)
-
-        return filename
-
-    def read(self, key):
-        try:
-            with open(self._filepath(key), b'rb') as f:
-                return f.read()
-        except IOError:
-            return None
-
-
-def format_placeholders_args(args, filename=None, handle=None):
-    """Formats `args` with Infinitepush replacements.
-
-    Hack to get `str.format()`-ed strings working in a BC way with
-    bytes.
-    """
-    formatted_args = []
-    for arg in args:
-        if filename and arg == b'{filename}':
-            formatted_args.append(filename)
-        elif handle and arg == b'{handle}':
-            formatted_args.append(handle)
-        else:
-            formatted_args.append(arg)
-    return formatted_args
-
-
-class externalbundlestore(abstractbundlestore):
-    def __init__(self, put_binary, put_args, get_binary, get_args):
-        """
-        `put_binary` - path to binary file which uploads bundle to external
-            storage and prints key to stdout
-        `put_args` - list of additional args for `put_binary`; a {filename}
-                     placeholder may be used.
-        `get_binary` - path to binary file which accepts filename and key
-            (in that order), downloads bundle from store and saves it to file
-        `get_args` - list of additional args for `get_binary`; {filename}
-                     and {handle} placeholders may be used.
-        """
-
-        self.put_args = put_args
-        self.get_args = get_args
-        self.put_binary = put_binary
-        self.get_binary = get_binary
-
-    def _call_binary(self, args):
-        p = subprocess.Popen(
-            pycompat.rapply(procutil.tonativestr, args),
-            stdout=subprocess.PIPE,
-            stderr=subprocess.PIPE,
-            close_fds=True,
-        )
-        stdout, stderr = p.communicate()
-        returncode = p.returncode
-        return returncode, stdout, stderr
-
-    def write(self, data):
-        # Won't work on Windows because you can't open the file a second time
-        # without closing it
-        # TODO: rewrite without str.format() and replace NamedTemporaryFile()
-        # with pycompat.namedtempfile()
-        with pycompat.namedtempfile() as temp:
-            temp.write(data)
-            temp.flush()
-            temp.seek(0)
-            formatted_args = format_placeholders_args(
-                self.put_args, filename=temp.name
-            )
-            returncode, stdout, stderr = self._call_binary(
-                [self.put_binary] + formatted_args
-            )
-
-            if returncode != 0:
-                raise BundleWriteException(
-                    b'Failed to upload to external store: %s' % stderr
-                )
-            stdout_lines = stdout.splitlines()
-            if len(stdout_lines) == 1:
-                return stdout_lines[0]
-            else:
-                raise BundleWriteException(
-                    b'Bad output from %s: %s' % (self.put_binary, stdout)
-                )
-
-    def read(self, handle):
-        # Won't work on Windows because you can't open the file a second time
-        # without closing it
-        with pycompat.namedtempfile() as temp:
-            formatted_args = format_placeholders_args(
-                self.get_args, filename=temp.name, handle=handle
-            )
-            returncode, stdout, stderr = self._call_binary(
-                [self.get_binary] + formatted_args
-            )
-
-            if returncode != 0:
-                raise BundleReadException(
-                    b'Failed to download from external store: %s' % stderr
-                )
-            return temp.read()
--- a/hgext/journal.py	Mon Aug 07 11:05:43 2023 +0200
+++ b/hgext/journal.py	Mon Aug 07 11:08:00 2023 +0200
@@ -66,13 +66,13 @@
 
 # Journal recording, register hooks and storage object
 def extsetup(ui):
-    extensions.wrapfunction(dispatch, b'runcommand', runcommand)
-    extensions.wrapfunction(bookmarks.bmstore, b'_write', recordbookmarks)
+    extensions.wrapfunction(dispatch, 'runcommand', runcommand)
+    extensions.wrapfunction(bookmarks.bmstore, '_write', recordbookmarks)
     extensions.wrapfilecache(
         localrepo.localrepository, b'dirstate', wrapdirstate
     )
-    extensions.wrapfunction(hg, b'postshare', wrappostshare)
-    extensions.wrapfunction(hg, b'copystore', unsharejournal)
+    extensions.wrapfunction(hg, 'postshare', wrappostshare)
+    extensions.wrapfunction(hg, 'copystore', unsharejournal)
 
 
 def reposetup(ui, repo):
@@ -127,7 +127,10 @@
     repo = store._repo
     if util.safehasattr(repo, 'journal'):
         oldmarks = bookmarks.bmstore(repo)
-        for mark, value in store.items():
+        all_marks = set(b for b, n in oldmarks.items())
+        all_marks.update(b for b, n in store.items())
+        for mark in sorted(all_marks):
+            value = store.get(mark, repo.nullid)
             oldvalue = oldmarks.get(mark, repo.nullid)
             if value != oldvalue:
                 repo.journal.record(bookmarktype, mark, oldvalue, value)
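
The new recordbookmarks hunk above compares the union of the old and new bookmark
names, so that deletions, and not just additions or moves, are written to the
journal. A minimal self-contained sketch of that comparison (the function and
argument names here are illustrative, not part of the changeset):

    def diff_bookmarks(oldmarks, newmarks, nullid):
        # Yield (mark, oldvalue, newvalue) for every bookmark that changed.
        # Iterating over the union of both name sets means a deleted bookmark
        # shows up with newvalue == nullid, and a brand new one with
        # oldvalue == nullid.
        for mark in sorted(set(oldmarks) | set(newmarks)):
            oldvalue = oldmarks.get(mark, nullid)
            newvalue = newmarks.get(mark, nullid)
            if oldvalue != newvalue:
                yield mark, oldvalue, newvalue
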
--- a/hgext/keyword.py	Mon Aug 07 11:05:43 2023 +0200
+++ b/hgext/keyword.py	Mon Aug 07 11:08:00 2023 +0200
@@ -131,7 +131,7 @@
 )
 
 # webcommands that do not act on keywords
-nokwwebcommands = b'annotate changeset rev filediff diff comparison'
+nokwwebcommands = 'annotate changeset rev filediff diff comparison'
 
 # hg commands that trigger expansion only when writing to working dir,
 # not when reading filelog, and unexpand when reading from working dir
@@ -806,14 +806,14 @@
         kwtools[b'hgcmd'] = cmd
         return cmd, func, args, options, cmdoptions
 
-    extensions.wrapfunction(dispatch, b'_parse', kwdispatch_parse)
+    extensions.wrapfunction(dispatch, '_parse', kwdispatch_parse)
 
-    extensions.wrapfunction(context.filectx, b'cmp', kwfilectx_cmp)
-    extensions.wrapfunction(patch.patchfile, b'__init__', kwpatchfile_init)
-    extensions.wrapfunction(patch, b'diff', kwdiff)
-    extensions.wrapfunction(cmdutil, b'amend', kw_amend)
-    extensions.wrapfunction(cmdutil, b'copy', kw_copy)
-    extensions.wrapfunction(cmdutil, b'dorecord', kw_dorecord)
+    extensions.wrapfunction(context.filectx, 'cmp', kwfilectx_cmp)
+    extensions.wrapfunction(patch.patchfile, '__init__', kwpatchfile_init)
+    extensions.wrapfunction(patch, 'diff', kwdiff)
+    extensions.wrapfunction(cmdutil, 'amend', kw_amend)
+    extensions.wrapfunction(cmdutil, 'copy', kw_copy)
+    extensions.wrapfunction(cmdutil, 'dorecord', kw_dorecord)
     for c in nokwwebcommands.split():
         extensions.wrapfunction(webcommands, c, kwweb_skip)
 
--- a/hgext/largefiles/__init__.py	Mon Aug 07 11:05:43 2023 +0200
+++ b/hgext/largefiles/__init__.py	Mon Aug 07 11:08:00 2023 +0200
@@ -184,7 +184,7 @@
     )
 
     extensions.wrapfunction(
-        wireprotov1server.commands[b'heads'], b'func', proto.heads
+        wireprotov1server.commands[b'heads'], 'func', proto.heads
     )
     # TODO also wrap wireproto.commandsv2 once heads is implemented there.
 
@@ -193,7 +193,7 @@
         if name == b'rebase':
             # TODO: teach exthelper to handle this
             extensions.wrapfunction(
-                module, b'rebase', overrides.overriderebasecmd
+                module, 'rebase', overrides.overriderebasecmd
             )
 
 
--- a/hgext/largefiles/overrides.py	Mon Aug 07 11:05:43 2023 +0200
+++ b/hgext/largefiles/overrides.py	Mon Aug 07 11:08:00 2023 +0200
@@ -243,7 +243,7 @@
 
 # For overriding mercurial.hgweb.webcommands so that largefiles will
 # appear at their right place in the manifests.
-@eh.wrapfunction(webcommands, b'decodepath')
+@eh.wrapfunction(webcommands, 'decodepath')
 def decodepath(orig, path):
     return lfutil.splitstandin(path) or path
 
@@ -273,7 +273,7 @@
     return orig(ui, repo, *pats, **opts)
 
 
-@eh.wrapfunction(cmdutil, b'add')
+@eh.wrapfunction(cmdutil, 'add')
 def cmdutiladd(orig, ui, repo, matcher, prefix, uipathfn, explicitonly, **opts):
     # The --normal flag short circuits this override
     if opts.get('normal'):
@@ -289,7 +289,7 @@
     return bad
 
 
-@eh.wrapfunction(cmdutil, b'remove')
+@eh.wrapfunction(cmdutil, 'remove')
 def cmdutilremove(
     orig, ui, repo, matcher, prefix, uipathfn, after, force, subrepos, dryrun
 ):
@@ -313,7 +313,7 @@
     )
 
 
-@eh.wrapfunction(dirstate.dirstate, b'_changing')
+@eh.wrapfunction(dirstate.dirstate, '_changing')
 @contextlib.contextmanager
 def _changing(orig, self, repo, change_type):
     pre = sub_dirstate = getattr(self, '_sub_dirstate', None)
@@ -334,7 +334,7 @@
         self._sub_dirstate = pre
 
 
-@eh.wrapfunction(dirstate.dirstate, b'running_status')
+@eh.wrapfunction(dirstate.dirstate, 'running_status')
 @contextlib.contextmanager
 def running_status(orig, self, repo):
     pre = sub_dirstate = getattr(self, '_sub_dirstate', None)
@@ -355,7 +355,7 @@
         self._sub_dirstate = pre
 
 
-@eh.wrapfunction(subrepo.hgsubrepo, b'status')
+@eh.wrapfunction(subrepo.hgsubrepo, 'status')
 def overridestatusfn(orig, repo, rev2, **opts):
     with lfstatus(repo._repo):
         return orig(repo, rev2, **opts)
@@ -367,7 +367,7 @@
         return orig(ui, repo, *pats, **opts)
 
 
-@eh.wrapfunction(subrepo.hgsubrepo, b'dirty')
+@eh.wrapfunction(subrepo.hgsubrepo, 'dirty')
 def overridedirty(orig, repo, ignoreupdate=False, missing=False):
     with lfstatus(repo._repo):
         return orig(repo, ignoreupdate=ignoreupdate, missing=missing)
@@ -485,10 +485,10 @@
         return lambda ctx: match
 
     wrappedmatchandpats = extensions.wrappedfunction(
-        scmutil, b'matchandpats', overridematchandpats
+        scmutil, 'matchandpats', overridematchandpats
     )
     wrappedmakefilematcher = extensions.wrappedfunction(
-        logcmdutil, b'_makenofollowfilematcher', overridemakefilematcher
+        logcmdutil, '_makenofollowfilematcher', overridemakefilematcher
     )
     with wrappedmatchandpats, wrappedmakefilematcher:
         return orig(ui, repo, *pats, **opts)
@@ -554,7 +554,7 @@
 # The overridden function filters the unknown files by removing any
 # largefiles. This makes the merge proceed and we can then handle this
 # case further in the overridden calculateupdates function below.
-@eh.wrapfunction(merge, b'_checkunknownfile')
+@eh.wrapfunction(merge, '_checkunknownfile')
 def overridecheckunknownfile(
     origfn, dirstate, wvfs, dircache, wctx, mctx, f, f2=None
 ):
@@ -589,7 +589,7 @@
 # Finally, the merge.applyupdates function will then take care of
 # writing the files into the working copy and lfcommands.updatelfiles
 # will update the largefiles.
-@eh.wrapfunction(merge, b'calculateupdates')
+@eh.wrapfunction(merge, 'calculateupdates')
 def overridecalculateupdates(
     origfn, repo, p1, p2, pas, branchmerge, force, acceptremote, *args, **kwargs
 ):
@@ -700,7 +700,7 @@
     return mresult
 
 
-@eh.wrapfunction(mergestatemod, b'recordupdates')
+@eh.wrapfunction(mergestatemod, 'recordupdates')
 def mergerecordupdates(orig, repo, actions, branchmerge, getfiledata):
     if MERGE_ACTION_LARGEFILE_MARK_REMOVED in actions:
         lfdirstate = lfutil.openlfdirstate(repo.ui, repo)
@@ -716,7 +716,7 @@
 
 # Override filemerge to prompt the user about how they wish to merge
 # largefiles. This will handle identical edits without prompting the user.
-@eh.wrapfunction(filemerge, b'filemerge')
+@eh.wrapfunction(filemerge, 'filemerge')
 def overridefilemerge(
     origfn, repo, wctx, mynode, orig, fcd, fco, fca, labels=None
 ):
@@ -748,7 +748,7 @@
     return 0, False
 
 
-@eh.wrapfunction(copiesmod, b'pathcopies')
+@eh.wrapfunction(copiesmod, 'pathcopies')
 def copiespathcopies(orig, ctx1, ctx2, match=None):
     copies = orig(ctx1, ctx2, match=match)
     updated = {}
@@ -764,7 +764,7 @@
 # checks if the destination largefile already exists. It also keeps a
 # list of copied files so that the largefiles can be copied and the
 # dirstate updated.
-@eh.wrapfunction(cmdutil, b'copy')
+@eh.wrapfunction(cmdutil, 'copy')
 def overridecopy(orig, ui, repo, pats, opts, rename=False):
     # doesn't remove largefile on rename
     if len(pats) < 2:
@@ -793,7 +793,7 @@
         match = orig(ctx, pats, opts, globbed, default, badfn=badfn)
         return composenormalfilematcher(match, manifest)
 
-    with extensions.wrappedfunction(scmutil, b'match', normalfilesmatchfn):
+    with extensions.wrappedfunction(scmutil, 'match', normalfilesmatchfn):
         try:
             result = orig(ui, repo, pats, opts, rename)
         except error.Abort as e:
@@ -887,8 +887,8 @@
             copiedfiles.append((src, dest))
             orig(src, dest, *args, **kwargs)
 
-        with extensions.wrappedfunction(util, b'copyfile', overridecopyfile):
-            with extensions.wrappedfunction(scmutil, b'match', overridematch):
+        with extensions.wrappedfunction(util, 'copyfile', overridecopyfile):
+            with extensions.wrappedfunction(scmutil, 'match', overridematch):
                 result += orig(ui, repo, listpats, opts, rename)
 
         lfdirstate = lfutil.openlfdirstate(ui, repo)
@@ -936,7 +936,7 @@
 # commits. Update the standins then run the original revert, changing
 # the matcher to hit standins instead of largefiles. Based on the
 # resulting standins update the largefiles.
-@eh.wrapfunction(cmdutil, b'revert')
+@eh.wrapfunction(cmdutil, 'revert')
 def overriderevert(orig, ui, repo, ctx, *pats, **opts):
     # Because we put the standins in a bad state (by updating them)
     # and then return them to a correct state we need to lock to
@@ -999,7 +999,7 @@
             m.matchfn = matchfn
             return m
 
-        with extensions.wrappedfunction(scmutil, b'match', overridematch):
+        with extensions.wrappedfunction(scmutil, 'match', overridematch):
             orig(ui, repo, ctx, *pats, **opts)
 
         newstandins = lfutil.getstandinsstate(repo)
@@ -1079,7 +1079,7 @@
     return orig(ui, repo, *args, **kwargs)
 
 
-@eh.wrapfunction(exchange, b'pushoperation')
+@eh.wrapfunction(exchange, 'pushoperation')
 def exchangepushoperation(orig, *args, **kwargs):
     """Override pushoperation constructor and store lfrevs parameter"""
     lfrevs = kwargs.pop('lfrevs', None)
@@ -1139,7 +1139,7 @@
     return orig(ui, source, dest, **opts)
 
 
-@eh.wrapfunction(hg, b'clone')
+@eh.wrapfunction(hg, 'clone')
 def hgclone(orig, ui, opts, *args, **kwargs):
     result = orig(ui, opts, *args, **kwargs)
 
@@ -1195,7 +1195,7 @@
             kwargs['inmemory'] = False
             return orig(*args, **kwargs)
 
-        extensions.wrapfunction(rebase, b'_dorebase', _dorebase)
+        extensions.wrapfunction(rebase, '_dorebase', _dorebase)
 
 
 @eh.wrapcommand(b'archive')
@@ -1204,13 +1204,13 @@
         return orig(ui, repo.unfiltered(), dest, **opts)
 
 
-@eh.wrapfunction(webcommands, b'archive')
+@eh.wrapfunction(webcommands, 'archive')
 def hgwebarchive(orig, web):
     with lfstatus(web.repo):
         return orig(web)
 
 
-@eh.wrapfunction(archival, b'archive')
+@eh.wrapfunction(archival, 'archive')
 def overridearchive(
     orig,
     repo,
@@ -1307,7 +1307,7 @@
     archiver.done()
 
 
-@eh.wrapfunction(subrepo.hgsubrepo, b'archive')
+@eh.wrapfunction(subrepo.hgsubrepo, 'archive')
 def hgsubrepoarchive(orig, repo, archiver, prefix, match=None, decode=True):
     lfenabled = util.safehasattr(repo._repo, b'_largefilesenabled')
     if not lfenabled or not repo._repo.lfstatus:
@@ -1375,7 +1375,7 @@
 # standin until a commit. cmdutil.bailifchanged() raises an exception
 # if the repo has uncommitted changes. Wrap it to also check if
 # largefiles were changed. This is used by bisect, backout and fetch.
-@eh.wrapfunction(cmdutil, b'bailifchanged')
+@eh.wrapfunction(cmdutil, 'bailifchanged')
 def overridebailifchanged(orig, repo, *args, **kwargs):
     orig(repo, *args, **kwargs)
     with lfstatus(repo):
@@ -1384,13 +1384,13 @@
         raise error.Abort(_(b'uncommitted changes'))
 
 
-@eh.wrapfunction(cmdutil, b'postcommitstatus')
+@eh.wrapfunction(cmdutil, 'postcommitstatus')
 def postcommitstatus(orig, repo, *args, **kwargs):
     with lfstatus(repo):
         return orig(repo, *args, **kwargs)
 
 
-@eh.wrapfunction(cmdutil, b'forget')
+@eh.wrapfunction(cmdutil, 'forget')
 def cmdutilforget(
     orig, ui, repo, match, prefix, uipathfn, explicitonly, dryrun, interactive
 ):
@@ -1559,7 +1559,7 @@
         orig(ui, repo, *pats, **opts)
 
 
-@eh.wrapfunction(scmutil, b'addremove')
+@eh.wrapfunction(scmutil, 'addremove')
 def scmutiladdremove(
     orig,
     repo,
@@ -1787,7 +1787,7 @@
     return err
 
 
-@eh.wrapfunction(merge, b'_update')
+@eh.wrapfunction(merge, '_update')
 def mergeupdate(orig, repo, node, branchmerge, force, *args, **kwargs):
     matcher = kwargs.get('matcher', None)
     # note if this is a partial update
@@ -1880,7 +1880,7 @@
         return result
 
 
-@eh.wrapfunction(scmutil, b'marktouched')
+@eh.wrapfunction(scmutil, 'marktouched')
 def scmutilmarktouched(orig, repo, files, *args, **kwargs):
     result = orig(repo, files, *args, **kwargs)
 
@@ -1901,8 +1901,8 @@
     return result
 
 
-@eh.wrapfunction(upgrade_actions, b'preservedrequirements')
-@eh.wrapfunction(upgrade_actions, b'supporteddestrequirements')
+@eh.wrapfunction(upgrade_actions, 'preservedrequirements')
+@eh.wrapfunction(upgrade_actions, 'supporteddestrequirements')
 def upgraderequirements(orig, repo):
     reqs = orig(repo)
     if b'largefiles' in repo.requirements:
@@ -1913,7 +1913,7 @@
 _lfscheme = b'largefile://'
 
 
-@eh.wrapfunction(urlmod, b'open')
+@eh.wrapfunction(urlmod, 'open')
 def openlargefile(orig, ui, url_, data=None, **kwargs):
     if url_.startswith(_lfscheme):
         if data:
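
The hunks above, and those in the files that follow, are one mechanical sweep: the attribute name passed to eh.wrapfunction / extensions.wrapfunction is now a native str instead of a bytes literal (note that eh.wrapcommand(b'archive') and extensions.wrapcommand(..., b'clone', ...) keep their bytes arguments, since command-table keys remain bytes). As a rough illustration of what such a call does, here is a small self-contained stand-in; it is not Mercurial's real extensions module, and the wrapped module and functions are invented:

    import types

    def wrapfunction(container, funcname, wrapper):
        # Simplified stand-in for mercurial.extensions.wrapfunction: replace
        # container.<funcname> with a closure that calls `wrapper`, passing
        # the original callable as the first argument.  The attribute name
        # is a native str, matching the sweep above.
        origfn = getattr(container, funcname)

        def wrapped(*args, **kwargs):
            return wrapper(origfn, *args, **kwargs)

        setattr(container, funcname, wrapped)
        return origfn

    # Invented module and wrapper, purely for illustration.
    mod = types.SimpleNamespace(greet=lambda name: "hello %s" % name)

    def loudgreet(orig, name):
        return orig(name).upper()

    wrapfunction(mod, 'greet', loudgreet)   # 'greet', not b'greet'
    print(mod.greet("largefiles"))          # -> HELLO LARGEFILES
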
--- a/hgext/largefiles/proto.py	Mon Aug 07 11:05:43 2023 +0200
+++ b/hgext/largefiles/proto.py	Mon Aug 07 11:08:00 2023 +0200
@@ -200,7 +200,7 @@
 
 
 # advertise the largefiles=serve capability
-@eh.wrapfunction(wireprotov1server, b'_capabilities')
+@eh.wrapfunction(wireprotov1server, '_capabilities')
 def _capabilities(orig, repo, proto):
     '''announce largefile server capability'''
     caps = orig(repo, proto)
--- a/hgext/lfs/__init__.py	Mon Aug 07 11:05:43 2023 +0200
+++ b/hgext/lfs/__init__.py	Mon Aug 07 11:08:00 2023 +0200
@@ -342,7 +342,7 @@
     wrapfunction(filelog, 'size', wrapper.filelogsize)
 
 
-@eh.wrapfunction(localrepo, b'resolverevlogstorevfsoptions')
+@eh.wrapfunction(localrepo, 'resolverevlogstorevfsoptions')
 def _resolverevlogstorevfsoptions(orig, ui, requirements, features):
     opts = orig(ui, requirements, features)
     for name, module in extensions.extensions(ui):
--- a/hgext/lfs/wireprotolfsserver.py	Mon Aug 07 11:05:43 2023 +0200
+++ b/hgext/lfs/wireprotolfsserver.py	Mon Aug 07 11:08:00 2023 +0200
@@ -33,7 +33,7 @@
 eh = exthelper.exthelper()
 
 
-@eh.wrapfunction(wireprotoserver, b'handlewsgirequest')
+@eh.wrapfunction(wireprotoserver, 'handlewsgirequest')
 def handlewsgirequest(orig, rctx, req, res, checkperm):
     """Wrap wireprotoserver.handlewsgirequest() to possibly process an LFS
     request if it is left unprocessed by the wrapped method.
--- a/hgext/lfs/wrapper.py	Mon Aug 07 11:05:43 2023 +0200
+++ b/hgext/lfs/wrapper.py	Mon Aug 07 11:08:00 2023 +0200
@@ -53,7 +53,7 @@
 eh = exthelper.exthelper()
 
 
-@eh.wrapfunction(localrepo, b'makefilestorage')
+@eh.wrapfunction(localrepo, 'makefilestorage')
 def localrepomakefilestorage(orig, requirements, features, **kwargs):
     if b'lfs' in requirements:
         features.add(repository.REPO_FEATURE_LFS)
@@ -61,14 +61,14 @@
     return orig(requirements=requirements, features=features, **kwargs)
 
 
-@eh.wrapfunction(changegroup, b'allsupportedversions')
+@eh.wrapfunction(changegroup, 'allsupportedversions')
 def allsupportedversions(orig, ui):
     versions = orig(ui)
     versions.add(b'03')
     return versions
 
 
-@eh.wrapfunction(wireprotov1server, b'_capabilities')
+@eh.wrapfunction(wireprotov1server, '_capabilities')
 def _capabilities(orig, repo, proto):
     '''Wrap server command to announce lfs server capability'''
     caps = orig(repo, proto)
@@ -227,7 +227,7 @@
     return orig(self, rev)
 
 
-@eh.wrapfunction(revlog, b'_verify_revision')
+@eh.wrapfunction(revlog, '_verify_revision')
 def _verify_revision(orig, rl, skipflags, state, node):
     if _islfs(rl, node=node):
         rawtext = rl.rawdata(node)
@@ -246,7 +246,7 @@
     orig(rl, skipflags, state, node)
 
 
-@eh.wrapfunction(context.basefilectx, b'cmp')
+@eh.wrapfunction(context.basefilectx, 'cmp')
 def filectxcmp(orig, self, fctx):
     """returns True if text is different than fctx"""
     # some fctx (ex. hg-git) is not based on basefilectx and do not have islfs
@@ -258,7 +258,7 @@
     return orig(self, fctx)
 
 
-@eh.wrapfunction(context.basefilectx, b'isbinary')
+@eh.wrapfunction(context.basefilectx, 'isbinary')
 def filectxisbinary(orig, self):
     if self.islfs():
         # fast path: use lfs metadata to answer isbinary
@@ -272,13 +272,13 @@
     return _islfs(self.filelog()._revlog, self.filenode())
 
 
-@eh.wrapfunction(cmdutil, b'_updatecatformatter')
+@eh.wrapfunction(cmdutil, '_updatecatformatter')
 def _updatecatformatter(orig, fm, ctx, matcher, path, decode):
     orig(fm, ctx, matcher, path, decode)
     fm.data(rawdata=ctx[path].rawdata())
 
 
-@eh.wrapfunction(scmutil, b'wrapconvertsink')
+@eh.wrapfunction(scmutil, 'wrapconvertsink')
 def convertsink(orig, sink):
     sink = orig(sink)
     if sink.repotype == b'hg':
@@ -325,7 +325,7 @@
 
 # bundlerepo uses "vfsmod.readonlyvfs(othervfs)", we need to make sure lfs
 # options and blob stores are passed from othervfs to the new readonlyvfs.
-@eh.wrapfunction(vfsmod.readonlyvfs, b'__init__')
+@eh.wrapfunction(vfsmod.readonlyvfs, '__init__')
 def vfsinit(orig, self, othervfs):
     orig(self, othervfs)
     # copy lfs related options
@@ -383,10 +383,7 @@
 
 
 def uploadblobsfromrevs(repo, revs):
-    """upload lfs blobs introduced by revs
-
-    Note: also used by other extensions e. g. infinitepush. avoid renaming.
-    """
+    """upload lfs blobs introduced by revs"""
     if _canskipupload(repo):
         return
     pointers = extractpointers(repo, revs)
@@ -403,7 +400,7 @@
     return uploadblobsfromrevs(pushop.repo, pushop.outgoing.missing)
 
 
-@eh.wrapfunction(exchange, b'push')
+@eh.wrapfunction(exchange, 'push')
 def push(orig, repo, remote, *args, **kwargs):
     """bail on push if the extension isn't enabled on remote when needed, and
     update the remote store based on the destination path."""
@@ -433,7 +430,7 @@
 
 
 # when writing a bundle via "hg bundle" command, upload related LFS blobs
-@eh.wrapfunction(bundle2, b'writenewbundle')
+@eh.wrapfunction(bundle2, 'writenewbundle')
 def writenewbundle(
     orig, ui, repo, source, filename, bundletype, outgoing, *args, **kwargs
 ):
@@ -522,7 +519,7 @@
     remoteblob.writebatch(pointers, repo.svfs.lfslocalblobstore)
 
 
-@eh.wrapfunction(upgrade_engine, b'finishdatamigration')
+@eh.wrapfunction(upgrade_engine, 'finishdatamigration')
 def upgradefinishdatamigration(orig, ui, srcrepo, dstrepo, requirements):
     orig(ui, srcrepo, dstrepo, requirements)
 
@@ -539,8 +536,8 @@
                 lfutil.link(srclfsvfs.join(oid), dstlfsvfs.join(oid))
 
 
-@eh.wrapfunction(upgrade_actions, b'preservedrequirements')
-@eh.wrapfunction(upgrade_actions, b'supporteddestrequirements')
+@eh.wrapfunction(upgrade_actions, 'preservedrequirements')
+@eh.wrapfunction(upgrade_actions, 'supporteddestrequirements')
 def upgraderequirements(orig, repo):
     reqs = orig(repo)
     if b'lfs' in repo.requirements:
--- a/hgext/narrow/narrowcommands.py	Mon Aug 07 11:05:43 2023 +0200
+++ b/hgext/narrow/narrowcommands.py	Mon Aug 07 11:08:00 2023 +0200
@@ -128,7 +128,7 @@
                 kwargs[b'depth'] = opts[b'depth']
 
         wrappedextraprepare = extensions.wrappedfunction(
-            exchange, b'_pullbundle2extraprepare', pullbundle2extraprepare_widen
+            exchange, '_pullbundle2extraprepare', pullbundle2extraprepare_widen
         )
 
     with wrappedextraprepare:
@@ -146,7 +146,7 @@
                 kwargs[b'depth'] = opts['depth']
 
         wrappedextraprepare = extensions.wrappedfunction(
-            exchange, b'_pullbundle2extraprepare', pullbundle2extraprepare_widen
+            exchange, '_pullbundle2extraprepare', pullbundle2extraprepare_widen
         )
 
     with wrappedextraprepare:
@@ -201,7 +201,7 @@
 
 
 extensions.wrapfunction(
-    exchange, b'_pullbundle2extraprepare', pullbundle2extraprepare
+    exchange, '_pullbundle2extraprepare', pullbundle2extraprepare
 )
 
 
@@ -366,7 +366,7 @@
         kwargs[b'excludepats'] = newexcludes
 
     wrappedextraprepare = extensions.wrappedfunction(
-        exchange, b'_pullbundle2extraprepare', pullbundle2extraprepare_widen
+        exchange, '_pullbundle2extraprepare', pullbundle2extraprepare_widen
     )
 
     # define a function that narrowbundle2 can call after creating the
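
narrowcommands.py uses the scoped variant, extensions.wrappedfunction(), as a context manager so the wrapper around exchange._pullbundle2extraprepare is only active inside the with block. A minimal, self-contained sketch of that pattern follows; the exchange namespace and the widen wrapper below are invented stand-ins, not the real objects:

    import contextlib
    import types

    @contextlib.contextmanager
    def wrappedfunction(container, funcname, wrapper):
        # Install the wrapper, then restore the original on exit.
        origfn = getattr(container, funcname)

        def wrapped(*args, **kwargs):
            return wrapper(origfn, *args, **kwargs)

        setattr(container, funcname, wrapped)
        try:
            yield
        finally:
            setattr(container, funcname, origfn)

    exchange = types.SimpleNamespace(prepare=lambda kwargs: dict(kwargs))

    def widen(orig, kwargs):
        # Add extra arguments before delegating, in the spirit of
        # pullbundle2extraprepare_widen above.
        return orig(dict(kwargs, widen=True))

    with wrappedfunction(exchange, 'prepare', widen):
        assert exchange.prepare({}) == {'widen': True}
    assert exchange.prepare({}) == {}   # original behaviour restored
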
--- a/hgext/narrow/narrowwirepeer.py	Mon Aug 07 11:05:43 2023 +0200
+++ b/hgext/narrow/narrowwirepeer.py	Mon Aug 07 11:08:00 2023 +0200
@@ -36,7 +36,7 @@
                 kwargs["excludepats"] = b','.join(exclude)
             return orig(cmd, *args, **kwargs)
 
-        extensions.wrapfunction(peer, b'_calltwowaystream', wrapped)
+        extensions.wrapfunction(peer, '_calltwowaystream', wrapped)
 
     hg.wirepeersetupfuncs.append(wirereposetup)
 
--- a/hgext/pager.py	Mon Aug 07 11:05:43 2023 +0200
+++ b/hgext/pager.py	Mon Aug 07 11:08:00 2023 +0200
@@ -76,7 +76,7 @@
                 ui.disablepager()
         return orig(ui, options, cmd, cmdfunc)
 
-    extensions.wrapfunction(dispatch, b'_runcommand', pagecmd)
+    extensions.wrapfunction(dispatch, '_runcommand', pagecmd)
 
 
 attended = [b'annotate', b'cat', b'diff', b'export', b'glog', b'log', b'qdiff']
--- a/hgext/remotefilelog/__init__.py	Mon Aug 07 11:05:43 2023 +0200
+++ b/hgext/remotefilelog/__init__.py	Mon Aug 07 11:08:00 2023 +0200
@@ -317,31 +317,31 @@
     changegroup.cgpacker = shallowbundle.shallowcg1packer
 
     extensions.wrapfunction(
-        changegroup, b'_addchangegroupfiles', shallowbundle.addchangegroupfiles
+        changegroup, '_addchangegroupfiles', shallowbundle.addchangegroupfiles
     )
     extensions.wrapfunction(
-        changegroup, b'makechangegroup', shallowbundle.makechangegroup
+        changegroup, 'makechangegroup', shallowbundle.makechangegroup
     )
-    extensions.wrapfunction(localrepo, b'makestore', storewrapper)
-    extensions.wrapfunction(exchange, b'pull', exchangepull)
-    extensions.wrapfunction(merge, b'applyupdates', applyupdates)
-    extensions.wrapfunction(merge, b'_checkunknownfiles', checkunknownfiles)
-    extensions.wrapfunction(context.workingctx, b'_checklookup', checklookup)
-    extensions.wrapfunction(scmutil, b'_findrenames', findrenames)
+    extensions.wrapfunction(localrepo, 'makestore', storewrapper)
+    extensions.wrapfunction(exchange, 'pull', exchangepull)
+    extensions.wrapfunction(merge, 'applyupdates', applyupdates)
+    extensions.wrapfunction(merge, '_checkunknownfiles', checkunknownfiles)
+    extensions.wrapfunction(context.workingctx, '_checklookup', checklookup)
+    extensions.wrapfunction(scmutil, '_findrenames', findrenames)
     extensions.wrapfunction(
-        copies, b'_computeforwardmissing', computeforwardmissing
+        copies, '_computeforwardmissing', computeforwardmissing
     )
-    extensions.wrapfunction(dispatch, b'runcommand', runcommand)
-    extensions.wrapfunction(repair, b'_collectbrokencsets', _collectbrokencsets)
-    extensions.wrapfunction(context.changectx, b'filectx', filectx)
-    extensions.wrapfunction(context.workingctx, b'filectx', workingfilectx)
-    extensions.wrapfunction(patch, b'trydiff', trydiff)
-    extensions.wrapfunction(hg, b'verify', _verify)
+    extensions.wrapfunction(dispatch, 'runcommand', runcommand)
+    extensions.wrapfunction(repair, '_collectbrokencsets', _collectbrokencsets)
+    extensions.wrapfunction(context.changectx, 'filectx', filectx)
+    extensions.wrapfunction(context.workingctx, 'filectx', workingfilectx)
+    extensions.wrapfunction(patch, 'trydiff', trydiff)
+    extensions.wrapfunction(hg, 'verify', _verify)
     scmutil.fileprefetchhooks.add(b'remotefilelog', _fileprefetchhook)
 
     # disappointing hacks below
-    extensions.wrapfunction(scmutil, b'getrenamedfn', getrenamedfn)
-    extensions.wrapfunction(revset, b'filelog', filelogrevset)
+    extensions.wrapfunction(scmutil, 'getrenamedfn', getrenamedfn)
+    extensions.wrapfunction(revset, 'filelog', filelogrevset)
     revset.symbols[b'filelog'] = revset.filelog
 
 
@@ -374,7 +374,7 @@
             else:
                 return orig(self, *args, **kwargs)
 
-        extensions.wrapfunction(exchange, b'pull', pull_shallow)
+        extensions.wrapfunction(exchange, 'pull', pull_shallow)
 
         # Wrap the stream logic to add requirements and to pass include/exclude
         # patterns around.
@@ -393,14 +393,14 @@
                 else:
                     return orig()
 
-            extensions.wrapfunction(remote, b'stream_out', stream_out_shallow)
+            extensions.wrapfunction(remote, 'stream_out', stream_out_shallow)
 
         def stream_wrap(orig, op):
             setup_streamout(op.repo, op.remote)
             return orig(op)
 
         extensions.wrapfunction(
-            streamclone, b'maybeperformlegacystreamclone', stream_wrap
+            streamclone, 'maybeperformlegacystreamclone', stream_wrap
         )
 
         def canperformstreamclone(orig, pullop, bundle2=False):
@@ -417,7 +417,7 @@
             return supported, requirements
 
         extensions.wrapfunction(
-            streamclone, b'canperformstreamclone', canperformstreamclone
+            streamclone, 'canperformstreamclone', canperformstreamclone
         )
 
     try:
@@ -721,7 +721,7 @@
             )
 
     extensions.wrapfunction(
-        remotefilelog.remotefilelog, b'addrawrevision', addrawrevision
+        remotefilelog.remotefilelog, 'addrawrevision', addrawrevision
     )
 
     def changelogadd(orig, self, *args, **kwargs):
@@ -749,7 +749,7 @@
         del pendingfilecommits[:]
         return node
 
-    extensions.wrapfunction(changelog.changelog, b'add', changelogadd)
+    extensions.wrapfunction(changelog.changelog, 'add', changelogadd)
 
 
 def getrenamedfn(orig, repo, endrev=None):
@@ -1083,7 +1083,7 @@
     if util.safehasattr(remote, b'_callstream'):
         remote._localrepo = repo
     elif util.safehasattr(remote, b'getbundle'):
-        extensions.wrapfunction(remote, b'getbundle', localgetbundle)
+        extensions.wrapfunction(remote, 'getbundle', localgetbundle)
 
     return orig(repo, remote, *args, **kwargs)
 
--- a/hgext/remotefilelog/remotefilelogserver.py	Mon Aug 07 11:05:43 2023 +0200
+++ b/hgext/remotefilelog/remotefilelogserver.py	Mon Aug 07 11:08:00 2023 +0200
@@ -67,7 +67,7 @@
         )
 
     extensions.wrapfunction(
-        changegroup.cgpacker, b'generatefiles', generatefiles
+        changegroup.cgpacker, 'generatefiles', generatefiles
     )
 
 
@@ -207,7 +207,7 @@
             ):
                 yield x
 
-    extensions.wrapfunction(streamclone, b'_walkstreamfiles', _walkstreamfiles)
+    extensions.wrapfunction(streamclone, '_walkstreamfiles', _walkstreamfiles)
 
     # expose remotefilelog capabilities
     def _capabilities(orig, repo, proto):
@@ -222,7 +222,7 @@
             caps.append(b'x_rfl_getfile')
         return caps
 
-    extensions.wrapfunction(wireprotov1server, b'_capabilities', _capabilities)
+    extensions.wrapfunction(wireprotov1server, '_capabilities', _capabilities)
 
     def _adjustlinkrev(orig, self, *args, **kwargs):
         # When generating file blobs, taking the real path is too slow on large
@@ -233,7 +233,7 @@
         return orig(self, *args, **kwargs)
 
     extensions.wrapfunction(
-        context.basefilectx, b'_adjustlinkrev', _adjustlinkrev
+        context.basefilectx, '_adjustlinkrev', _adjustlinkrev
     )
 
     def _iscmd(orig, cmd):
@@ -241,7 +241,7 @@
             return False
         return orig(cmd)
 
-    extensions.wrapfunction(wireprotoserver, b'iscmd', _iscmd)
+    extensions.wrapfunction(wireprotoserver, 'iscmd', _iscmd)
 
 
 def _loadfileblob(repo, cachepath, path, node):
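
Several of the targets wrapped in this file, such as streamclone._walkstreamfiles and changegroup.cgpacker.generatefiles, are generators. In that case the wrapper is itself written as a generator that iterates the original and re-yields (possibly filtering or augmenting) its items. A tiny illustration of that wrapper shape, with invented names and data:

    def walkfiles():
        # Stand-in for a generator such as streamclone._walkstreamfiles.
        yield from ('data/foo.i', 'meta/bar.i', 'data/baz.i')

    def filtered_walk(orig):
        # The wrapper is a generator too: it consumes the original and
        # yields only the entries it wants to expose.
        for entry in orig():
            if not entry.startswith('meta/'):
                yield entry

    print(list(filtered_walk(walkfiles)))   # -> ['data/foo.i', 'data/baz.i']
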
--- a/hgext/remotenames.py	Mon Aug 07 11:05:43 2023 +0200
+++ b/hgext/remotenames.py	Mon Aug 07 11:08:00 2023 +0200
@@ -255,7 +255,7 @@
 
 
 def extsetup(ui):
-    extensions.wrapfunction(bookmarks, b'_printbookmarks', wrapprintbookmarks)
+    extensions.wrapfunction(bookmarks, '_printbookmarks', wrapprintbookmarks)
 
 
 def reposetup(ui, repo):
--- a/hgext/schemes.py	Mon Aug 07 11:05:43 2023 +0200
+++ b/hgext/schemes.py	Mon Aug 07 11:08:00 2023 +0200
@@ -159,7 +159,7 @@
         else:
             hg.repo_schemes[scheme] = ShortRepository(url, scheme, t)
 
-    extensions.wrapfunction(urlutil, b'hasdriveletter', hasdriveletter)
+    extensions.wrapfunction(urlutil, 'hasdriveletter', hasdriveletter)
 
 
 @command(b'debugexpandscheme', norepo=True)
--- a/hgext/share.py	Mon Aug 07 11:05:43 2023 +0200
+++ b/hgext/share.py	Mon Aug 07 11:08:00 2023 +0200
@@ -162,9 +162,9 @@
 
 
 def extsetup(ui):
-    extensions.wrapfunction(bookmarks, b'_getbkfile', getbkfile)
-    extensions.wrapfunction(bookmarks.bmstore, b'_recordchange', recordchange)
-    extensions.wrapfunction(bookmarks.bmstore, b'_writerepo', writerepo)
+    extensions.wrapfunction(bookmarks, '_getbkfile', getbkfile)
+    extensions.wrapfunction(bookmarks.bmstore, '_recordchange', recordchange)
+    extensions.wrapfunction(bookmarks.bmstore, '_writerepo', writerepo)
     extensions.wrapcommand(commands.table, b'clone', clone)
 
 
--- a/hgext/sparse.py	Mon Aug 07 11:05:43 2023 +0200
+++ b/hgext/sparse.py	Mon Aug 07 11:08:00 2023 +0200
@@ -146,7 +146,7 @@
             revs = revs.filter(ctxmatch)
         return revs
 
-    extensions.wrapfunction(logcmdutil, b'_initialrevs', _initialrevs)
+    extensions.wrapfunction(logcmdutil, '_initialrevs', _initialrevs)
 
 
 def _clonesparsecmd(orig, ui, repo, *args, **opts):
@@ -170,7 +170,7 @@
             )
             return orig(ctx, *args, **kwargs)
 
-        extensions.wrapfunction(mergemod, b'update', clonesparse)
+        extensions.wrapfunction(mergemod, 'update', clonesparse)
     return orig(ui, repo, *args, **opts)
 
 
--- a/hgext/sqlitestore.py	Mon Aug 07 11:05:43 2023 +0200
+++ b/hgext/sqlitestore.py	Mon Aug 07 11:08:00 2023 +0200
@@ -1330,11 +1330,11 @@
 def extsetup(ui):
     localrepo.featuresetupfuncs.add(featuresetup)
     extensions.wrapfunction(
-        localrepo, b'newreporequirements', newreporequirements
+        localrepo, 'newreporequirements', newreporequirements
     )
-    extensions.wrapfunction(localrepo, b'makefilestorage', makefilestorage)
-    extensions.wrapfunction(localrepo, b'makemain', makemain)
-    extensions.wrapfunction(verify.verifier, b'__init__', verifierinit)
+    extensions.wrapfunction(localrepo, 'makefilestorage', makefilestorage)
+    extensions.wrapfunction(localrepo, 'makemain', makemain)
+    extensions.wrapfunction(verify.verifier, '__init__', verifierinit)
 
 
 def reposetup(ui, repo):
--- a/hgext/zeroconf/__init__.py	Mon Aug 07 11:05:43 2023 +0200
+++ b/hgext/zeroconf/__init__.py	Mon Aug 07 11:08:00 2023 +0200
@@ -233,10 +233,10 @@
             server.close()
 
 
-extensions.wrapfunction(dispatch, b'_runcommand', cleanupafterdispatch)
+extensions.wrapfunction(dispatch, '_runcommand', cleanupafterdispatch)
 
-extensions.wrapfunction(uimod.ui, b'config', config)
-extensions.wrapfunction(uimod.ui, b'configitems', configitems)
-extensions.wrapfunction(uimod.ui, b'configsuboptions', configsuboptions)
-extensions.wrapfunction(hg, b'defaultdest', defaultdest)
-extensions.wrapfunction(servermod, b'create_server', zc_create_server)
+extensions.wrapfunction(uimod.ui, 'config', config)
+extensions.wrapfunction(uimod.ui, 'configitems', configitems)
+extensions.wrapfunction(uimod.ui, 'configsuboptions', configsuboptions)
+extensions.wrapfunction(hg, 'defaultdest', defaultdest)
+extensions.wrapfunction(servermod, 'create_server', zc_create_server)
--- a/i18n/ja.po	Mon Aug 07 11:05:43 2023 +0200
+++ b/i18n/ja.po	Mon Aug 07 11:08:00 2023 +0200
@@ -5875,13 +5875,6 @@
 msgstr "共有元情報を相対パスで保持 (実験的実装)"
 
 msgid ""
-"    [infinitepush]\n"
-"    # Server-side and client-side option. Pattern of the infinitepush "
-"bookmark\n"
-"    branchpattern = PATTERN"
-msgstr ""
-
-msgid ""
 "    # Server or client\n"
 "    server = False"
 msgstr ""
@@ -5973,12 +5966,6 @@
 msgstr ""
 
 msgid ""
-"    # Instructs infinitepush to forward all received bundle2 parts to the\n"
-"    # bundle for storage. Defaults to False.\n"
-"    storeallparts = True"
-msgstr ""
-
-msgid ""
 "    # routes each incoming push to the bundlestore. defaults to False\n"
 "    pushtobundlestore = True"
 msgstr ""
@@ -5991,24 +5978,10 @@
 "    bookmarks = True\n"
 msgstr ""
 
-msgid "please set infinitepush.sqlhost"
-msgstr ""
-
-msgid "please set infinitepush.reponame"
-msgstr ""
-
 #, fuzzy, python-format
 msgid "invalid log level %s"
 msgstr "不正なローカルアドレス: %s"
 
-#, fuzzy, python-format
-msgid "unknown infinitepush store type specified %s"
-msgstr "--type に未知のバンドル種別が指定されました"
-
-#, fuzzy, python-format
-msgid "unknown infinitepush index type specified %s"
-msgstr "--type に未知のバンドル種別が指定されました"
-
 #, fuzzy
 msgid "force push to go to bundle store (EXPERIMENTAL)"
 msgstr "表示対象リビジョン"
@@ -6019,10 +5992,6 @@
 msgid "see 'hg help config.paths'"
 msgstr "詳細は 'hg help config.paths' 参照"
 
-#, fuzzy
-msgid "infinitepush bookmark '{}' does not exist in path '{}'"
-msgstr "ブックマーク '%s' は存在しません"
-
 msgid "no changes found\n"
 msgstr "差分はありません\n"
 
--- a/i18n/pt_BR.po	Mon Aug 07 11:05:43 2023 +0200
+++ b/i18n/pt_BR.po	Mon Aug 07 11:08:00 2023 +0200
@@ -5940,12 +5940,6 @@
 msgstr ""
 
 msgid ""
-"    [infinitepush]\n"
-"    # Server-side and client-side option. Pattern of the infinitepush bookmark\n"
-"    branchpattern = PATTERN"
-msgstr ""
-
-msgid ""
 "    # Server or client\n"
 "    server = False"
 msgstr ""
@@ -6034,12 +6028,6 @@
 msgstr ""
 
 msgid ""
-"    # Instructs infinitepush to forward all received bundle2 parts to the\n"
-"    # bundle for storage. Defaults to False.\n"
-"    storeallparts = True"
-msgstr ""
-
-msgid ""
 "    # routes each incoming push to the bundlestore. defaults to False\n"
 "    pushtobundlestore = True"
 msgstr ""
@@ -6052,24 +6040,10 @@
 "    bookmarks = True\n"
 msgstr ""
 
-msgid "please set infinitepush.sqlhost"
-msgstr ""
-
-msgid "please set infinitepush.reponame"
-msgstr ""
-
 #, python-format
 msgid "invalid log level %s"
 msgstr ""
 
-#, python-format
-msgid "unknown infinitepush store type specified %s"
-msgstr ""
-
-#, python-format
-msgid "unknown infinitepush index type specified %s"
-msgstr ""
-
 msgid "force push to go to bundle store (EXPERIMENTAL)"
 msgstr ""
 
@@ -6079,9 +6053,6 @@
 msgid "see 'hg help config.paths'"
 msgstr "veja 'hg help config.paths'"
 
-msgid "infinitepush bookmark '{}' does not exist in path '{}'"
-msgstr ""
-
 msgid "no changes found\n"
 msgstr "nenhuma alteração encontrada\n"
 
--- a/mercurial/configitems.py	Mon Aug 07 11:05:43 2023 +0200
+++ b/mercurial/configitems.py	Mon Aug 07 11:08:00 2023 +0200
@@ -9,11 +9,21 @@
 import functools
 import re
 
+from .utils import resourceutil
+
 from . import (
     encoding,
     error,
 )
 
+try:
+    import tomllib  # pytype: disable=import-error
+
+    tomllib.load  # trigger lazy import
+except ModuleNotFoundError:
+    # Python <3.11 compat
+    from .thirdparty import tomli as tomllib
+
 
 def loadconfigtable(ui, extname, configtable):
     """update config item known to the ui with the extension ones"""
@@ -22,7 +32,7 @@
         knownkeys = set(knownitems)
         newkeys = set(items)
         for key in sorted(knownkeys & newkeys):
-            msg = b"extension '%s' overwrite config item '%s.%s'"
+            msg = b"extension '%s' overwrites config item '%s.%s'"
             msg %= (extname, section, key)
             ui.develwarn(msg, config=b'warn-config')
 
@@ -48,15 +58,19 @@
         generic=False,
         priority=0,
         experimental=False,
+        documentation="",
+        in_core_extension=None,
     ):
         self.section = section
         self.name = name
         self.default = default
+        self.documentation = documentation
         self.alias = list(alias)
         self.generic = generic
         self.priority = priority
         self.experimental = experimental
         self._re = None
+        self.in_core_extension = in_core_extension
         if generic:
             self._re = re.compile(self.name)
 
@@ -102,6 +116,74 @@
         return None
 
 
+def sanitize_item(item):
+    """Apply the transformations that are encoded on top of the pure data"""
+
+    # Set the special defaults
+    default_type_key = "default-type"
+    default_type = item.pop(default_type_key, None)
+    if default_type == "dynamic":
+        item["default"] = dynamicdefault
+    elif default_type == "list_type":
+        item["default"] = list
+    elif default_type == "lambda":
+        assert isinstance(item["default"], list)
+        default = [e.encode() for e in item["default"]]
+        item["default"] = lambda: default
+    elif default_type == "lazy_module":
+        item["default"] = lambda: encoding.encoding
+    else:
+        if default_type is not None:
+            msg = "invalid default config type %r for '%s.%s'"
+            msg %= (default_type, item["section"], item["name"])
+            raise error.ProgrammingError(msg)
+
+    # config expects bytes
+    alias = item.get("alias")
+    if alias:
+        item["alias"] = [(k.encode(), v.encode()) for (k, v) in alias]
+    if isinstance(item.get("default"), str):
+        item["default"] = item["default"].encode()
+    item["section"] = item["section"].encode()
+    item["name"] = item["name"].encode()
+
+
+def read_configitems_file():
+    """Returns the deserialized TOML structure from the configitems file"""
+    with resourceutil.open_resource(b"mercurial", b"configitems.toml") as fp:
+        return tomllib.load(fp)
+
+
+def configitems_from_toml(items):
+    """Register the configitems from the *deserialized* toml file"""
+    for item in items["items"]:
+        sanitize_item(item)
+        coreconfigitem(**item)
+
+    templates = items["templates"]
+
+    for application in items["template-applications"]:
+        template_items = templates[application["template"]]
+
+        for template_item in template_items:
+            item = template_item.copy()
+            prefix = application.get("prefix", "")
+            item["section"] = application["section"]
+            if prefix:
+                item["name"] = f'{prefix}.{item["suffix"]}'
+            else:
+                item["name"] = item["suffix"]
+
+            sanitize_item(item)
+            item.pop("suffix", None)
+            coreconfigitem(**item)
+
+
+def import_configitems_from_file():
+    as_toml = read_configitems_file()
+    configitems_from_toml(as_toml)
+
+
 coreitems = {}
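
Putting the pieces together: import_configitems_from_file() reads configitems.toml via read_configitems_file(), and configitems_from_toml() pushes each entry through sanitize_item() before handing it to coreconfigitem(). sanitize_item() maps the "default-type" marker onto a real default and re-encodes the str fields to bytes, since the config layer still works on bytes. The mini function below re-implements only the branches visible above, applied to a sample item modelled on the bundle.mainreporoot registration that is removed from this file further down; it is an illustration, not the real code path:

    def sanitize_item_demo(item):
        # Mirror of sanitize_item above, trimmed to the common branches.
        default_type = item.pop("default-type", None)
        if default_type == "list_type":
            item["default"] = list                  # lazily-built mutable default
        elif default_type == "lambda":
            values = [e.encode() for e in item["default"]]
            item["default"] = lambda: values
        if isinstance(item.get("default"), str):
            item["default"] = item["default"].encode()  # config layer expects bytes
        item["section"] = item["section"].encode()
        item["name"] = item["name"].encode()
        return item

    item = {"section": "bundle", "name": "mainreporoot", "default": ""}
    print(sanitize_item_demo(item))
    # -> {'section': b'bundle', 'name': b'mainreporoot', 'default': b''}
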
 
 
@@ -129,2856 +211,4 @@
 
 coreconfigitem = getitemregister(coreitems)
 
-
-def _registerdiffopts(section, configprefix=b''):
-    coreconfigitem(
-        section,
-        configprefix + b'nodates',
-        default=False,
-    )
-    coreconfigitem(
-        section,
-        configprefix + b'showfunc',
-        default=False,
-    )
-    coreconfigitem(
-        section,
-        configprefix + b'unified',
-        default=None,
-    )
-    coreconfigitem(
-        section,
-        configprefix + b'git',
-        default=False,
-    )
-    coreconfigitem(
-        section,
-        configprefix + b'ignorews',
-        default=False,
-    )
-    coreconfigitem(
-        section,
-        configprefix + b'ignorewsamount',
-        default=False,
-    )
-    coreconfigitem(
-        section,
-        configprefix + b'ignoreblanklines',
-        default=False,
-    )
-    coreconfigitem(
-        section,
-        configprefix + b'ignorewseol',
-        default=False,
-    )
-    coreconfigitem(
-        section,
-        configprefix + b'nobinary',
-        default=False,
-    )
-    coreconfigitem(
-        section,
-        configprefix + b'noprefix',
-        default=False,
-    )
-    coreconfigitem(
-        section,
-        configprefix + b'word-diff',
-        default=False,
-    )
-
-
-coreconfigitem(
-    b'alias',
-    b'.*',
-    default=dynamicdefault,
-    generic=True,
-)
-coreconfigitem(
-    b'auth',
-    b'cookiefile',
-    default=None,
-)
-_registerdiffopts(section=b'annotate')
-# bookmarks.pushing: internal hack for discovery
-coreconfigitem(
-    b'bookmarks',
-    b'pushing',
-    default=list,
-)
-# bundle.mainreporoot: internal hack for bundlerepo
-coreconfigitem(
-    b'bundle',
-    b'mainreporoot',
-    default=b'',
-)
-coreconfigitem(
-    b'censor',
-    b'policy',
-    default=b'abort',
-    experimental=True,
-)
-coreconfigitem(
-    b'chgserver',
-    b'idletimeout',
-    default=3600,
-)
-coreconfigitem(
-    b'chgserver',
-    b'skiphash',
-    default=False,
-)
-coreconfigitem(
-    b'cmdserver',
-    b'log',
-    default=None,
-)
-coreconfigitem(
-    b'cmdserver',
-    b'max-log-files',
-    default=7,
-)
-coreconfigitem(
-    b'cmdserver',
-    b'max-log-size',
-    default=b'1 MB',
-)
-coreconfigitem(
-    b'cmdserver',
-    b'max-repo-cache',
-    default=0,
-    experimental=True,
-)
-coreconfigitem(
-    b'cmdserver',
-    b'message-encodings',
-    default=list,
-)
-coreconfigitem(
-    b'cmdserver',
-    b'track-log',
-    default=lambda: [b'chgserver', b'cmdserver', b'repocache'],
-)
-coreconfigitem(
-    b'cmdserver',
-    b'shutdown-on-interrupt',
-    default=True,
-)
-coreconfigitem(
-    b'color',
-    b'.*',
-    default=None,
-    generic=True,
-)
-coreconfigitem(
-    b'color',
-    b'mode',
-    default=b'auto',
-)
-coreconfigitem(
-    b'color',
-    b'pagermode',
-    default=dynamicdefault,
-)
-coreconfigitem(
-    b'command-templates',
-    b'graphnode',
-    default=None,
-    alias=[(b'ui', b'graphnodetemplate')],
-)
-coreconfigitem(
-    b'command-templates',
-    b'log',
-    default=None,
-    alias=[(b'ui', b'logtemplate')],
-)
-coreconfigitem(
-    b'command-templates',
-    b'mergemarker',
-    default=(
-        b'{node|short} '
-        b'{ifeq(tags, "tip", "", '
-        b'ifeq(tags, "", "", "{tags} "))}'
-        b'{if(bookmarks, "{bookmarks} ")}'
-        b'{ifeq(branch, "default", "", "{branch} ")}'
-        b'- {author|user}: {desc|firstline}'
-    ),
-    alias=[(b'ui', b'mergemarkertemplate')],
-)
-coreconfigitem(
-    b'command-templates',
-    b'pre-merge-tool-output',
-    default=None,
-    alias=[(b'ui', b'pre-merge-tool-output-template')],
-)
-coreconfigitem(
-    b'command-templates',
-    b'oneline-summary',
-    default=None,
-)
-coreconfigitem(
-    b'command-templates',
-    b'oneline-summary.*',
-    default=dynamicdefault,
-    generic=True,
-)
-_registerdiffopts(section=b'commands', configprefix=b'commit.interactive.')
-coreconfigitem(
-    b'commands',
-    b'commit.post-status',
-    default=False,
-)
-coreconfigitem(
-    b'commands',
-    b'grep.all-files',
-    default=False,
-    experimental=True,
-)
-coreconfigitem(
-    b'commands',
-    b'merge.require-rev',
-    default=False,
-)
-coreconfigitem(
-    b'commands',
-    b'push.require-revs',
-    default=False,
-)
-coreconfigitem(
-    b'commands',
-    b'resolve.confirm',
-    default=False,
-)
-coreconfigitem(
-    b'commands',
-    b'resolve.explicit-re-merge',
-    default=False,
-)
-coreconfigitem(
-    b'commands',
-    b'resolve.mark-check',
-    default=b'none',
-)
-_registerdiffopts(section=b'commands', configprefix=b'revert.interactive.')
-coreconfigitem(
-    b'commands',
-    b'show.aliasprefix',
-    default=list,
-)
-coreconfigitem(
-    b'commands',
-    b'status.relative',
-    default=False,
-)
-coreconfigitem(
-    b'commands',
-    b'status.skipstates',
-    default=[],
-    experimental=True,
-)
-coreconfigitem(
-    b'commands',
-    b'status.terse',
-    default=b'',
-)
-coreconfigitem(
-    b'commands',
-    b'status.verbose',
-    default=False,
-)
-coreconfigitem(
-    b'commands',
-    b'update.check',
-    default=None,
-)
-coreconfigitem(
-    b'commands',
-    b'update.requiredest',
-    default=False,
-)
-coreconfigitem(
-    b'committemplate',
-    b'.*',
-    default=None,
-    generic=True,
-)
-coreconfigitem(
-    b'convert',
-    b'bzr.saverev',
-    default=True,
-)
-coreconfigitem(
-    b'convert',
-    b'cvsps.cache',
-    default=True,
-)
-coreconfigitem(
-    b'convert',
-    b'cvsps.fuzz',
-    default=60,
-)
-coreconfigitem(
-    b'convert',
-    b'cvsps.logencoding',
-    default=None,
-)
-coreconfigitem(
-    b'convert',
-    b'cvsps.mergefrom',
-    default=None,
-)
-coreconfigitem(
-    b'convert',
-    b'cvsps.mergeto',
-    default=None,
-)
-coreconfigitem(
-    b'convert',
-    b'git.committeractions',
-    default=lambda: [b'messagedifferent'],
-)
-coreconfigitem(
-    b'convert',
-    b'git.extrakeys',
-    default=list,
-)
-coreconfigitem(
-    b'convert',
-    b'git.findcopiesharder',
-    default=False,
-)
-coreconfigitem(
-    b'convert',
-    b'git.remoteprefix',
-    default=b'remote',
-)
-coreconfigitem(
-    b'convert',
-    b'git.renamelimit',
-    default=400,
-)
-coreconfigitem(
-    b'convert',
-    b'git.saverev',
-    default=True,
-)
-coreconfigitem(
-    b'convert',
-    b'git.similarity',
-    default=50,
-)
-coreconfigitem(
-    b'convert',
-    b'git.skipsubmodules',
-    default=False,
-)
-coreconfigitem(
-    b'convert',
-    b'hg.clonebranches',
-    default=False,
-)
-coreconfigitem(
-    b'convert',
-    b'hg.ignoreerrors',
-    default=False,
-)
-coreconfigitem(
-    b'convert',
-    b'hg.preserve-hash',
-    default=False,
-)
-coreconfigitem(
-    b'convert',
-    b'hg.revs',
-    default=None,
-)
-coreconfigitem(
-    b'convert',
-    b'hg.saverev',
-    default=False,
-)
-coreconfigitem(
-    b'convert',
-    b'hg.sourcename',
-    default=None,
-)
-coreconfigitem(
-    b'convert',
-    b'hg.startrev',
-    default=None,
-)
-coreconfigitem(
-    b'convert',
-    b'hg.tagsbranch',
-    default=b'default',
-)
-coreconfigitem(
-    b'convert',
-    b'hg.usebranchnames',
-    default=True,
-)
-coreconfigitem(
-    b'convert',
-    b'ignoreancestorcheck',
-    default=False,
-    experimental=True,
-)
-coreconfigitem(
-    b'convert',
-    b'localtimezone',
-    default=False,
-)
-coreconfigitem(
-    b'convert',
-    b'p4.encoding',
-    default=dynamicdefault,
-)
-coreconfigitem(
-    b'convert',
-    b'p4.startrev',
-    default=0,
-)
-coreconfigitem(
-    b'convert',
-    b'skiptags',
-    default=False,
-)
-coreconfigitem(
-    b'convert',
-    b'svn.debugsvnlog',
-    default=True,
-)
-coreconfigitem(
-    b'convert',
-    b'svn.trunk',
-    default=None,
-)
-coreconfigitem(
-    b'convert',
-    b'svn.tags',
-    default=None,
-)
-coreconfigitem(
-    b'convert',
-    b'svn.branches',
-    default=None,
-)
-coreconfigitem(
-    b'convert',
-    b'svn.startrev',
-    default=0,
-)
-coreconfigitem(
-    b'convert',
-    b'svn.dangerous-set-commit-dates',
-    default=False,
-)
-coreconfigitem(
-    b'debug',
-    b'dirstate.delaywrite',
-    default=0,
-)
-coreconfigitem(
-    b'debug',
-    b'revlog.verifyposition.changelog',
-    default=b'',
-)
-coreconfigitem(
-    b'debug',
-    b'revlog.debug-delta',
-    default=False,
-)
-# display extra information about the bundling process
-coreconfigitem(
-    b'debug',
-    b'bundling-stats',
-    default=False,
-)
-# display extra information about the unbundling process
-coreconfigitem(
-    b'debug',
-    b'unbundling-stats',
-    default=False,
-)
-coreconfigitem(
-    b'defaults',
-    b'.*',
-    default=None,
-    generic=True,
-)
-coreconfigitem(
-    b'devel',
-    b'all-warnings',
-    default=False,
-)
-coreconfigitem(
-    b'devel',
-    b'bundle2.debug',
-    default=False,
-)
-# which kind of delta to put in the bundled changegroup. Possible value
-# - '': use default behavior
-# - p1: force to always use delta against p1
-# - full: force to always use full content
-coreconfigitem(
-    b'devel',
-    b'bundle.delta',
-    default=b'',
-)
-coreconfigitem(
-    b'devel',
-    b'cache-vfs',
-    default=None,
-)
-coreconfigitem(
-    b'devel',
-    b'check-locks',
-    default=False,
-)
-coreconfigitem(
-    b'devel',
-    b'check-relroot',
-    default=False,
-)
-# Track copy information for all file, not just "added" one (very slow)
-coreconfigitem(
-    b'devel',
-    b'copy-tracing.trace-all-files',
-    default=False,
-)
-coreconfigitem(
-    b'devel',
-    b'default-date',
-    default=None,
-)
-coreconfigitem(
-    b'devel',
-    b'deprec-warn',
-    default=False,
-)
-# possible values:
-# - auto (the default)
-# - force-append
-# - force-new
-coreconfigitem(
-    b'devel',
-    b'dirstate.v2.data_update_mode',
-    default="auto",
-)
-coreconfigitem(
-    b'devel',
-    b'disableloaddefaultcerts',
-    default=False,
-)
-coreconfigitem(
-    b'devel',
-    b'warn-empty-changegroup',
-    default=False,
-)
-coreconfigitem(
-    b'devel',
-    b'legacy.exchange',
-    default=list,
-)
-# When True, revlogs use a special reference version of the nodemap, that is not
-# performant but is "known" to behave properly.
-coreconfigitem(
-    b'devel',
-    b'persistent-nodemap',
-    default=False,
-)
-coreconfigitem(
-    b'devel',
-    b'servercafile',
-    default=b'',
-)
-# This config option is intended for use in tests only. It is a giant
-# footgun to kill security. Don't define it.
-coreconfigitem(
-    b'devel',
-    b'server-insecure-exact-protocol',
-    default=b'',
-)
-coreconfigitem(
-    b'devel',
-    b'serverrequirecert',
-    default=False,
-)
-# Makes the status algorithm wait for the existence of this file
-# (or until a timeout of `devel.sync.status.pre-dirstate-write-file-timeout`
-# seconds) before taking the lock and writing the dirstate.
-# Status signals that it's ready to wait by creating a file
-# with the same name + `.waiting`.
-# Useful when testing race conditions.
-coreconfigitem(
-    b'devel',
-    b'sync.status.pre-dirstate-write-file',
-    default=None,
-)
-coreconfigitem(
-    b'devel',
-    b'sync.status.pre-dirstate-write-file-timeout',
-    default=2,
-)
-coreconfigitem(
-    b'devel',
-    b'sync.dirstate.post-docket-read-file',
-    default=None,
-)
-coreconfigitem(
-    b'devel',
-    b'sync.dirstate.post-docket-read-file-timeout',
-    default=2,
-)
-coreconfigitem(
-    b'devel',
-    b'sync.dirstate.pre-read-file',
-    default=None,
-)
-coreconfigitem(
-    b'devel',
-    b'sync.dirstate.pre-read-file-timeout',
-    default=2,
-)
-coreconfigitem(
-    b'devel',
-    b'strip-obsmarkers',
-    default=True,
-)
-coreconfigitem(
-    b'devel',
-    b'warn-config',
-    default=None,
-)
-coreconfigitem(
-    b'devel',
-    b'warn-config-default',
-    default=None,
-)
-coreconfigitem(
-    b'devel',
-    b'user.obsmarker',
-    default=None,
-)
-coreconfigitem(
-    b'devel',
-    b'warn-config-unknown',
-    default=None,
-)
-coreconfigitem(
-    b'devel',
-    b'debug.copies',
-    default=False,
-)
-coreconfigitem(
-    b'devel',
-    b'copy-tracing.multi-thread',
-    default=True,
-)
-coreconfigitem(
-    b'devel',
-    b'debug.extensions',
-    default=False,
-)
-coreconfigitem(
-    b'devel',
-    b'debug.repo-filters',
-    default=False,
-)
-coreconfigitem(
-    b'devel',
-    b'debug.peer-request',
-    default=False,
-)
-# If discovery.exchange-heads is False, the discovery will not start with
-# remote head fetching and local head querying.
-coreconfigitem(
-    b'devel',
-    b'discovery.exchange-heads',
-    default=True,
-)
-# If devel.debug.abort-update is True, then any merge with the working copy,
-# e.g. [hg update], will be aborted after figuring out what needs to be done,
-# but before spawning the parallel worker
-coreconfigitem(
-    b'devel',
-    b'debug.abort-update',
-    default=False,
-)
-# If discovery.grow-sample is False, the sample size used in set discovery will
-# not be increased through the process
-coreconfigitem(
-    b'devel',
-    b'discovery.grow-sample',
-    default=True,
-)
-# When discovery.grow-sample.dynamic is True, the default, the sample size is
-# adapted to the shape of the undecided set (it is set to the max of:
-# <target-size>, len(roots(undecided)), len(heads(undecided)
-coreconfigitem(
-    b'devel',
-    b'discovery.grow-sample.dynamic',
-    default=True,
-)
-# discovery.grow-sample.rate controls the rate at which the sample grows
-coreconfigitem(
-    b'devel',
-    b'discovery.grow-sample.rate',
-    default=1.05,
-)
-# If discovery.randomize is False, random sampling during discovery is
-# deterministic. It is meant for integration tests.
-coreconfigitem(
-    b'devel',
-    b'discovery.randomize',
-    default=True,
-)
-# Control the initial size of the discovery sample
-coreconfigitem(
-    b'devel',
-    b'discovery.sample-size',
-    default=200,
-)
-# Control the initial size of the discovery for initial change
-coreconfigitem(
-    b'devel',
-    b'discovery.sample-size.initial',
-    default=100,
-)
-_registerdiffopts(section=b'diff')
-coreconfigitem(
-    b'diff',
-    b'merge',
-    default=False,
-    experimental=True,
-)
-coreconfigitem(
-    b'email',
-    b'bcc',
-    default=None,
-)
-coreconfigitem(
-    b'email',
-    b'cc',
-    default=None,
-)
-coreconfigitem(
-    b'email',
-    b'charsets',
-    default=list,
-)
-coreconfigitem(
-    b'email',
-    b'from',
-    default=None,
-)
-coreconfigitem(
-    b'email',
-    b'method',
-    default=b'smtp',
-)
-coreconfigitem(
-    b'email',
-    b'reply-to',
-    default=None,
-)
-coreconfigitem(
-    b'email',
-    b'to',
-    default=None,
-)
-coreconfigitem(
-    b'experimental',
-    b'archivemetatemplate',
-    default=dynamicdefault,
-)
-coreconfigitem(
-    b'experimental',
-    b'auto-publish',
-    default=b'publish',
-)
-coreconfigitem(
-    b'experimental',
-    b'bundle-phases',
-    default=False,
-)
-coreconfigitem(
-    b'experimental',
-    b'bundle2-advertise',
-    default=True,
-)
-coreconfigitem(
-    b'experimental',
-    b'bundle2-output-capture',
-    default=False,
-)
-coreconfigitem(
-    b'experimental',
-    b'bundle2.pushback',
-    default=False,
-)
-coreconfigitem(
-    b'experimental',
-    b'bundle2lazylocking',
-    default=False,
-)
-coreconfigitem(
-    b'experimental',
-    b'bundlecomplevel',
-    default=None,
-)
-coreconfigitem(
-    b'experimental',
-    b'bundlecomplevel.bzip2',
-    default=None,
-)
-coreconfigitem(
-    b'experimental',
-    b'bundlecomplevel.gzip',
-    default=None,
-)
-coreconfigitem(
-    b'experimental',
-    b'bundlecomplevel.none',
-    default=None,
-)
-coreconfigitem(
-    b'experimental',
-    b'bundlecomplevel.zstd',
-    default=None,
-)
-coreconfigitem(
-    b'experimental',
-    b'bundlecompthreads',
-    default=None,
-)
-coreconfigitem(
-    b'experimental',
-    b'bundlecompthreads.bzip2',
-    default=None,
-)
-coreconfigitem(
-    b'experimental',
-    b'bundlecompthreads.gzip',
-    default=None,
-)
-coreconfigitem(
-    b'experimental',
-    b'bundlecompthreads.none',
-    default=None,
-)
-coreconfigitem(
-    b'experimental',
-    b'bundlecompthreads.zstd',
-    default=None,
-)
-coreconfigitem(
-    b'experimental',
-    b'changegroup3',
-    default=True,
-)
-coreconfigitem(
-    b'experimental',
-    b'changegroup4',
-    default=False,
-)
-
-# might remove rank configuration once the computation has no impact
-coreconfigitem(
-    b'experimental',
-    b'changelog-v2.compute-rank',
-    default=True,
-)
-coreconfigitem(
-    b'experimental',
-    b'cleanup-as-archived',
-    default=False,
-)
-coreconfigitem(
-    b'experimental',
-    b'clientcompressionengines',
-    default=list,
-)
-coreconfigitem(
-    b'experimental',
-    b'copytrace',
-    default=b'on',
-)
-coreconfigitem(
-    b'experimental',
-    b'copytrace.movecandidateslimit',
-    default=100,
-)
-coreconfigitem(
-    b'experimental',
-    b'copytrace.sourcecommitlimit',
-    default=100,
-)
-coreconfigitem(
-    b'experimental',
-    b'copies.read-from',
-    default=b"filelog-only",
-)
-coreconfigitem(
-    b'experimental',
-    b'copies.write-to',
-    default=b'filelog-only',
-)
-coreconfigitem(
-    b'experimental',
-    b'crecordtest',
-    default=None,
-)
-coreconfigitem(
-    b'experimental',
-    b'directaccess',
-    default=False,
-)
-coreconfigitem(
-    b'experimental',
-    b'directaccess.revnums',
-    default=False,
-)
-coreconfigitem(
-    b'experimental',
-    b'editortmpinhg',
-    default=False,
-)
-coreconfigitem(
-    b'experimental',
-    b'evolution',
-    default=list,
-)
-coreconfigitem(
-    b'experimental',
-    b'evolution.allowdivergence',
-    default=False,
-    alias=[(b'experimental', b'allowdivergence')],
-)
-coreconfigitem(
-    b'experimental',
-    b'evolution.allowunstable',
-    default=None,
-)
-coreconfigitem(
-    b'experimental',
-    b'evolution.createmarkers',
-    default=None,
-)
-coreconfigitem(
-    b'experimental',
-    b'evolution.effect-flags',
-    default=True,
-    alias=[(b'experimental', b'effect-flags')],
-)
-coreconfigitem(
-    b'experimental',
-    b'evolution.exchange',
-    default=None,
-)
-coreconfigitem(
-    b'experimental',
-    b'evolution.bundle-obsmarker',
-    default=False,
-)
-coreconfigitem(
-    b'experimental',
-    b'evolution.bundle-obsmarker:mandatory',
-    default=True,
-)
-coreconfigitem(
-    b'experimental',
-    b'log.topo',
-    default=False,
-)
-coreconfigitem(
-    b'experimental',
-    b'evolution.report-instabilities',
-    default=True,
-)
-coreconfigitem(
-    b'experimental',
-    b'evolution.track-operation',
-    default=True,
-)
-# repo-level config to exclude a revset visibility
-#
-# The target use case is to use `share` to expose different subset of the same
-# repository, especially server side. See also `server.view`.
-coreconfigitem(
-    b'experimental',
-    b'extra-filter-revs',
-    default=None,
-)
-coreconfigitem(
-    b'experimental',
-    b'maxdeltachainspan',
-    default=-1,
-)
-# tracks files which were undeleted (merge might delete them but we explicitly
-# kept/undeleted them) and creates new filenodes for them
-coreconfigitem(
-    b'experimental',
-    b'merge-track-salvaged',
-    default=False,
-)
-coreconfigitem(
-    b'experimental',
-    b'mmapindexthreshold',
-    default=None,
-)
-coreconfigitem(
-    b'experimental',
-    b'narrow',
-    default=False,
-)
-coreconfigitem(
-    b'experimental',
-    b'nonnormalparanoidcheck',
-    default=False,
-)
-coreconfigitem(
-    b'experimental',
-    b'exportableenviron',
-    default=list,
-)
-coreconfigitem(
-    b'experimental',
-    b'extendedheader.index',
-    default=None,
-)
-coreconfigitem(
-    b'experimental',
-    b'extendedheader.similarity',
-    default=False,
-)
-coreconfigitem(
-    b'experimental',
-    b'graphshorten',
-    default=False,
-)
-coreconfigitem(
-    b'experimental',
-    b'graphstyle.parent',
-    default=dynamicdefault,
-)
-coreconfigitem(
-    b'experimental',
-    b'graphstyle.missing',
-    default=dynamicdefault,
-)
-coreconfigitem(
-    b'experimental',
-    b'graphstyle.grandparent',
-    default=dynamicdefault,
-)
-coreconfigitem(
-    b'experimental',
-    b'hook-track-tags',
-    default=False,
-)
-coreconfigitem(
-    b'experimental',
-    b'httppostargs',
-    default=False,
-)
-coreconfigitem(b'experimental', b'nointerrupt', default=False)
-coreconfigitem(b'experimental', b'nointerrupt-interactiveonly', default=True)
-
-coreconfigitem(
-    b'experimental',
-    b'obsmarkers-exchange-debug',
-    default=False,
-)
-coreconfigitem(
-    b'experimental',
-    b'remotenames',
-    default=False,
-)
-coreconfigitem(
-    b'experimental',
-    b'removeemptydirs',
-    default=True,
-)
-coreconfigitem(
-    b'experimental',
-    b'revert.interactive.select-to-keep',
-    default=False,
-)
-coreconfigitem(
-    b'experimental',
-    b'revisions.prefixhexnode',
-    default=False,
-)
-# "out of experimental" todo list.
-#
-# * include management of a persistent nodemap in the main docket
-# * enforce a "no-truncate" policy for mmap safety
-#      - for censoring operation
-#      - for stripping operation
-#      - for rollback operation
-# * proper streaming (race free) of the docket file
-# * track garbage data to eventually allow rewriting -existing- sidedata.
-# * Exchange-wise, we will also need to do something more efficient than
-#   keeping references to the affected revlogs, especially memory-wise when
-#   rewriting sidedata.
-# * introduce a proper solution to reduce the number of filelog related files.
-# * use caching for reading sidedata (similar to what we do for data).
-# * no longer set offset=0 if sidedata_size=0 (simplify cutoff computation).
-# * Improvement to consider
-#   - avoid compression header in chunk using the default compression?
-#   - forbid "inline" compression mode entirely?
-#   - split the data offset and flag field (the 2 bytes save are mostly trouble)
-#   - keep track of uncompressed -chunk- size (to preallocate memory better)
-#   - keep track of chain base or size (probably not that useful anymore)
-coreconfigitem(
-    b'experimental',
-    b'revlogv2',
-    default=None,
-)
-coreconfigitem(
-    b'experimental',
-    b'revisions.disambiguatewithin',
-    default=None,
-)
-coreconfigitem(
-    b'experimental',
-    b'rust.index',
-    default=False,
-)
-coreconfigitem(
-    b'experimental',
-    b'server.allow-hidden-access',
-    default=list,
-)
-coreconfigitem(
-    b'experimental',
-    b'server.filesdata.recommended-batch-size',
-    default=50000,
-)
-coreconfigitem(
-    b'experimental',
-    b'server.manifestdata.recommended-batch-size',
-    default=100000,
-)
-coreconfigitem(
-    b'experimental',
-    b'server.stream-narrow-clones',
-    default=False,
-)
-coreconfigitem(
-    b'experimental',
-    b'single-head-per-branch',
-    default=False,
-)
-coreconfigitem(
-    b'experimental',
-    b'single-head-per-branch:account-closed-heads',
-    default=False,
-)
-coreconfigitem(
-    b'experimental',
-    b'single-head-per-branch:public-changes-only',
-    default=False,
-)
-coreconfigitem(
-    b'experimental',
-    b'sparse-read',
-    default=False,
-)
-coreconfigitem(
-    b'experimental',
-    b'sparse-read.density-threshold',
-    default=0.50,
-)
-coreconfigitem(
-    b'experimental',
-    b'sparse-read.min-gap-size',
-    default=b'65K',
-)
-coreconfigitem(
-    b'experimental',
-    b'stream-v3',
-    default=False,
-)
-coreconfigitem(
-    b'experimental',
-    b'treemanifest',
-    default=False,
-)
-coreconfigitem(
-    b'experimental',
-    b'update.atomic-file',
-    default=False,
-)
-coreconfigitem(
-    b'experimental',
-    b'web.full-garbage-collection-rate',
-    default=1,  # still forcing a full collection on each request
-)
-coreconfigitem(
-    b'experimental',
-    b'worker.wdir-get-thread-safe',
-    default=False,
-)
-coreconfigitem(
-    b'experimental',
-    b'worker.repository-upgrade',
-    default=False,
-)
-coreconfigitem(
-    b'experimental',
-    b'xdiff',
-    default=False,
-)
-coreconfigitem(
-    b'extensions',
-    b'[^:]*',
-    default=None,
-    generic=True,
-)
-coreconfigitem(
-    b'extensions',
-    b'[^:]*:required',
-    default=False,
-    generic=True,
-)
-coreconfigitem(
-    b'extdata',
-    b'.*',
-    default=None,
-    generic=True,
-)
-coreconfigitem(
-    b'format',
-    b'bookmarks-in-store',
-    default=False,
-)
-coreconfigitem(
-    b'format',
-    b'chunkcachesize',
-    default=None,
-    experimental=True,
-)
-coreconfigitem(
-    # Enable this dirstate format *when creating a new repository*.
-    # Which format to use for existing repos is controlled by .hg/requires
-    b'format',
-    b'use-dirstate-v2',
-    default=False,
-    experimental=True,
-    alias=[(b'format', b'exp-rc-dirstate-v2')],
-)
-coreconfigitem(
-    b'format',
-    b'use-dirstate-v2.automatic-upgrade-of-mismatching-repositories',
-    default=False,
-    experimental=True,
-)
-coreconfigitem(
-    b'format',
-    b'use-dirstate-v2.automatic-upgrade-of-mismatching-repositories:quiet',
-    default=False,
-    experimental=True,
-)
-coreconfigitem(
-    b'format',
-    b'use-dirstate-tracked-hint',
-    default=False,
-    experimental=True,
-)
-coreconfigitem(
-    b'format',
-    b'use-dirstate-tracked-hint.version',
-    default=1,
-    experimental=True,
-)
-coreconfigitem(
-    b'format',
-    b'use-dirstate-tracked-hint.automatic-upgrade-of-mismatching-repositories',
-    default=False,
-    experimental=True,
-)
-coreconfigitem(
-    b'format',
-    b'use-dirstate-tracked-hint.automatic-upgrade-of-mismatching-repositories:quiet',
-    default=False,
-    experimental=True,
-)
-coreconfigitem(
-    b'format',
-    b'dotencode',
-    default=True,
-)
-coreconfigitem(
-    b'format',
-    b'generaldelta',
-    default=False,
-    experimental=True,
-)
-coreconfigitem(
-    b'format',
-    b'manifestcachesize',
-    default=None,
-    experimental=True,
-)
-coreconfigitem(
-    b'format',
-    b'maxchainlen',
-    default=dynamicdefault,
-    experimental=True,
-)
-coreconfigitem(
-    b'format',
-    b'obsstore-version',
-    default=None,
-)
-coreconfigitem(
-    b'format',
-    b'sparse-revlog',
-    default=True,
-)
-coreconfigitem(
-    b'format',
-    b'revlog-compression',
-    default=lambda: [b'zstd', b'zlib'],
-    alias=[(b'experimental', b'format.compression')],
-)
-# Experimental TODOs:
-#
-# * Same as for revlogv2 (but for the reduction of the number of files)
-# * Actually computing the rank of changesets
-# * Improvement to investigate
-#   - storing .hgtags fnode
-#   - storing branch related identifier
-
-coreconfigitem(
-    b'format',
-    b'exp-use-changelog-v2',
-    default=None,
-    experimental=True,
-)
-coreconfigitem(
-    b'format',
-    b'usefncache',
-    default=True,
-)
-coreconfigitem(
-    b'format',
-    b'usegeneraldelta',
-    default=True,
-)
-coreconfigitem(
-    b'format',
-    b'usestore',
-    default=True,
-)
-
-
-def _persistent_nodemap_default():
-    """compute `use-persistent-nodemap` default value
-
-    The feature is disabled unless a fast implementation is available.
-    """
-    from . import policy
-
-    return policy.importrust('revlog') is not None
-
-
-coreconfigitem(
-    b'format',
-    b'use-persistent-nodemap',
-    default=_persistent_nodemap_default,
-)
-coreconfigitem(
-    b'format',
-    b'exp-use-copies-side-data-changeset',
-    default=False,
-    experimental=True,
-)
-coreconfigitem(
-    b'format',
-    b'use-share-safe',
-    default=True,
-)
-coreconfigitem(
-    b'format',
-    b'use-share-safe.automatic-upgrade-of-mismatching-repositories',
-    default=False,
-    experimental=True,
-)
-coreconfigitem(
-    b'format',
-    b'use-share-safe.automatic-upgrade-of-mismatching-repositories:quiet',
-    default=False,
-    experimental=True,
-)
-
-# Moving this on by default means we are confident about the scaling of phases.
-# This is not guaranteed to be the case at the time this message is written.
-coreconfigitem(
-    b'format',
-    b'use-internal-phase',
-    default=False,
-    experimental=True,
-)
-# The interaction between the archived phase and obsolescence markers needs to
-# be sorted out before wider usage of this is to be considered.
-#
-# At the time this message is written, behavior when archiving obsolete
-# changesets differs significantly from stripping. As part of stripping, we
-# also remove the obsolescence markers associated with the stripped
-# changesets, revealing the predecessor changesets when applicable. When
-# archiving, we don't touch the obsolescence markers, keeping everything
-# hidden. This can result in a rather confusing situation for people who
-# combine exchanging drafts with the archived phase, as some markers needed
-# by others may be skipped during exchange.
-coreconfigitem(
-    b'format',
-    b'exp-archived-phase',
-    default=False,
-    experimental=True,
-)
-coreconfigitem(
-    b'shelve',
-    b'store',
-    default=b'internal',
-    experimental=True,
-)
-coreconfigitem(
-    b'fsmonitor',
-    b'warn_when_unused',
-    default=True,
-)
-coreconfigitem(
-    b'fsmonitor',
-    b'warn_update_file_count',
-    default=50000,
-)
-coreconfigitem(
-    b'fsmonitor',
-    b'warn_update_file_count_rust',
-    default=400000,
-)
-coreconfigitem(
-    b'help',
-    br'hidden-command\..*',
-    default=False,
-    generic=True,
-)
-coreconfigitem(
-    b'help',
-    br'hidden-topic\..*',
-    default=False,
-    generic=True,
-)
-coreconfigitem(
-    b'hooks',
-    b'[^:]*',
-    default=dynamicdefault,
-    generic=True,
-)
-coreconfigitem(
-    b'hooks',
-    b'.*:run-with-plain',
-    default=True,
-    generic=True,
-)
-coreconfigitem(
-    b'hgweb-paths',
-    b'.*',
-    default=list,
-    generic=True,
-)
-coreconfigitem(
-    b'hostfingerprints',
-    b'.*',
-    default=list,
-    generic=True,
-)
-coreconfigitem(
-    b'hostsecurity',
-    b'ciphers',
-    default=None,
-)
-coreconfigitem(
-    b'hostsecurity',
-    b'minimumprotocol',
-    default=dynamicdefault,
-)
-coreconfigitem(
-    b'hostsecurity',
-    b'.*:minimumprotocol$',
-    default=dynamicdefault,
-    generic=True,
-)
-coreconfigitem(
-    b'hostsecurity',
-    b'.*:ciphers$',
-    default=dynamicdefault,
-    generic=True,
-)
-coreconfigitem(
-    b'hostsecurity',
-    b'.*:fingerprints$',
-    default=list,
-    generic=True,
-)
-coreconfigitem(
-    b'hostsecurity',
-    b'.*:verifycertsfile$',
-    default=None,
-    generic=True,
-)
-
-coreconfigitem(
-    b'http_proxy',
-    b'always',
-    default=False,
-)
-coreconfigitem(
-    b'http_proxy',
-    b'host',
-    default=None,
-)
-coreconfigitem(
-    b'http_proxy',
-    b'no',
-    default=list,
-)
-coreconfigitem(
-    b'http_proxy',
-    b'passwd',
-    default=None,
-)
-coreconfigitem(
-    b'http_proxy',
-    b'user',
-    default=None,
-)
-
-coreconfigitem(
-    b'http',
-    b'timeout',
-    default=None,
-)
-
-coreconfigitem(
-    b'logtoprocess',
-    b'commandexception',
-    default=None,
-)
-coreconfigitem(
-    b'logtoprocess',
-    b'commandfinish',
-    default=None,
-)
-coreconfigitem(
-    b'logtoprocess',
-    b'command',
-    default=None,
-)
-coreconfigitem(
-    b'logtoprocess',
-    b'develwarn',
-    default=None,
-)
-coreconfigitem(
-    b'logtoprocess',
-    b'uiblocked',
-    default=None,
-)
-coreconfigitem(
-    b'merge',
-    b'checkunknown',
-    default=b'abort',
-)
-coreconfigitem(
-    b'merge',
-    b'checkignored',
-    default=b'abort',
-)
-coreconfigitem(
-    b'experimental',
-    b'merge.checkpathconflicts',
-    default=False,
-)
-coreconfigitem(
-    b'merge',
-    b'followcopies',
-    default=True,
-)
-coreconfigitem(
-    b'merge',
-    b'on-failure',
-    default=b'continue',
-)
-coreconfigitem(
-    b'merge',
-    b'preferancestor',
-    default=lambda: [b'*'],
-    experimental=True,
-)
-coreconfigitem(
-    b'merge',
-    b'strict-capability-check',
-    default=False,
-)
-coreconfigitem(
-    b'merge',
-    b'disable-partial-tools',
-    default=False,
-    experimental=True,
-)
-coreconfigitem(
-    b'partial-merge-tools',
-    b'.*',
-    default=None,
-    generic=True,
-    experimental=True,
-)
-coreconfigitem(
-    b'partial-merge-tools',
-    br'.*\.patterns',
-    default=dynamicdefault,
-    generic=True,
-    priority=-1,
-    experimental=True,
-)
-coreconfigitem(
-    b'partial-merge-tools',
-    br'.*\.executable$',
-    default=dynamicdefault,
-    generic=True,
-    priority=-1,
-    experimental=True,
-)
-coreconfigitem(
-    b'partial-merge-tools',
-    br'.*\.order',
-    default=0,
-    generic=True,
-    priority=-1,
-    experimental=True,
-)
-coreconfigitem(
-    b'partial-merge-tools',
-    br'.*\.args',
-    default=b"$local $base $other",
-    generic=True,
-    priority=-1,
-    experimental=True,
-)
-coreconfigitem(
-    b'partial-merge-tools',
-    br'.*\.disable',
-    default=False,
-    generic=True,
-    priority=-1,
-    experimental=True,
-)
-coreconfigitem(
-    b'merge-tools',
-    b'.*',
-    default=None,
-    generic=True,
-)
-coreconfigitem(
-    b'merge-tools',
-    br'.*\.args$',
-    default=b"$local $base $other",
-    generic=True,
-    priority=-1,
-)
-coreconfigitem(
-    b'merge-tools',
-    br'.*\.binary$',
-    default=False,
-    generic=True,
-    priority=-1,
-)
-coreconfigitem(
-    b'merge-tools',
-    br'.*\.check$',
-    default=list,
-    generic=True,
-    priority=-1,
-)
-coreconfigitem(
-    b'merge-tools',
-    br'.*\.checkchanged$',
-    default=False,
-    generic=True,
-    priority=-1,
-)
-coreconfigitem(
-    b'merge-tools',
-    br'.*\.executable$',
-    default=dynamicdefault,
-    generic=True,
-    priority=-1,
-)
-coreconfigitem(
-    b'merge-tools',
-    br'.*\.fixeol$',
-    default=False,
-    generic=True,
-    priority=-1,
-)
-coreconfigitem(
-    b'merge-tools',
-    br'.*\.gui$',
-    default=False,
-    generic=True,
-    priority=-1,
-)
-coreconfigitem(
-    b'merge-tools',
-    br'.*\.mergemarkers$',
-    default=b'basic',
-    generic=True,
-    priority=-1,
-)
-coreconfigitem(
-    b'merge-tools',
-    br'.*\.mergemarkertemplate$',
-    default=dynamicdefault,  # take from command-templates.mergemarker
-    generic=True,
-    priority=-1,
-)
-coreconfigitem(
-    b'merge-tools',
-    br'.*\.priority$',
-    default=0,
-    generic=True,
-    priority=-1,
-)
-coreconfigitem(
-    b'merge-tools',
-    br'.*\.premerge$',
-    default=dynamicdefault,
-    generic=True,
-    priority=-1,
-)
-coreconfigitem(
-    b'merge-tools',
-    br'.*\.regappend$',
-    default=b"",
-    generic=True,
-    priority=-1,
-)
-coreconfigitem(
-    b'merge-tools',
-    br'.*\.symlink$',
-    default=False,
-    generic=True,
-    priority=-1,
-)
-coreconfigitem(
-    b'pager',
-    b'attend-.*',
-    default=dynamicdefault,
-    generic=True,
-)
-coreconfigitem(
-    b'pager',
-    b'ignore',
-    default=list,
-)
-coreconfigitem(
-    b'pager',
-    b'pager',
-    default=dynamicdefault,
-)
-coreconfigitem(
-    b'patch',
-    b'eol',
-    default=b'strict',
-)
-coreconfigitem(
-    b'patch',
-    b'fuzz',
-    default=2,
-)
-coreconfigitem(
-    b'paths',
-    b'default',
-    default=None,
-)
-coreconfigitem(
-    b'paths',
-    b'default-push',
-    default=None,
-)
-coreconfigitem(
-    b'paths',
-    b'[^:]*',
-    default=None,
-    generic=True,
-)
-coreconfigitem(
-    b'paths',
-    b'.*:bookmarks.mode',
-    default='default',
-    generic=True,
-)
-coreconfigitem(
-    b'paths',
-    b'.*:multi-urls',
-    default=False,
-    generic=True,
-)
-coreconfigitem(
-    b'paths',
-    b'.*:pushrev',
-    default=None,
-    generic=True,
-)
-coreconfigitem(
-    b'paths',
-    b'.*:pushurl',
-    default=None,
-    generic=True,
-)
-coreconfigitem(
-    b'paths',
-    b'.*:pulled-delta-reuse-policy',
-    default=None,
-    generic=True,
-)
-coreconfigitem(
-    b'phases',
-    b'checksubrepos',
-    default=b'follow',
-)
-coreconfigitem(
-    b'phases',
-    b'new-commit',
-    default=b'draft',
-)
-coreconfigitem(
-    b'phases',
-    b'publish',
-    default=True,
-)
-coreconfigitem(
-    b'profiling',
-    b'enabled',
-    default=False,
-)
-coreconfigitem(
-    b'profiling',
-    b'format',
-    default=b'text',
-)
-coreconfigitem(
-    b'profiling',
-    b'freq',
-    default=1000,
-)
-coreconfigitem(
-    b'profiling',
-    b'limit',
-    default=30,
-)
-coreconfigitem(
-    b'profiling',
-    b'nested',
-    default=0,
-)
-coreconfigitem(
-    b'profiling',
-    b'output',
-    default=None,
-)
-coreconfigitem(
-    b'profiling',
-    b'showmax',
-    default=0.999,
-)
-coreconfigitem(
-    b'profiling',
-    b'showmin',
-    default=dynamicdefault,
-)
-coreconfigitem(
-    b'profiling',
-    b'showtime',
-    default=True,
-)
-coreconfigitem(
-    b'profiling',
-    b'sort',
-    default=b'inlinetime',
-)
-coreconfigitem(
-    b'profiling',
-    b'statformat',
-    default=b'hotpath',
-)
-coreconfigitem(
-    b'profiling',
-    b'time-track',
-    default=dynamicdefault,
-)
-coreconfigitem(
-    b'profiling',
-    b'type',
-    default=b'stat',
-)
-coreconfigitem(
-    b'progress',
-    b'assume-tty',
-    default=False,
-)
-coreconfigitem(
-    b'progress',
-    b'changedelay',
-    default=1,
-)
-coreconfigitem(
-    b'progress',
-    b'clear-complete',
-    default=True,
-)
-coreconfigitem(
-    b'progress',
-    b'debug',
-    default=False,
-)
-coreconfigitem(
-    b'progress',
-    b'delay',
-    default=3,
-)
-coreconfigitem(
-    b'progress',
-    b'disable',
-    default=False,
-)
-coreconfigitem(
-    b'progress',
-    b'estimateinterval',
-    default=60.0,
-)
-coreconfigitem(
-    b'progress',
-    b'format',
-    default=lambda: [b'topic', b'bar', b'number', b'estimate'],
-)
-coreconfigitem(
-    b'progress',
-    b'refresh',
-    default=0.1,
-)
-coreconfigitem(
-    b'progress',
-    b'width',
-    default=dynamicdefault,
-)
-coreconfigitem(
-    b'pull',
-    b'confirm',
-    default=False,
-)
-coreconfigitem(
-    b'push',
-    b'pushvars.server',
-    default=False,
-)
-coreconfigitem(
-    b'rewrite',
-    b'backup-bundle',
-    default=True,
-    alias=[(b'ui', b'history-editing-backup')],
-)
-coreconfigitem(
-    b'rewrite',
-    b'update-timestamp',
-    default=False,
-)
-coreconfigitem(
-    b'rewrite',
-    b'empty-successor',
-    default=b'skip',
-    experimental=True,
-)
-# experimental as long as format.use-dirstate-v2 is.
-coreconfigitem(
-    b'storage',
-    b'dirstate-v2.slow-path',
-    default=b"abort",
-    experimental=True,
-)
-coreconfigitem(
-    b'storage',
-    b'new-repo-backend',
-    default=b'revlogv1',
-    experimental=True,
-)
-coreconfigitem(
-    b'storage',
-    b'revlog.optimize-delta-parent-choice',
-    default=True,
-    alias=[(b'format', b'aggressivemergedeltas')],
-)
-coreconfigitem(
-    b'storage',
-    b'revlog.delta-parent-search.candidate-group-chunk-size',
-    default=20,
-)
-coreconfigitem(
-    b'storage',
-    b'revlog.issue6528.fix-incoming',
-    default=True,
-)
-# experimental as long as rust is experimental (or a C version is implemented)
-coreconfigitem(
-    b'storage',
-    b'revlog.persistent-nodemap.mmap',
-    default=True,
-)
-# experimental as long as format.use-persistent-nodemap is.
-coreconfigitem(
-    b'storage',
-    b'revlog.persistent-nodemap.slow-path',
-    default=b"abort",
-)
-
-coreconfigitem(
-    b'storage',
-    b'revlog.reuse-external-delta',
-    default=True,
-)
-# This option is True unless `format.generaldelta` is set.
-coreconfigitem(
-    b'storage',
-    b'revlog.reuse-external-delta-parent',
-    default=None,
-)
-coreconfigitem(
-    b'storage',
-    b'revlog.zlib.level',
-    default=None,
-)
-coreconfigitem(
-    b'storage',
-    b'revlog.zstd.level',
-    default=None,
-)
-coreconfigitem(
-    b'server',
-    b'bookmarks-pushkey-compat',
-    default=True,
-)
-coreconfigitem(
-    b'server',
-    b'bundle1',
-    default=True,
-)
-coreconfigitem(
-    b'server',
-    b'bundle1gd',
-    default=None,
-)
-coreconfigitem(
-    b'server',
-    b'bundle1.pull',
-    default=None,
-)
-coreconfigitem(
-    b'server',
-    b'bundle1gd.pull',
-    default=None,
-)
-coreconfigitem(
-    b'server',
-    b'bundle1.push',
-    default=None,
-)
-coreconfigitem(
-    b'server',
-    b'bundle1gd.push',
-    default=None,
-)
-coreconfigitem(
-    b'server',
-    b'bundle2.stream',
-    default=True,
-    alias=[(b'experimental', b'bundle2.stream')],
-)
-coreconfigitem(
-    b'server',
-    b'compressionengines',
-    default=list,
-)
-coreconfigitem(
-    b'server',
-    b'concurrent-push-mode',
-    default=b'check-related',
-)
-coreconfigitem(
-    b'server',
-    b'disablefullbundle',
-    default=False,
-)
-coreconfigitem(
-    b'server',
-    b'maxhttpheaderlen',
-    default=1024,
-)
-coreconfigitem(
-    b'server',
-    b'pullbundle',
-    default=True,
-)
-coreconfigitem(
-    b'server',
-    b'preferuncompressed',
-    default=False,
-)
-coreconfigitem(
-    b'server',
-    b'streamunbundle',
-    default=False,
-)
-coreconfigitem(
-    b'server',
-    b'uncompressed',
-    default=True,
-)
-coreconfigitem(
-    b'server',
-    b'uncompressedallowsecret',
-    default=False,
-)
-coreconfigitem(
-    b'server',
-    b'view',
-    default=b'served',
-)
-coreconfigitem(
-    b'server',
-    b'validate',
-    default=False,
-)
-coreconfigitem(
-    b'server',
-    b'zliblevel',
-    default=-1,
-)
-coreconfigitem(
-    b'server',
-    b'zstdlevel',
-    default=3,
-)
-coreconfigitem(
-    b'share',
-    b'pool',
-    default=None,
-)
-coreconfigitem(
-    b'share',
-    b'poolnaming',
-    default=b'identity',
-)
-coreconfigitem(
-    b'share',
-    b'safe-mismatch.source-not-safe',
-    default=b'abort',
-)
-coreconfigitem(
-    b'share',
-    b'safe-mismatch.source-safe',
-    default=b'abort',
-)
-coreconfigitem(
-    b'share',
-    b'safe-mismatch.source-not-safe.warn',
-    default=True,
-)
-coreconfigitem(
-    b'share',
-    b'safe-mismatch.source-safe.warn',
-    default=True,
-)
-coreconfigitem(
-    b'share',
-    b'safe-mismatch.source-not-safe:verbose-upgrade',
-    default=True,
-)
-coreconfigitem(
-    b'share',
-    b'safe-mismatch.source-safe:verbose-upgrade',
-    default=True,
-)
-coreconfigitem(
-    b'shelve',
-    b'maxbackups',
-    default=10,
-)
-coreconfigitem(
-    b'smtp',
-    b'host',
-    default=None,
-)
-coreconfigitem(
-    b'smtp',
-    b'local_hostname',
-    default=None,
-)
-coreconfigitem(
-    b'smtp',
-    b'password',
-    default=None,
-)
-coreconfigitem(
-    b'smtp',
-    b'port',
-    default=dynamicdefault,
-)
-coreconfigitem(
-    b'smtp',
-    b'tls',
-    default=b'none',
-)
-coreconfigitem(
-    b'smtp',
-    b'username',
-    default=None,
-)
-coreconfigitem(
-    b'sparse',
-    b'missingwarning',
-    default=True,
-    experimental=True,
-)
-coreconfigitem(
-    b'subrepos',
-    b'allowed',
-    default=dynamicdefault,  # to make backporting simpler
-)
-coreconfigitem(
-    b'subrepos',
-    b'hg:allowed',
-    default=dynamicdefault,
-)
-coreconfigitem(
-    b'subrepos',
-    b'git:allowed',
-    default=dynamicdefault,
-)
-coreconfigitem(
-    b'subrepos',
-    b'svn:allowed',
-    default=dynamicdefault,
-)
-coreconfigitem(
-    b'templates',
-    b'.*',
-    default=None,
-    generic=True,
-)
-coreconfigitem(
-    b'templateconfig',
-    b'.*',
-    default=dynamicdefault,
-    generic=True,
-)
-coreconfigitem(
-    b'trusted',
-    b'groups',
-    default=list,
-)
-coreconfigitem(
-    b'trusted',
-    b'users',
-    default=list,
-)
-coreconfigitem(
-    b'ui',
-    b'_usedassubrepo',
-    default=False,
-)
-coreconfigitem(
-    b'ui',
-    b'allowemptycommit',
-    default=False,
-)
-coreconfigitem(
-    b'ui',
-    b'archivemeta',
-    default=True,
-)
-coreconfigitem(
-    b'ui',
-    b'askusername',
-    default=False,
-)
-coreconfigitem(
-    b'ui',
-    b'available-memory',
-    default=None,
-)
-
-coreconfigitem(
-    b'ui',
-    b'clonebundlefallback',
-    default=False,
-)
-coreconfigitem(
-    b'ui',
-    b'clonebundleprefers',
-    default=list,
-)
-coreconfigitem(
-    b'ui',
-    b'clonebundles',
-    default=True,
-)
-coreconfigitem(
-    b'ui',
-    b'color',
-    default=b'auto',
-)
-coreconfigitem(
-    b'ui',
-    b'commitsubrepos',
-    default=False,
-)
-coreconfigitem(
-    b'ui',
-    b'debug',
-    default=False,
-)
-coreconfigitem(
-    b'ui',
-    b'debugger',
-    default=None,
-)
-coreconfigitem(
-    b'ui',
-    b'editor',
-    default=dynamicdefault,
-)
-coreconfigitem(
-    b'ui',
-    b'detailed-exit-code',
-    default=False,
-    experimental=True,
-)
-coreconfigitem(
-    b'ui',
-    b'fallbackencoding',
-    default=None,
-)
-coreconfigitem(
-    b'ui',
-    b'forcecwd',
-    default=None,
-)
-coreconfigitem(
-    b'ui',
-    b'forcemerge',
-    default=None,
-)
-coreconfigitem(
-    b'ui',
-    b'formatdebug',
-    default=False,
-)
-coreconfigitem(
-    b'ui',
-    b'formatjson',
-    default=False,
-)
-coreconfigitem(
-    b'ui',
-    b'formatted',
-    default=None,
-)
-coreconfigitem(
-    b'ui',
-    b'interactive',
-    default=None,
-)
-coreconfigitem(
-    b'ui',
-    b'interface',
-    default=None,
-)
-coreconfigitem(
-    b'ui',
-    b'interface.chunkselector',
-    default=None,
-)
-coreconfigitem(
-    b'ui',
-    b'large-file-limit',
-    default=10 * (2 ** 20),
-)
-coreconfigitem(
-    b'ui',
-    b'logblockedtimes',
-    default=False,
-)
-coreconfigitem(
-    b'ui',
-    b'merge',
-    default=None,
-)
-coreconfigitem(
-    b'ui',
-    b'mergemarkers',
-    default=b'basic',
-)
-coreconfigitem(
-    b'ui',
-    b'message-output',
-    default=b'stdio',
-)
-coreconfigitem(
-    b'ui',
-    b'nontty',
-    default=False,
-)
-coreconfigitem(
-    b'ui',
-    b'origbackuppath',
-    default=None,
-)
-coreconfigitem(
-    b'ui',
-    b'paginate',
-    default=True,
-)
-coreconfigitem(
-    b'ui',
-    b'patch',
-    default=None,
-)
-coreconfigitem(
-    b'ui',
-    b'portablefilenames',
-    default=b'warn',
-)
-coreconfigitem(
-    b'ui',
-    b'promptecho',
-    default=False,
-)
-coreconfigitem(
-    b'ui',
-    b'quiet',
-    default=False,
-)
-coreconfigitem(
-    b'ui',
-    b'quietbookmarkmove',
-    default=False,
-)
-coreconfigitem(
-    b'ui',
-    b'relative-paths',
-    default=b'legacy',
-)
-coreconfigitem(
-    b'ui',
-    b'remotecmd',
-    default=b'hg',
-)
-coreconfigitem(
-    b'ui',
-    b'report_untrusted',
-    default=True,
-)
-coreconfigitem(
-    b'ui',
-    b'rollback',
-    default=True,
-)
-coreconfigitem(
-    b'ui',
-    b'signal-safe-lock',
-    default=True,
-)
-coreconfigitem(
-    b'ui',
-    b'slash',
-    default=False,
-)
-coreconfigitem(
-    b'ui',
-    b'ssh',
-    default=b'ssh',
-)
-coreconfigitem(
-    b'ui',
-    b'ssherrorhint',
-    default=None,
-)
-coreconfigitem(
-    b'ui',
-    b'statuscopies',
-    default=False,
-)
-coreconfigitem(
-    b'ui',
-    b'strict',
-    default=False,
-)
-coreconfigitem(
-    b'ui',
-    b'style',
-    default=b'',
-)
-coreconfigitem(
-    b'ui',
-    b'supportcontact',
-    default=None,
-)
-coreconfigitem(
-    b'ui',
-    b'textwidth',
-    default=78,
-)
-coreconfigitem(
-    b'ui',
-    b'timeout',
-    default=b'600',
-)
-coreconfigitem(
-    b'ui',
-    b'timeout.warn',
-    default=0,
-)
-coreconfigitem(
-    b'ui',
-    b'timestamp-output',
-    default=False,
-)
-coreconfigitem(
-    b'ui',
-    b'traceback',
-    default=False,
-)
-coreconfigitem(
-    b'ui',
-    b'tweakdefaults',
-    default=False,
-)
-coreconfigitem(b'ui', b'username', alias=[(b'ui', b'user')])
-coreconfigitem(
-    b'ui',
-    b'verbose',
-    default=False,
-)
-coreconfigitem(
-    b'verify',
-    b'skipflags',
-    default=0,
-)
-coreconfigitem(
-    b'web',
-    b'allowbz2',
-    default=False,
-)
-coreconfigitem(
-    b'web',
-    b'allowgz',
-    default=False,
-)
-coreconfigitem(
-    b'web',
-    b'allow-pull',
-    alias=[(b'web', b'allowpull')],
-    default=True,
-)
-coreconfigitem(
-    b'web',
-    b'allow-push',
-    alias=[(b'web', b'allow_push')],
-    default=list,
-)
-coreconfigitem(
-    b'web',
-    b'allowzip',
-    default=False,
-)
-coreconfigitem(
-    b'web',
-    b'archivesubrepos',
-    default=False,
-)
-coreconfigitem(
-    b'web',
-    b'cache',
-    default=True,
-)
-coreconfigitem(
-    b'web',
-    b'comparisoncontext',
-    default=5,
-)
-coreconfigitem(
-    b'web',
-    b'contact',
-    default=None,
-)
-coreconfigitem(
-    b'web',
-    b'deny_push',
-    default=list,
-)
-coreconfigitem(
-    b'web',
-    b'guessmime',
-    default=False,
-)
-coreconfigitem(
-    b'web',
-    b'hidden',
-    default=False,
-)
-coreconfigitem(
-    b'web',
-    b'labels',
-    default=list,
-)
-coreconfigitem(
-    b'web',
-    b'logoimg',
-    default=b'hglogo.png',
-)
-coreconfigitem(
-    b'web',
-    b'logourl',
-    default=b'https://mercurial-scm.org/',
-)
-coreconfigitem(
-    b'web',
-    b'accesslog',
-    default=b'-',
-)
-coreconfigitem(
-    b'web',
-    b'address',
-    default=b'',
-)
-coreconfigitem(
-    b'web',
-    b'allow-archive',
-    alias=[(b'web', b'allow_archive')],
-    default=list,
-)
-coreconfigitem(
-    b'web',
-    b'allow_read',
-    default=list,
-)
-coreconfigitem(
-    b'web',
-    b'baseurl',
-    default=None,
-)
-coreconfigitem(
-    b'web',
-    b'cacerts',
-    default=None,
-)
-coreconfigitem(
-    b'web',
-    b'certificate',
-    default=None,
-)
-coreconfigitem(
-    b'web',
-    b'collapse',
-    default=False,
-)
-coreconfigitem(
-    b'web',
-    b'csp',
-    default=None,
-)
-coreconfigitem(
-    b'web',
-    b'deny_read',
-    default=list,
-)
-coreconfigitem(
-    b'web',
-    b'descend',
-    default=True,
-)
-coreconfigitem(
-    b'web',
-    b'description',
-    default=b"",
-)
-coreconfigitem(
-    b'web',
-    b'encoding',
-    default=lambda: encoding.encoding,
-)
-coreconfigitem(
-    b'web',
-    b'errorlog',
-    default=b'-',
-)
-coreconfigitem(
-    b'web',
-    b'ipv6',
-    default=False,
-)
-coreconfigitem(
-    b'web',
-    b'maxchanges',
-    default=10,
-)
-coreconfigitem(
-    b'web',
-    b'maxfiles',
-    default=10,
-)
-coreconfigitem(
-    b'web',
-    b'maxshortchanges',
-    default=60,
-)
-coreconfigitem(
-    b'web',
-    b'motd',
-    default=b'',
-)
-coreconfigitem(
-    b'web',
-    b'name',
-    default=dynamicdefault,
-)
-coreconfigitem(
-    b'web',
-    b'port',
-    default=8000,
-)
-coreconfigitem(
-    b'web',
-    b'prefix',
-    default=b'',
-)
-coreconfigitem(
-    b'web',
-    b'push_ssl',
-    default=True,
-)
-coreconfigitem(
-    b'web',
-    b'refreshinterval',
-    default=20,
-)
-coreconfigitem(
-    b'web',
-    b'server-header',
-    default=None,
-)
-coreconfigitem(
-    b'web',
-    b'static',
-    default=None,
-)
-coreconfigitem(
-    b'web',
-    b'staticurl',
-    default=None,
-)
-coreconfigitem(
-    b'web',
-    b'stripes',
-    default=1,
-)
-coreconfigitem(
-    b'web',
-    b'style',
-    default=b'paper',
-)
-coreconfigitem(
-    b'web',
-    b'templates',
-    default=None,
-)
-coreconfigitem(
-    b'web',
-    b'view',
-    default=b'served',
-    experimental=True,
-)
-coreconfigitem(
-    b'worker',
-    b'backgroundclose',
-    default=dynamicdefault,
-)
-# Windows defaults to a limit of 512 open files. A buffer of 128
-# should give us enough headway.
-coreconfigitem(
-    b'worker',
-    b'backgroundclosemaxqueue',
-    default=384,
-)
-coreconfigitem(
-    b'worker',
-    b'backgroundcloseminfilecount',
-    default=2048,
-)
-coreconfigitem(
-    b'worker',
-    b'backgroundclosethreadcount',
-    default=4,
-)
-coreconfigitem(
-    b'worker',
-    b'enabled',
-    default=True,
-)
-coreconfigitem(
-    b'worker',
-    b'numcpus',
-    default=None,
-)
-
-# Rebase related configuration moved to core because other extension are doing
-# strange things. For example, shelve import the extensions to reuse some bit
-# without formally loading it.
-coreconfigitem(
-    b'commands',
-    b'rebase.requiredest',
-    default=False,
-)
-coreconfigitem(
-    b'experimental',
-    b'rebaseskipobsolete',
-    default=True,
-)
-coreconfigitem(
-    b'rebase',
-    b'singletransaction',
-    default=False,
-)
-coreconfigitem(
-    b'rebase',
-    b'experimental.inmemory',
-    default=False,
-)
-
-# This setting controls creation of a rebase_source extra field
-# during rebase. When False, no such field is created. This is
-# useful eg for incrementally converting changesets and then
-# rebasing them onto an existing repo.
-# WARNING: this is an advanced setting reserved for people who know
-# exactly what they are doing. Misuse of this setting can easily
-# result in obsmarker cycles and a vivid headache.
-coreconfigitem(
-    b'rebase',
-    b'store-source',
-    default=True,
-    experimental=True,
-)
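+# The long list of coreconfigitem() calls that used to live here has been
+# replaced by declarations in mercurial/configitems.toml; the call below loads
+# that file and registers each item it declares.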
+import_configitems_from_file()
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/mercurial/configitems.toml	Mon Aug 07 11:08:00 2023 +0200
@@ -0,0 +1,2839 @@
+# configitems.toml - centralized declaration of configuration options
+#
+# This file contains declarations of the core Mercurial configuration options.
+#
+# # Structure
+#
+# items: array of config items
+# templates: mapping of template name to template declaration
+# template-applications: array of template applications
+#
+# # Elements
+#
+# ## Item
+#
+# Declares a core Mercurial option.
+#
+# - section: string (required)
+# - name: string (required)
+# - default-type: string, changes how `default` is interpreted (e.g. "dynamic", "lambda", "list_type")
+# - default: any
+# - generic: boolean
+# - priority: integer, only if `generic` is true
+# - alias: list of 2-tuples of strings
+# - experimental: boolean
+# - documentation: string
+# - in_core_extension: string
+#
+# ## Template
+#
+# Declares a group of options to be re-used for multiple sections.
+#
+# - all the same fields as `Item`, except `section` and `name`
+# - `suffix` (string, required)
+#
+# ## Template applications
+#
+# Uses a `Template` to instantiate its options in a given section.
+#
+# - template: string (required, must match a `Template` name)
+# - section: string (required)
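+#
+# # Example
+#
+# For illustration only (these are not real Mercurial options), an item, a
+# template and a template application could be declared roughly as follows:
+#
+#   [[items]]
+#   section = "example-section"
+#   name = "some-option"
+#   default = true
+#   documentation = "A hypothetical option, shown only to illustrate the format."
+#
+#   [[templates.example-group]]
+#   suffix = "enabled"
+#   default = false
+#
+#   [[template-applications]]
+#   template = "example-group"
+#   section = "example-section"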
+
+[[items]]
+section = "alias"
+name = ".*"
+default-type = "dynamic"
+generic = true
+
+[[items]]
+section = "auth"
+name = "cookiefile"
+
+# bookmarks.pushing: internal hack for discovery
+[[items]]
+section = "bookmarks"
+name = "pushing"
+default-type = "list_type"
+
+# bundle.mainreporoot: internal hack for bundlerepo
+[[items]]
+section = "bundle"
+name = "mainreporoot"
+default = ""
+
+[[items]]
+section = "censor"
+name = "policy"
+default = "abort"
+experimental = true
+
+[[items]]
+section = "chgserver"
+name = "idletimeout"
+default = 3600
+
+[[items]]
+section = "chgserver"
+name = "skiphash"
+default = false
+
+[[items]]
+section = "cmdserver"
+name = "log"
+
+[[items]]
+section = "cmdserver"
+name = "max-log-files"
+default = 7
+
+[[items]]
+section = "cmdserver"
+name = "max-log-size"
+default = "1 MB"
+
+[[items]]
+section = "cmdserver"
+name = "max-repo-cache"
+default = 0
+experimental = true
+
+[[items]]
+section = "cmdserver"
+name = "message-encodings"
+default-type = "list_type"
+
+[[items]]
+section = "cmdserver"
+name = "shutdown-on-interrupt"
+default = true
+
+[[items]]
+section = "cmdserver"
+name = "track-log"
+default-type = "lambda"
+default = [ "chgserver", "cmdserver", "repocache",]
+
+[[items]]
+section = "color"
+name = ".*"
+generic = true
+
+[[items]]
+section = "color"
+name = "mode"
+default = "auto"
+
+[[items]]
+section = "color"
+name = "pagermode"
+default-type = "dynamic"
+
+[[items]]
+section = "command-templates"
+name = "graphnode"
+alias = [["ui", "graphnodetemplate"]]
+
+[[items]]
+section = "command-templates"
+name = "log"
+alias = [["ui", "logtemplate"]]
+
+[[items]]
+section = "command-templates"
+name = "mergemarker"
+default = '{node|short} {ifeq(tags, "tip", "", ifeq(tags, "", "", "{tags} "))}{if(bookmarks, "{bookmarks} ")}{ifeq(branch, "default", "", "{branch} ")}- {author|user}: {desc|firstline}'
+alias = [["ui", "mergemarkertemplate"]]
+
+[[items]]
+section = "command-templates"
+name = "oneline-summary"
+
+[[items]]
+section = "command-templates"
+name = "oneline-summary.*"
+default-type = "dynamic"
+generic = true
+
+[[items]]
+section = "command-templates"
+name = "pre-merge-tool-output"
+alias = [["ui", "pre-merge-tool-output-template"]]
+
+[[items]]
+section = "commands"
+name = "commit.post-status"
+default = false
+
+[[items]]
+section = "commands"
+name = "grep.all-files"
+default = false
+experimental = true
+
+[[items]]
+section = "commands"
+name = "merge.require-rev"
+default = false
+
+[[items]]
+section = "commands"
+name = "push.require-revs"
+default = false
+
+# Rebase-related configuration moved to core because other extensions are doing
+# strange things. For example, shelve imports the extension to reuse some bits
+# of it without formally loading it.
+[[items]]
+section = "commands"
+name = "rebase.requiredest"
+default = false
+
+[[items]]
+section = "commands"
+name = "resolve.confirm"
+default = false
+
+[[items]]
+section = "commands"
+name = "resolve.explicit-re-merge"
+default = false
+
+[[items]]
+section = "commands"
+name = "resolve.mark-check"
+default = "none"
+
+[[items]]
+section = "commands"
+name = "show.aliasprefix"
+default-type = "list_type"
+
+[[items]]
+section = "commands"
+name = "status.relative"
+default = false
+
+[[items]]
+section = "commands"
+name = "status.skipstates"
+default = []
+experimental = true
+
+[[items]]
+section = "commands"
+name = "status.terse"
+default = ""
+
+[[items]]
+section = "commands"
+name = "status.verbose"
+default = false
+
+[[items]]
+section = "commands"
+name = "update.check"
+
+[[items]]
+section = "commands"
+name = "update.requiredest"
+default = false
+
+[[items]]
+section = "committemplate"
+name = ".*"
+generic = true
+
+[[items]]
+section = "convert"
+name = "bzr.saverev"
+default = true
+
+[[items]]
+section = "convert"
+name = "cvsps.cache"
+default = true
+
+[[items]]
+section = "convert"
+name = "cvsps.fuzz"
+default = 60
+
+[[items]]
+section = "convert"
+name = "cvsps.logencoding"
+
+[[items]]
+section = "convert"
+name = "cvsps.mergefrom"
+
+[[items]]
+section = "convert"
+name = "cvsps.mergeto"
+
+[[items]]
+section = "convert"
+name = "git.committeractions"
+default-type = "lambda"
+default = [ "messagedifferent",]
+
+[[items]]
+section = "convert"
+name = "git.extrakeys"
+default-type = "list_type"
+
+[[items]]
+section = "convert"
+name = "git.findcopiesharder"
+default = false
+
+[[items]]
+section = "convert"
+name = "git.remoteprefix"
+default = "remote"
+
+[[items]]
+section = "convert"
+name = "git.renamelimit"
+default = 400
+
+[[items]]
+section = "convert"
+name = "git.saverev"
+default = true
+
+[[items]]
+section = "convert"
+name = "git.similarity"
+default = 50
+
+[[items]]
+section = "convert"
+name = "git.skipsubmodules"
+default = false
+
+[[items]]
+section = "convert"
+name = "hg.clonebranches"
+default = false
+
+[[items]]
+section = "convert"
+name = "hg.ignoreerrors"
+default = false
+
+[[items]]
+section = "convert"
+name = "hg.preserve-hash"
+default = false
+
+[[items]]
+section = "convert"
+name = "hg.revs"
+
+[[items]]
+section = "convert"
+name = "hg.saverev"
+default = false
+
+[[items]]
+section = "convert"
+name = "hg.sourcename"
+
+[[items]]
+section = "convert"
+name = "hg.startrev"
+
+[[items]]
+section = "convert"
+name = "hg.tagsbranch"
+default = "default"
+
+[[items]]
+section = "convert"
+name = "hg.usebranchnames"
+default = true
+
+[[items]]
+section = "convert"
+name = "ignoreancestorcheck"
+default = false
+experimental = true
+
+[[items]]
+section = "convert"
+name = "localtimezone"
+default = false
+
+[[items]]
+section = "convert"
+name = "p4.encoding"
+default-type = "dynamic"
+
+[[items]]
+section = "convert"
+name = "p4.startrev"
+default = 0
+
+[[items]]
+section = "convert"
+name = "skiptags"
+default = false
+
+[[items]]
+section = "convert"
+name = "svn.branches"
+
+[[items]]
+section = "convert"
+name = "svn.dangerous-set-commit-dates"
+default = false
+
+[[items]]
+section = "convert"
+name = "svn.debugsvnlog"
+default = true
+
+[[items]]
+section = "convert"
+name = "svn.startrev"
+default = 0
+
+[[items]]
+section = "convert"
+name = "svn.tags"
+
+[[items]]
+section = "convert"
+name = "svn.trunk"
+
+[[items]]
+section = "debug"
+name = "bundling-stats"
+default = false
+documentation = "Display extra information about the bundling process."
+
+[[items]]
+section = "debug"
+name = "dirstate.delaywrite"
+default = 0
+
+[[items]]
+section = "debug"
+name = "revlog.debug-delta"
+default = false
+
+[[items]]
+section = "debug"
+name = "revlog.verifyposition.changelog"
+default = ""
+
+[[items]]
+section = "debug"
+name = "unbundling-stats"
+default = false
+documentation = "Display extra information about the unbundling process."
+
+[[items]]
+section = "defaults"
+name = ".*"
+generic = true
+
+[[items]]
+section = "devel"
+name = "all-warnings"
+default = false
+
+[[items]]
+section = "devel"
+name = "bundle.delta"
+default = ""
+
+[[items]]
+section = "devel"
+name = "bundle2.debug"
+default = false
+
+[[items]]
+section = "devel"
+name = "cache-vfs"
+
+[[items]]
+section = "devel"
+name = "check-locks"
+default = false
+
+[[items]]
+section = "devel"
+name = "check-relroot"
+default = false
+
+[[items]]
+section = "devel"
+name = "copy-tracing.multi-thread"
+default = true
+
+# Track copy information for all files, not just "added" ones (very slow)
+[[items]]
+section = "devel"
+name = "copy-tracing.trace-all-files"
+default = false
+
+[[items]]
+section = "devel"
+name = "debug.abort-update"
+default = false
+documentation = """If true, then any merge with the working copy, \
+e.g. [hg update], will be aborted after figuring out what needs to be done, \
+but before spawning the parallel worker."""
+
+[[items]]
+section = "devel"
+name = "debug.copies"
+default = false
+
+[[items]]
+section = "devel"
+name = "debug.extensions"
+default = false
+
+[[items]]
+section = "devel"
+name = "debug.peer-request"
+default = false
+
+[[items]]
+section = "devel"
+name = "debug.repo-filters"
+default = false
+
+[[items]]
+section = "devel"
+name = "default-date"
+
+[[items]]
+section = "devel"
+name = "deprec-warn"
+default = false
+
+# possible values:
+# - auto (the default)
+# - force-append
+# - force-new
+[[items]]
+section = "devel"
+name = "dirstate.v2.data_update_mode"
+default = "auto"
+
+[[items]]
+section = "devel"
+name = "disableloaddefaultcerts"
+default = false
+
+[[items]]
+section = "devel"
+name = "discovery.exchange-heads"
+default = true
+documentation = """If false, the discovery will not start with remote \
+head fetching and local head querying."""
+
+[[items]]
+section = "devel"
+name = "discovery.grow-sample"
+default = true
+documentation = """If false, the sample size used in set discovery \
+will not be increased through the process."""
+
+[[items]]
+section = "devel"
+name = "discovery.grow-sample.dynamic"
+default = true
+documentation = """If true, the default, the sample size is adapted to the shape \
+of the undecided set. It is set to the max of:
+`<target-size>, len(roots(undecided)), len(heads(undecided))`"""
+
+[[items]]
+section = "devel"
+name = "discovery.grow-sample.rate"
+default = 1.05
+documentation = "Controls the rate at which the sample grows."
+
+[[items]]
+section = "devel"
+name = "discovery.randomize"
+default = true
+documentation = """If false, random samplings during discovery are deterministic. \
+It is meant for integration tests."""
+
+[[items]]
+section = "devel"
+name = "discovery.sample-size"
+default = 200
+documentation = "Controls the initial size of the discovery sample."
+
+[[items]]
+section = "devel"
+name = "discovery.sample-size.initial"
+default = 100
+documentation = "Controls the initial size of the discovery for initial change."
+
+[[items]]
+section = "devel"
+name = "legacy.exchange"
+default-type = "list_type"
+
+[[items]]
+section = "devel"
+name = "persistent-nodemap"
+default = false
+documentation = """When true, revlogs use a special reference version of the \
+nodemap that is not performant but is "known" to behave properly."""
+
+[[items]]
+section = "devel"
+name = "server-insecure-exact-protocol"
+default = ""
+
+[[items]]
+section = "devel"
+name = "servercafile"
+default = ""
+
+[[items]]
+section = "devel"
+name = "serverexactprotocol"
+default = ""
+
+[[items]]
+section = "devel"
+name = "serverrequirecert"
+default = false
+
+[[items]]
+section = "devel"
+name = "strip-obsmarkers"
+default = true
+
+[[items]]
+section = "devel"
+name = "sync.status.pre-dirstate-write-file"
+documentation = """
+Makes the status algorithm wait for the existence of this file \
+(or until a timeout of `devel.sync.status.pre-dirstate-write-file-timeout` \
+seconds) before taking the lock and writing the dirstate. \
+Status signals that it's ready to wait by creating a file \
+with the same name + `.waiting`. \
+Useful when testing race conditions."""
+
+[[items]]
+section = "devel"
+name = "sync.status.pre-dirstate-write-file-timeout"
+default = 2
+
+[[items]]
+section = "devel"
+name = "sync.dirstate.post-docket-read-file"
+
+[[items]]
+section = "devel"
+name = "sync.dirstate.post-docket-read-file-timeout"
+default = 2
+
+[[items]]
+section = "devel"
+name = "sync.dirstate.pre-read-file"
+
+[[items]]
+section = "devel"
+name = "sync.dirstate.pre-read-file-timeout"
+default = 2
+
+[[items]]
+section = "devel"
+name = "user.obsmarker"
+
+[[items]]
+section = "devel"
+name = "warn-config"
+
+[[items]]
+section = "devel"
+name = "warn-config-default"
+
+[[items]]
+section = "devel"
+name = "warn-config-unknown"
+
+[[items]]
+section = "devel"
+name = "warn-empty-changegroup"
+default = false
+
+[[items]]
+section = "diff"
+name = "merge"
+default = false
+experimental = true
+
+[[items]]
+section = "email"
+name = "bcc"
+
+[[items]]
+section = "email"
+name = "cc"
+
+[[items]]
+section = "email"
+name = "charsets"
+default-type = "list_type"
+
+[[items]]
+section = "email"
+name = "from"
+
+[[items]]
+section = "email"
+name = "method"
+default = "smtp"
+
+[[items]]
+section = "email"
+name = "reply-to"
+
+[[items]]
+section = "email"
+name = "to"
+
+[[items]]
+section = "experimental"
+name = "archivemetatemplate"
+default-type = "dynamic"
+
+[[items]]
+section = "experimental"
+name = "auto-publish"
+default = "publish"
+
+[[items]]
+section = "experimental"
+name = "bundle-phases"
+default = false
+
+[[items]]
+section = "experimental"
+name = "bundle2-advertise"
+default = true
+
+[[items]]
+section = "experimental"
+name = "bundle2-output-capture"
+default = false
+
+[[items]]
+section = "experimental"
+name = "bundle2.pushback"
+default = false
+
+[[items]]
+section = "experimental"
+name = "bundle2lazylocking"
+default = false
+
+[[items]]
+section = "experimental"
+name = "bundlecomplevel"
+
+[[items]]
+section = "experimental"
+name = "bundlecomplevel.bzip2"
+
+[[items]]
+section = "experimental"
+name = "bundlecomplevel.gzip"
+
+[[items]]
+section = "experimental"
+name = "bundlecomplevel.none"
+
+[[items]]
+section = "experimental"
+name = "bundlecomplevel.zstd"
+
+[[items]]
+section = "experimental"
+name = "bundlecompthreads"
+
+[[items]]
+section = "experimental"
+name = "bundlecompthreads.bzip2"
+
+[[items]]
+section = "experimental"
+name = "bundlecompthreads.gzip"
+
+[[items]]
+section = "experimental"
+name = "bundlecompthreads.none"
+
+[[items]]
+section = "experimental"
+name = "bundlecompthreads.zstd"
+
+[[items]]
+section = "experimental"
+name = "changegroup3"
+default = true
+
+[[items]]
+section = "experimental"
+name = "changegroup4"
+default = false
+
+# might remove rank configuration once the computation has no impact
+[[items]]
+section = "experimental"
+name = "changelog-v2.compute-rank"
+default = true
+
+[[items]]
+section = "experimental"
+name = "cleanup-as-archived"
+default = false
+
+[[items]]
+section = "experimental"
+name = "clientcompressionengines"
+default-type = "list_type"
+
+[[items]]
+section = "experimental"
+name = "copies.read-from"
+default = "filelog-only"
+
+[[items]]
+section = "experimental"
+name = "copies.write-to"
+default = "filelog-only"
+
+[[items]]
+section = "experimental"
+name = "copytrace"
+default = "on"
+
+[[items]]
+section = "experimental"
+name = "copytrace.movecandidateslimit"
+default = 100
+
+[[items]]
+section = "experimental"
+name = "copytrace.sourcecommitlimit"
+default = 100
+
+[[items]]
+section = "experimental"
+name = "crecordtest"
+
+[[items]]
+section = "experimental"
+name = "directaccess"
+default = false
+
+[[items]]
+section = "experimental"
+name = "directaccess.revnums"
+default = false
+
+[[items]]
+section = "experimental"
+name = "editortmpinhg"
+default = false
+
+[[items]]
+section = "experimental"
+name = "evolution"
+default-type = "list_type"
+
+[[items]]
+section = "experimental"
+name = "evolution.allowdivergence"
+default = false
+alias = [["experimental", "allowdivergence"]]
+
+[[items]]
+section = "experimental"
+name = "evolution.allowunstable"
+
+[[items]]
+section = "experimental"
+name = "evolution.bundle-obsmarker"
+default = false
+
+[[items]]
+section = "experimental"
+name = "evolution.bundle-obsmarker:mandatory"
+default = true
+
+[[items]]
+section = "experimental"
+name = "evolution.createmarkers"
+
+[[items]]
+section = "experimental"
+name = "evolution.effect-flags"
+default = true
+alias = [["experimental", "effect-flags"]]
+
+[[items]]
+section = "experimental"
+name = "evolution.exchange"
+
+[[items]]
+section = "experimental"
+name = "evolution.report-instabilities"
+default = true
+
+[[items]]
+section = "experimental"
+name = "evolution.track-operation"
+default = true
+
+[[items]]
+section = "experimental"
+name = "exportableenviron"
+default-type = "list_type"
+
+[[items]]
+section = "experimental"
+name = "extendedheader.index"
+
+[[items]]
+section = "experimental"
+name = "extendedheader.similarity"
+default = false
+
+[[items]]
+section = "experimental"
+name = "extra-filter-revs"
+documentation = """Repo-level config to prevent a revset from being visible.
+The target use case is to use `share` to expose different subsets of the same \
+repository, especially server side. See also `server.view`."""
+
+[[items]]
+section = "experimental"
+name = "graphshorten"
+default = false
+
+[[items]]
+section = "experimental"
+name = "graphstyle.grandparent"
+default-type = "dynamic"
+
+[[items]]
+section = "experimental"
+name = "graphstyle.missing"
+default-type = "dynamic"
+
+[[items]]
+section = "experimental"
+name = "graphstyle.parent"
+default-type = "dynamic"
+
+[[items]]
+section = "experimental"
+name = "hook-track-tags"
+default = false
+
+[[items]]
+section = "experimental"
+name = "httppostargs"
+default = false
+
+[[items]]
+section = "experimental"
+name = "log.topo"
+default = false
+
+[[items]]
+section = "experimental"
+name = "maxdeltachainspan"
+default = -1
+
+[[items]]
+section = "experimental"
+name = "merge-track-salvaged"
+default = false
+documentation = """Tracks files which were undeleted (merge might delete them \
+but we explicitly kept/undeleted them) and creates new filenodes for them."""
+
+[[items]]
+section = "experimental"
+name = "merge.checkpathconflicts"
+default = false
+
+[[items]]
+section = "experimental"
+name = "mmapindexthreshold"
+
+[[items]]
+section = "experimental"
+name = "narrow"
+default = false
+
+[[items]]
+section = "experimental"
+name = "nointerrupt"
+default = false
+
+[[items]]
+section = "experimental"
+name = "nointerrupt-interactiveonly"
+default = true
+
+[[items]]
+section = "experimental"
+name = "nonnormalparanoidcheck"
+default = false
+
+[[items]]
+section = "experimental"
+name = "obsmarkers-exchange-debug"
+default = false
+
+[[items]]
+section = "experimental"
+name = "rebaseskipobsolete"
+default = true
+
+[[items]]
+section = "experimental"
+name = "remotenames"
+default = false
+
+[[items]]
+section = "experimental"
+name = "removeemptydirs"
+default = true
+
+[[items]]
+section = "experimental"
+name = "revert.interactive.select-to-keep"
+default = false
+
+[[items]]
+section = "experimental"
+name = "revisions.disambiguatewithin"
+
+[[items]]
+section = "experimental"
+name = "revisions.prefixhexnode"
+default = false
+
+# "out of experimental" todo list.
+#
+# * include management of a persistent nodemap in the main docket
+# * enforce a "no-truncate" policy for mmap safety
+#      - for censoring operation
+#      - for stripping operation
+#      - for rollback operation
+# * proper streaming (race free) of the docket file
+# * track garbage data to eventually allow rewriting -existing- sidedata.
+# * Exchange-wise, we will also need to do something more efficient than
+#   keeping references to the affected revlogs, especially memory-wise when
+#   rewriting sidedata.
+# * introduce a proper solution to reduce the number of filelog related files.
+# * use caching for reading sidedata (similar to what we do for data).
+# * no longer set offset=0 if sidedata_size=0 (simplify cutoff computation).
+# * Improvements to consider
+#   - avoid compression header in chunk using the default compression?
+#   - forbid "inline" compression mode entirely?
+#   - split the data offset and flag field (the 2 bytes saved are mostly trouble)
+#   - keep track of uncompressed -chunk- size (to preallocate memory better)
+#   - keep track of chain base or size (probably not that useful anymore)
+[[items]]
+section = "experimental"
+name = "revlogv2"
+
+[[items]]
+section = "experimental"
+name = "rust.index"
+default = false
+
+[[items]]
+section = "experimental"
+name = "server.allow-hidden-access"
+default-type = "list_type"
+
+[[items]]
+section = "experimental"
+name = "server.filesdata.recommended-batch-size"
+default = 50000
+
+[[items]]
+section = "experimental"
+name = "server.manifestdata.recommended-batch-size"
+default = 100000
+
+[[items]]
+section = "experimental"
+name = "server.stream-narrow-clones"
+default = false
+
+[[items]]
+section = "experimental"
+name = "single-head-per-branch"
+default = false
+
+[[items]]
+section = "experimental"
+name = "single-head-per-branch:account-closed-heads"
+default = false
+
+[[items]]
+section = "experimental"
+name = "single-head-per-branch:public-changes-only"
+default = false
+
+[[items]]
+section = "experimental"
+name = "sparse-read"
+default = false
+
+[[items]]
+section = "experimental"
+name = "sparse-read.density-threshold"
+default = 0.5
+
+[[items]]
+section = "experimental"
+name = "sparse-read.min-gap-size"
+default = "65K"
+
+[[items]]
+section = "experimental"
+name = "stream-v3"
+default = false
+
+[[items]]
+section = "experimental"
+name = "treemanifest"
+default = false
+
+[[items]]
+section = "experimental"
+name = "update.atomic-file"
+default = false
+
+[[items]]
+section = "experimental"
+name = "web.full-garbage-collection-rate"
+default = 1  # still forcing a full collection on each request
+
+[[items]]
+section = "experimental"
+name = "worker.repository-upgrade"
+default = false
+
+[[items]]
+section = "experimental"
+name = "worker.wdir-get-thread-safe"
+default = false
+
+[[items]]
+section = "experimental"
+name = "xdiff"
+default = false
+
+[[items]]
+section = "extdata"
+name = ".*"
+generic = true
+
+[[items]]
+section = "extensions"
+name = "[^:]*"
+generic = true
+
+[[items]]
+section = "extensions"
+name = "[^:]*:required"
+default = false
+generic = true
+
+[[items]]
+section = "format"
+name = "bookmarks-in-store"
+default = false
+
+[[items]]
+section = "format"
+name = "chunkcachesize"
+experimental = true
+
+[[items]]
+section = "format"
+name = "dotencode"
+default = true
+
+# The interaction between the archived phase and obsolescence markers needs to
+# be sorted out before wider usage of this is to be considered.
+#
+# At the time this message is written, behavior when archiving obsolete
+# changesets differs significantly from stripping. As part of stripping, we
+# also remove the obsolescence markers associated with the stripped
+# changesets, revealing the predecessor changesets when applicable. When
+# archiving, we don't touch the obsolescence markers, keeping everything
+# hidden. This can result in quite confusing situations for people who
+# combine exchanging drafts with the archived phase, as some markers needed
+# by others may be skipped during exchange.
+[[items]]
+section = "format"
+name = "exp-archived-phase"
+default = false
+experimental = true
+
+# Experimental TODOs:
+#
+# * Same as for revlogv2 (but for the reduction of the number of files)
+# * Actually computing the rank of changesets
+# * Improvements to investigate
+#   - storing .hgtags fnode
+#   - storing a branch-related identifier
+[[items]]
+section = "format"
+name = "exp-use-changelog-v2"
+experimental = true
+
+[[items]]
+section = "format"
+name = "exp-use-copies-side-data-changeset"
+default = false
+experimental = true
+
+[[items]]
+section = "format"
+name = "generaldelta"
+default = false
+experimental = true
+
+[[items]]
+section = "format"
+name = "manifestcachesize"
+experimental = true
+
+[[items]]
+section = "format"
+name = "maxchainlen"
+default-type = "dynamic"
+experimental = true
+
+[[items]]
+section = "format"
+name = "obsstore-version"
+
+[[items]]
+section = "format"
+name = "revlog-compression"
+default-type = "lambda"
+alias = [["experimental", "format.compression"]]
+default = [ "zstd", "zlib",]
+
+[[items]]
+section = "format"
+name = "sparse-revlog"
+default = true
+
+[[items]]
+section = "format"
+name = "use-dirstate-tracked-hint"
+default = false
+experimental = true
+
+[[items]]
+section = "format"
+name = "use-dirstate-tracked-hint.automatic-upgrade-of-mismatching-repositories"
+default = false
+experimental = true
+
+[[items]]
+section = "format"
+name = "use-dirstate-tracked-hint.automatic-upgrade-of-mismatching-repositories:quiet"
+default = false
+experimental = true
+
+[[items]]
+section = "format"
+name = "use-dirstate-tracked-hint.version"
+default = 1
+experimental = true
+
+[[items]]
+section = "format"
+name = "use-dirstate-v2"
+default = false
+alias = [["format", "exp-rc-dirstate-v2"]]
+experimental = true
+documentation = """Enables dirstate-v2 format *when creating a new repository*.
+Which format to use for existing repos is controlled by `.hg/requires`."""
+
+[[items]]
+section = "format"
+name = "use-dirstate-v2.automatic-upgrade-of-mismatching-repositories"
+default = false
+experimental = true
+
+[[items]]
+section = "format"
+name = "use-dirstate-v2.automatic-upgrade-of-mismatching-repositories:quiet"
+default = false
+experimental = true
+
+# Having this on by default means we are confident about the scaling of phases.
+# This is not guaranteed to be the case at the time this message is written.
+[[items]]
+section = "format"
+name = "use-internal-phase"
+default = false
+experimental = true
+
+[[items]]
+section = "format"
+name = "use-persistent-nodemap"
+default-type = "dynamic"
+
+[[items]]
+section = "format"
+name = "use-share-safe"
+default = true
+
+[[items]]
+section = "format"
+name = "use-share-safe.automatic-upgrade-of-mismatching-repositories"
+default = false
+experimental = true
+
+[[items]]
+section = "format"
+name = "use-share-safe.automatic-upgrade-of-mismatching-repositories:quiet"
+default = false
+experimental = true
+
+[[items]]
+section = "format"
+name = "usefncache"
+default = true
+
+[[items]]
+section = "format"
+name = "usegeneraldelta"
+default = true
+
+[[items]]
+section = "format"
+name = "usestore"
+default = true
+
+[[items]]
+section = "fsmonitor"
+name = "warn_update_file_count"
+default = 50000
+
+[[items]]
+section = "fsmonitor"
+name = "warn_update_file_count_rust"
+default = 400000
+
+[[items]]
+section = "fsmonitor"
+name = "warn_when_unused"
+default = true
+
+[[items]]
+section = "help"
+name = 'hidden-command\..*'
+default = false
+generic = true
+
+[[items]]
+section = "help"
+name = 'hidden-topic\..*'
+default = false
+generic = true
+
+[[items]]
+section = "hgweb-paths"
+name = ".*"
+default-type = "list_type"
+generic = true
+
+[[items]]
+section = "hooks"
+name = ".*:run-with-plain"
+default = true
+generic = true
+
+[[items]]
+section = "hooks"
+name = "[^:]*"
+default-type = "dynamic"
+generic = true
+
+[[items]]
+section = "hostfingerprints"
+name = ".*"
+default-type = "list_type"
+generic = true
+
+[[items]]
+section = "hostsecurity"
+name = ".*:ciphers$"
+default-type = "dynamic"
+generic = true
+
+[[items]]
+section = "hostsecurity"
+name = ".*:fingerprints$"
+default-type = "list_type"
+generic = true
+
+[[items]]
+section = "hostsecurity"
+name = ".*:minimumprotocol$"
+default-type = "dynamic"
+generic = true
+
+[[items]]
+section = "hostsecurity"
+name = ".*:verifycertsfile$"
+generic = true
+
+[[items]]
+section = "hostsecurity"
+name = "ciphers"
+
+[[items]]
+section = "hostsecurity"
+name = "minimumprotocol"
+default-type = "dynamic"
+
+[[items]]
+section = "http"
+name = "timeout"
+
+[[items]]
+section = "http_proxy"
+name = "always"
+default = false
+
+[[items]]
+section = "http_proxy"
+name = "host"
+
+[[items]]
+section = "http_proxy"
+name = "no"
+default-type = "list_type"
+
+[[items]]
+section = "http_proxy"
+name = "passwd"
+
+[[items]]
+section = "http_proxy"
+name = "user"
+
+[[items]]
+section = "logtoprocess"
+name = "command"
+
+[[items]]
+section = "logtoprocess"
+name = "commandexception"
+
+[[items]]
+section = "logtoprocess"
+name = "commandfinish"
+
+[[items]]
+section = "logtoprocess"
+name = "develwarn"
+
+[[items]]
+section = "logtoprocess"
+name = "uiblocked"
+
+[[items]]
+section = "merge"
+name = "checkignored"
+default = "abort"
+
+[[items]]
+section = "merge"
+name = "checkunknown"
+default = "abort"
+
+[[items]]
+section = "merge"
+name = "disable-partial-tools"
+default = false
+experimental = true
+
+[[items]]
+section = "merge"
+name = "followcopies"
+default = true
+
+[[items]]
+section = "merge"
+name = "on-failure"
+default = "continue"
+
+[[items]]
+section = "merge"
+name = "preferancestor"
+default-type = "lambda"
+default = ["*"]
+experimental = true
+
+[[items]]
+section = "merge"
+name = "strict-capability-check"
+default = false
+
+[[items]]
+section = "merge-tools"
+name = ".*"
+generic = true
+
+[[items]]
+section = "merge-tools"
+name = '.*\.args$'
+default = "$local $base $other"
+generic = true
+priority = -1
+
+[[items]]
+section = "merge-tools"
+name = '.*\.binary$'
+default = false
+generic = true
+priority = -1
+
+[[items]]
+section = "merge-tools"
+name = '.*\.check$'
+default-type = "list_type"
+generic = true
+priority = -1
+
+[[items]]
+section = "merge-tools"
+name = '.*\.checkchanged$'
+default = false
+generic = true
+priority = -1
+
+[[items]]
+section = "merge-tools"
+name = '.*\.executable$'
+default-type = "dynamic"
+generic = true
+priority = -1
+
+[[items]]
+section = "merge-tools"
+name = '.*\.fixeol$'
+default = false
+generic = true
+priority = -1
+
+[[items]]
+section = "merge-tools"
+name = '.*\.gui$'
+default = false
+generic = true
+priority = -1
+
+[[items]]
+section = "merge-tools"
+name = '.*\.mergemarkers$'
+default = "basic"
+generic = true
+priority = -1
+
+[[items]]
+section = "merge-tools"
+name = '.*\.mergemarkertemplate$'  # take from command-templates.mergemarker
+default-type = "dynamic"
+generic = true
+priority = -1
+
+[[items]]
+section = "merge-tools"
+name = '.*\.premerge$'
+default-type = "dynamic"
+generic = true
+priority = -1
+
+[[items]]
+section = "merge-tools"
+name = '.*\.priority$'
+default = 0
+generic = true
+priority = -1
+
+[[items]]
+section = "merge-tools"
+name = '.*\.regappend$'
+default = ""
+generic = true
+priority = -1
+
+[[items]]
+section = "merge-tools"
+name = '.*\.symlink$'
+default = false
+generic = true
+priority = -1
+
+[[items]]
+section = "pager"
+name = "attend-.*"
+default-type = "dynamic"
+generic = true
+
+[[items]]
+section = "pager"
+name = "ignore"
+default-type = "list_type"
+
+[[items]]
+section = "pager"
+name = "pager"
+default-type = "dynamic"
+
+[[items]]
+section = "partial-merge-tools"
+name = ".*"
+generic = true
+experimental = true
+
+[[items]]
+section = "partial-merge-tools"
+name = '.*\.args'
+default = "$local $base $other"
+generic = true
+priority = -1
+experimental = true
+
+[[items]]
+section = "partial-merge-tools"
+name = '.*\.disable'
+default = false
+generic = true
+priority = -1
+experimental = true
+
+[[items]]
+section = "partial-merge-tools"
+name = '.*\.executable$'
+default-type = "dynamic"
+generic = true
+priority = -1
+experimental = true
+
+[[items]]
+section = "partial-merge-tools"
+name = '.*\.order'
+default = 0
+generic = true
+priority = -1
+experimental = true
+
+[[items]]
+section = "partial-merge-tools"
+name = '.*\.patterns'
+default-type = "dynamic"
+generic = true
+priority = -1
+experimental = true
+
+[[items]]
+section = "patch"
+name = "eol"
+default = "strict"
+
+[[items]]
+section = "patch"
+name = "fuzz"
+default = 2
+
+[[items]]
+section = "paths"
+name = "[^:]*"
+generic = true
+
+[[items]]
+section = "paths"
+name = ".*:bookmarks.mode"
+default = "default"
+generic = true
+
+[[items]]
+section = "paths"
+name = ".*:multi-urls"
+default = false
+generic = true
+
+[[items]]
+section = "paths"
+name = ".*:pulled-delta-reuse-policy"
+generic = true
+
+[[items]]
+section = "paths"
+name = ".*:pushrev"
+generic = true
+
+[[items]]
+section = "paths"
+name = ".*:pushurl"
+generic = true
+
+[[items]]
+section = "paths"
+name = "default"
+
+[[items]]
+section = "paths"
+name = "default-push"
+
+[[items]]
+section = "phases"
+name = "checksubrepos"
+default = "follow"
+
+[[items]]
+section = "phases"
+name = "new-commit"
+default = "draft"
+
+[[items]]
+section = "phases"
+name = "publish"
+default = true
+
+[[items]]
+section = "profiling"
+name = "enabled"
+default = false
+
+[[items]]
+section = "profiling"
+name = "format"
+default = "text"
+
+[[items]]
+section = "profiling"
+name = "freq"
+default = 1000
+
+[[items]]
+section = "profiling"
+name = "limit"
+default = 30
+
+[[items]]
+section = "profiling"
+name = "nested"
+default = 0
+
+[[items]]
+section = "profiling"
+name = "output"
+
+[[items]]
+section = "profiling"
+name = "showmax"
+default = 0.999
+
+[[items]]
+section = "profiling"
+name = "showmin"
+default-type = "dynamic"
+
+[[items]]
+section = "profiling"
+name = "showtime"
+default = true
+
+[[items]]
+section = "profiling"
+name = "sort"
+default = "inlinetime"
+
+[[items]]
+section = "profiling"
+name = "statformat"
+default = "hotpath"
+
+[[items]]
+section = "profiling"
+name = "time-track"
+default-type = "dynamic"
+
+[[items]]
+section = "profiling"
+name = "type"
+default = "stat"
+
+[[items]]
+section = "progress"
+name = "assume-tty"
+default = false
+
+[[items]]
+section = "progress"
+name = "changedelay"
+default = 1
+
+[[items]]
+section = "progress"
+name = "clear-complete"
+default = true
+
+[[items]]
+section = "progress"
+name = "debug"
+default = false
+
+[[items]]
+section = "progress"
+name = "delay"
+default = 3
+
+[[items]]
+section = "progress"
+name = "disable"
+default = false
+
+[[items]]
+section = "progress"
+name = "estimateinterval"
+default = 60.0
+
+[[items]]
+section = "progress"
+name = "format"
+default-type = "lambda"
+default = [ "topic", "bar", "number", "estimate",]
+
+[[items]]
+section = "progress"
+name = "refresh"
+default = 0.1
+
+[[items]]
+section = "progress"
+name = "width"
+default-type = "dynamic"
+
+[[items]]
+section = "pull"
+name = "confirm"
+default = false
+
+[[items]]
+section = "push"
+name = "pushvars.server"
+default = false
+
+[[items]]
+section = "rebase"
+name = "experimental.inmemory"
+default = false
+
+[[items]]
+section = "rebase"
+name = "singletransaction"
+default = false
+
+[[items]]
+section = "rebase"
+name = "store-source"
+default = true
+experimental = true
+documentation = """Controls creation of a `rebase_source` extra field during rebase.
+When false, no such field is created. This is useful e.g. for incrementally \
+converting changesets and then rebasing them onto an existing repo.
+WARNING: this is an advanced setting reserved for people who know \
+exactly what they are doing. Misuse of this setting can easily \
+result in obsmarker cycles and a vivid headache."""
+
+[[items]]
+section = "rewrite"
+name = "backup-bundle"
+default = true
+alias = [["ui", "history-editing-backup"]]
+
+[[items]]
+section = "rewrite"
+name = "empty-successor"
+default = "skip"
+experimental = true
+
+[[items]]
+section = "rewrite"
+name = "update-timestamp"
+default = false
+
+[[items]]
+section = "rhg"
+name = "cat"
+default = true
+experimental = true
+documentation = """rhg cat has some quirks that need to be ironed out. \
+In particular, the `-r` argument accepts a partial hash, but does not \
+correctly resolve `abcdef` as a potential bookmark, tag or branch name."""
+
+[[items]]
+section = "rhg"
+name = "fallback-exectutable"
+experimental = true
+
+[[items]]
+section = "rhg"
+name = "fallback-immediately"
+default = false
+experimental = true
+
+[[items]]
+section = "rhg"
+name = "ignored-extensions"
+default-type = "list_type"
+experimental = true
+
+[[items]]
+section = "rhg"
+name = "on-unsupported"
+default = "abort"
+experimental = true
+
+[[items]]
+section = "server"
+name = "bookmarks-pushkey-compat"
+default = true
+
+[[items]]
+section = "server"
+name = "bundle1"
+default = true
+
+[[items]]
+section = "server"
+name = "bundle1.pull"
+
+[[items]]
+section = "server"
+name = "bundle1.push"
+
+[[items]]
+section = "server"
+name = "bundle1gd"
+
+[[items]]
+section = "server"
+name = "bundle1gd.pull"
+
+[[items]]
+section = "server"
+name = "bundle1gd.push"
+
+[[items]]
+section = "server"
+name = "bundle2.stream"
+default = true
+alias = [["experimental", "bundle2.stream"]]
+
+[[items]]
+section = "server"
+name = "compressionengines"
+default-type = "list_type"
+
+[[items]]
+section = "server"
+name = "concurrent-push-mode"
+default = "check-related"
+
+[[items]]
+section = "server"
+name = "disablefullbundle"
+default = false
+
+[[items]]
+section = "server"
+name = "maxhttpheaderlen"
+default = 1024
+
+[[items]]
+section = "server"
+name = "preferuncompressed"
+default = false
+
+[[items]]
+section = "server"
+name = "pullbundle"
+default = true
+
+[[items]]
+section = "server"
+name = "streamunbundle"
+default = false
+
+[[items]]
+section = "server"
+name = "uncompressed"
+default = true
+
+[[items]]
+section = "server"
+name = "uncompressedallowsecret"
+default = false
+
+[[items]]
+section = "server"
+name = "validate"
+default = false
+
+[[items]]
+section = "server"
+name = "view"
+default = "served"
+
+[[items]]
+section = "server"
+name = "zliblevel"
+default = -1
+
+[[items]]
+section = "server"
+name = "zstdlevel"
+default = 3
+
+[[items]]
+section = "share"
+name = "pool"
+
+[[items]]
+section = "share"
+name = "poolnaming"
+default = "identity"
+
+[[items]]
+section = "share"
+name = "safe-mismatch.source-not-safe"
+default = "abort"
+
+[[items]]
+section = "share"
+name = "safe-mismatch.source-not-safe.warn"
+default = true
+
+[[items]]
+section = "share"
+name = "safe-mismatch.source-not-safe:verbose-upgrade"
+default = true
+
+[[items]]
+section = "share"
+name = "safe-mismatch.source-safe"
+default = "abort"
+
+[[items]]
+section = "share"
+name = "safe-mismatch.source-safe.warn"
+default = true
+
+[[items]]
+section = "share"
+name = "safe-mismatch.source-safe:verbose-upgrade"
+default = true
+
+[[items]]
+section = "shelve"
+name = "maxbackups"
+default = 10
+
+[[items]]
+section = "shelve"
+name = "store"
+default = "internal"
+experimental = true
+
+[[items]]
+section = "smtp"
+name = "host"
+
+[[items]]
+section = "smtp"
+name = "local_hostname"
+
+[[items]]
+section = "smtp"
+name = "password"
+
+[[items]]
+section = "smtp"
+name = "port"
+default-type = "dynamic"
+
+[[items]]
+section = "smtp"
+name = "tls"
+default = "none"
+
+[[items]]
+section = "smtp"
+name = "username"
+
+[[items]]
+section = "sparse"
+name = "missingwarning"
+default = true
+experimental = true
+
+[[items]]
+section = "storage"
+name = "dirstate-v2.slow-path"
+default = "abort"
+experimental = true  # experimental as long as format.use-dirstate-v2 is.
+
+[[items]]
+section = "storage"
+name = "new-repo-backend"
+default = "revlogv1"
+experimental = true
+
+[[items]]
+section = "storage"
+name = "revlog.delta-parent-search.candidate-group-chunk-size"
+default = 20
+
+[[items]]
+section = "storage"
+name = "revlog.issue6528.fix-incoming"
+default = true
+
+[[items]]
+section = "storage"
+name = "revlog.optimize-delta-parent-choice"
+default = true
+alias = [["format", "aggressivemergedeltas"]]
+
+[[items]]
+section = "storage"
+name = "revlog.persistent-nodemap.mmap"
+default = true
+
+[[items]]
+section = "storage"
+name = "revlog.persistent-nodemap.slow-path"
+default = "abort"
+
+[[items]]
+section = "storage"
+name = "revlog.reuse-external-delta"
+default = true
+
+[[items]]
+section = "storage"
+name = "revlog.reuse-external-delta-parent"
+documentation = """This option is true unless `format.generaldelta` is set."""
+
+[[items]]
+section = "storage"
+name = "revlog.zlib.level"
+
+[[items]]
+section = "storage"
+name = "revlog.zstd.level"
+
+[[items]]
+section = "subrepos"
+name = "allowed"
+default-type = "dynamic"  # to make backporting simpler
+
+[[items]]
+section = "subrepos"
+name = "git:allowed"
+default-type = "dynamic"
+
+[[items]]
+section = "subrepos"
+name = "hg:allowed"
+default-type = "dynamic"
+
+[[items]]
+section = "subrepos"
+name = "svn:allowed"
+default-type = "dynamic"
+
+[[items]]
+section = "templateconfig"
+name = ".*"
+default-type = "dynamic"
+generic = true
+
+[[items]]
+section = "templates"
+name = ".*"
+generic = true
+
+[[items]]
+section = "trusted"
+name = "groups"
+default-type = "list_type"
+
+[[items]]
+section = "trusted"
+name = "users"
+default-type = "list_type"
+
+[[items]]
+section = "ui"
+name = "_usedassubrepo"
+default = false
+
+[[items]]
+section = "ui"
+name = "allowemptycommit"
+default = false
+
+[[items]]
+section = "ui"
+name = "archivemeta"
+default = true
+
+[[items]]
+section = "ui"
+name = "askusername"
+default = false
+
+[[items]]
+section = "ui"
+name = "available-memory"
+
+[[items]]
+section = "ui"
+name = "clonebundlefallback"
+default = false
+
+[[items]]
+section = "ui"
+name = "clonebundleprefers"
+default-type = "list_type"
+
+[[items]]
+section = "ui"
+name = "clonebundles"
+default = true
+
+[[items]]
+section = "ui"
+name = "color"
+default = "auto"
+
+[[items]]
+section = "ui"
+name = "commitsubrepos"
+default = false
+
+[[items]]
+section = "ui"
+name = "debug"
+default = false
+
+[[items]]
+section = "ui"
+name = "debugger"
+
+[[items]]
+section = "ui"
+name = "detailed-exit-code"
+default = false
+experimental = true
+
+[[items]]
+section = "ui"
+name = "editor"
+default-type = "dynamic"
+
+[[items]]
+section = "ui"
+name = "fallbackencoding"
+
+[[items]]
+section = "ui"
+name = "forcecwd"
+
+[[items]]
+section = "ui"
+name = "forcemerge"
+
+[[items]]
+section = "ui"
+name = "formatdebug"
+default = false
+
+[[items]]
+section = "ui"
+name = "formatjson"
+default = false
+
+[[items]]
+section = "ui"
+name = "formatted"
+
+[[items]]
+section = "ui"
+name = "interactive"
+
+[[items]]
+section = "ui"
+name = "interface"
+
+[[items]]
+section = "ui"
+name = "interface.chunkselector"
+
+[[items]]
+section = "ui"
+name = "large-file-limit"
+default = 10485760
+
+[[items]]
+section = "ui"
+name = "logblockedtimes"
+default = false
+
+[[items]]
+section = "ui"
+name = "merge"
+
+[[items]]
+section = "ui"
+name = "mergemarkers"
+default = "basic"
+
+[[items]]
+section = "ui"
+name = "message-output"
+default = "stdio"
+
+[[items]]
+section = "ui"
+name = "nontty"
+default = false
+
+[[items]]
+section = "ui"
+name = "origbackuppath"
+
+[[items]]
+section = "ui"
+name = "paginate"
+default = true
+
+[[items]]
+section = "ui"
+name = "patch"
+
+[[items]]
+section = "ui"
+name = "portablefilenames"
+default = "warn"
+
+[[items]]
+section = "ui"
+name = "promptecho"
+default = false
+
+[[items]]
+section = "ui"
+name = "quiet"
+default = false
+
+[[items]]
+section = "ui"
+name = "quietbookmarkmove"
+default = false
+
+[[items]]
+section = "ui"
+name = "relative-paths"
+default = "legacy"
+
+[[items]]
+section = "ui"
+name = "remotecmd"
+default = "hg"
+
+[[items]]
+section = "ui"
+name = "report_untrusted"
+default = true
+
+[[items]]
+section = "ui"
+name = "rollback"
+default = true
+
+[[items]]
+section = "ui"
+name = "signal-safe-lock"
+default = true
+
+[[items]]
+section = "ui"
+name = "slash"
+default = false
+
+[[items]]
+section = "ui"
+name = "ssh"
+default = "ssh"
+
+[[items]]
+section = "ui"
+name = "ssherrorhint"
+
+[[items]]
+section = "ui"
+name = "statuscopies"
+default = false
+
+[[items]]
+section = "ui"
+name = "strict"
+default = false
+
+[[items]]
+section = "ui"
+name = "style"
+default = ""
+
+[[items]]
+section = "ui"
+name = "supportcontact"
+
+[[items]]
+section = "ui"
+name = "textwidth"
+default = 78
+
+[[items]]
+section = "ui"
+name = "timeout"
+default = "600"
+
+[[items]]
+section = "ui"
+name = "timeout.warn"
+default = 0
+
+[[items]]
+section = "ui"
+name = "timestamp-output"
+default = false
+
+[[items]]
+section = "ui"
+name = "traceback"
+default = false
+
+[[items]]
+section = "ui"
+name = "tweakdefaults"
+default = false
+
+[[items]]
+section = "ui"
+name = "username"
+alias = [["ui", "user"]]
+
+[[items]]
+section = "ui"
+name = "verbose"
+default = false
+
+[[items]]
+section = "verify"
+name = "skipflags"
+default = 0
+
+[[items]]
+section = "web"
+name = "accesslog"
+default = "-"
+
+[[items]]
+section = "web"
+name = "address"
+default = ""
+
+[[items]]
+section = "web"
+name = "allow-archive"
+default-type = "list_type"
+alias = [["web", "allow_archive"]]
+
+[[items]]
+section = "web"
+name = "allow-pull"
+default = true
+alias = [["web", "allowpull"]]
+
+[[items]]
+section = "web"
+name = "allow-push"
+default-type = "list_type"
+alias = [["web", "allow_push"]]
+
+[[items]]
+section = "web"
+name = "allow_read"
+default-type = "list_type"
+
+[[items]]
+section = "web"
+name = "allowbz2"
+default = false
+
+[[items]]
+section = "web"
+name = "allowgz"
+default = false
+
+[[items]]
+section = "web"
+name = "allowzip"
+default = false
+
+[[items]]
+section = "web"
+name = "archivesubrepos"
+default = false
+
+[[items]]
+section = "web"
+name = "baseurl"
+
+[[items]]
+section = "web"
+name = "cacerts"
+
+[[items]]
+section = "web"
+name = "cache"
+default = true
+
+[[items]]
+section = "web"
+name = "certificate"
+
+[[items]]
+section = "web"
+name = "collapse"
+default = false
+
+[[items]]
+section = "web"
+name = "comparisoncontext"
+default = 5
+
+[[items]]
+section = "web"
+name = "contact"
+
+[[items]]
+section = "web"
+name = "csp"
+
+[[items]]
+section = "web"
+name = "deny_push"
+default-type = "list_type"
+
+[[items]]
+section = "web"
+name = "deny_read"
+default-type = "list_type"
+
+[[items]]
+section = "web"
+name = "descend"
+default = true
+
+[[items]]
+section = "web"
+name = "description"
+default = ""
+
+[[items]]
+section = "web"
+name = "encoding"
+default-type = "lazy_module"
+default = "encoding.encoding"
+
+[[items]]
+section = "web"
+name = "errorlog"
+default = "-"
+
+[[items]]
+section = "web"
+name = "guessmime"
+default = false
+
+[[items]]
+section = "web"
+name = "hidden"
+default = false
+
+[[items]]
+section = "web"
+name = "ipv6"
+default = false
+
+[[items]]
+section = "web"
+name = "labels"
+default-type = "list_type"
+
+[[items]]
+section = "web"
+name = "logoimg"
+default = "hglogo.png"
+
+[[items]]
+section = "web"
+name = "logourl"
+default = "https://mercurial-scm.org/"
+
+[[items]]
+section = "web"
+name = "maxchanges"
+default = 10
+
+[[items]]
+section = "web"
+name = "maxfiles"
+default = 10
+
+[[items]]
+section = "web"
+name = "maxshortchanges"
+default = 60
+
+[[items]]
+section = "web"
+name = "motd"
+default = ""
+
+[[items]]
+section = "web"
+name = "name"
+default-type = "dynamic"
+
+[[items]]
+section = "web"
+name = "port"
+default = 8000
+
+[[items]]
+section = "web"
+name = "prefix"
+default = ""
+
+[[items]]
+section = "web"
+name = "push_ssl"
+default = true
+
+[[items]]
+section = "web"
+name = "refreshinterval"
+default = 20
+
+[[items]]
+section = "web"
+name = "server-header"
+
+[[items]]
+section = "web"
+name = "static"
+
+[[items]]
+section = "web"
+name = "staticurl"
+
+[[items]]
+section = "web"
+name = "stripes"
+default = 1
+
+[[items]]
+section = "web"
+name = "style"
+default = "paper"
+
+[[items]]
+section = "web"
+name = "templates"
+
+[[items]]
+section = "web"
+name = "view"
+default = "served"
+experimental = true
+
+[[items]]
+section = "worker"
+name = "backgroundclose"
+default-type = "dynamic"
+
+[[items]]
+section = "worker"
+name = "backgroundclosemaxqueue"
+# Windows defaults to a limit of 512 open files. A buffer of 128
+# should give us enough headway.
+default = 384
+
+[[items]]
+section = "worker"
+name = "backgroundcloseminfilecount"
+default = 2048
+
+[[items]]
+section = "worker"
+name = "backgroundclosethreadcount"
+default = 4
+
+[[items]]
+section = "worker"
+name = "enabled"
+default = true
+
+[[items]]
+section = "worker"
+name = "numcpus"
+
+# Templates and template applications
+
+[[template-applications]]
+template = "diff-options"
+section = "annotate"
+
+[[template-applications]]
+template = "diff-options"
+section = "commands"
+prefix = "commit.interactive"
+
+[[template-applications]]
+template = "diff-options"
+section = "commands"
+prefix = "revert.interactive"
+
+[[template-applications]]
+template = "diff-options"
+section = "diff"
+
+[templates]
+[[templates.diff-options]]
+suffix = "nodates"
+default = false
+
+[[templates.diff-options]]
+suffix = "showfunc"
+default = false
+
+[[templates.diff-options]]
+suffix = "unified"
+
+[[templates.diff-options]]
+suffix = "git"
+default = false
+
+[[templates.diff-options]]
+suffix = "ignorews"
+default = false
+
+[[templates.diff-options]]
+suffix = "ignorewsamount"
+default = false
+
+[[templates.diff-options]]
+suffix = "ignoreblanklines"
+default = false
+
+[[templates.diff-options]]
+suffix = "ignorewseol"
+default = false
+
+[[templates.diff-options]]
+suffix = "nobinary"
+default = false
+
+[[templates.diff-options]]
+suffix = "noprefix"
+default = false
+
+[[templates.diff-options]]
+suffix = "word-diff"
+default = false
+
+# In-core extensions
+
+[[items]]
+section = "blackbox"
+name = "dirty"
+default = false
+in_core_extension = "blackbox"
+
+[[items]]
+section = "blackbox"
+name = "maxsize"
+default = "1 MB"
+in_core_extension = "blackbox"
+
+[[items]]
+section = "blackbox"
+name = "logsource"
+default = false
+in_core_extension = "blackbox"
+
+[[items]]
+section = "blackbox"
+name = "maxfiles"
+default = 7
+in_core_extension = "blackbox"
+
+[[items]]
+section = "blackbox"
+name = "track"
+default-type = "lambda"
+default = ["*"]
+in_core_extension = "blackbox"
+
+[[items]]
+section = "blackbox"
+name = "ignore"
+default-type = "lambda"
+default = ["chgserver", "cmdserver", "extension"]
+in_core_extension = "blackbox"
+
+[[items]]
+section = "blackbox"
+name = "date-format"
+default = ""
+in_core_extension = "blackbox"
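
The `[[items]]` schema used throughout this file (section, name, optional
`default`/`default-type`, `generic`, `experimental`, `alias`, `documentation`,
and now `in_core_extension`) is what the vendored `tomli` parser added below is
meant to read. A minimal loader sketch, assuming the schema file lives at
`mercurial/configitems.toml` and that the package is importable as
`mercurial.thirdparty.tomli`; the actual registration code in Mercurial may
differ:

```python
# Hypothetical loader sketch, not the actual Mercurial registration code.
from mercurial.thirdparty import tomli

with open("mercurial/configitems.toml", "rb") as f:  # tomli requires binary mode
    data = tomli.load(f)

for item in data["items"]:
    # Each [[items]] table carries at least a section and a name.
    section, name = item["section"], item["name"]
    default = item.get("default")  # may be absent when only default-type is given
    print(f"[{section}] {name} -> default={default!r}")
```
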
--- a/mercurial/extensions.py	Mon Aug 07 11:05:43 2023 +0200
+++ b/mercurial/extensions.py	Mon Aug 07 11:08:00 2023 +0200
@@ -625,6 +625,10 @@
 
     def __init__(self, container, funcname, wrapper):
         assert callable(wrapper)
+        if not isinstance(funcname, str):
+            msg = b"pass wrappedfunction target name as `str`, not `bytes`"
+            util.nouideprecwarn(msg, b"6.6", stacklevel=2)
+            funcname = pycompat.sysstr(funcname)
         self._container = container
         self._funcname = funcname
         self._wrapper = wrapper
@@ -671,6 +675,11 @@
     """
     assert callable(wrapper)
 
+    if not isinstance(funcname, str):
+        msg = b"pass wrapfunction target name as `str`, not `bytes`"
+        util.nouideprecwarn(msg, b"6.6", stacklevel=2)
+        funcname = pycompat.sysstr(funcname)
+
     origfn = getattr(container, funcname)
     assert callable(origfn)
     if inspect.ismodule(container):
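
For extension authors, the change above means the wrap target should now be
named with a `str`; passing `bytes` still works but emits a deprecation warning
and is slated to go away in 6.6. A hypothetical sketch of the updated call
style (illustrative extension code, not part of this changeset):

```python
# Hypothetical third-party extension: note the str (not bytes) target name.
from mercurial import extensions, exchange

def wrapped_pull(orig, *args, **kwargs):
    # Do any extra work here, then delegate to the original implementation.
    return orig(*args, **kwargs)

def uisetup(ui):
    extensions.wrapfunction(exchange, 'pull', wrapped_pull)  # was b'pull'
```
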
--- a/mercurial/exthelper.py	Mon Aug 07 11:05:43 2023 +0200
+++ b/mercurial/exthelper.py	Mon Aug 07 11:08:00 2023 +0200
@@ -325,7 +325,7 @@
             # Required, otherwise the function will not be wrapped
             uisetup = eh.finaluisetup
 
-            @eh.wrapfunction(discovery, b'checkheads')
+            @eh.wrapfunction(discovery, 'checkheads')
             def wrapcheckheads(orig, *args, **kwargs):
                 ui.note(b'His head smashed in and his heart cut out')
                 return orig(*args, **kwargs)
--- a/mercurial/localrepo.py	Mon Aug 07 11:05:43 2023 +0200
+++ b/mercurial/localrepo.py	Mon Aug 07 11:08:00 2023 +0200
@@ -58,6 +58,7 @@
     obsolete,
     pathutil,
     phases,
+    policy,
     pushkey,
     pycompat,
     rcutil,
@@ -3763,7 +3764,11 @@
     if ui.configbool(b'format', b'bookmarks-in-store'):
         requirements.add(requirementsmod.BOOKMARKS_IN_STORE_REQUIREMENT)
 
-    if ui.configbool(b'format', b'use-persistent-nodemap'):
+    # The feature is disabled unless a fast implementation is available.
+    persistent_nodemap_default = policy.importrust('revlog') is not None
+    if ui.configbool(
+        b'format', b'use-persistent-nodemap', persistent_nodemap_default
+    ):
         requirements.add(requirementsmod.NODEMAP_REQUIREMENT)
 
     # if share-safe is enabled, let's create the new repository with the new
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/mercurial/thirdparty/tomli/LICENSE	Mon Aug 07 11:08:00 2023 +0200
@@ -0,0 +1,21 @@
+MIT License
+
+Copyright (c) 2021 Taneli Hukkinen
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/mercurial/thirdparty/tomli/README.md	Mon Aug 07 11:08:00 2023 +0200
@@ -0,0 +1,182 @@
+[![Build Status](https://github.com/hukkin/tomli/workflows/Tests/badge.svg?branch=master)](https://github.com/hukkin/tomli/actions?query=workflow%3ATests+branch%3Amaster+event%3Apush)
+[![codecov.io](https://codecov.io/gh/hukkin/tomli/branch/master/graph/badge.svg)](https://codecov.io/gh/hukkin/tomli)
+[![PyPI version](https://img.shields.io/pypi/v/tomli)](https://pypi.org/project/tomli)
+
+# Tomli
+
+> A lil' TOML parser
+
+**Table of Contents**  *generated with [mdformat-toc](https://github.com/hukkin/mdformat-toc)*
+
+<!-- mdformat-toc start --slug=github --maxlevel=6 --minlevel=2 -->
+
+- [Intro](#intro)
+- [Installation](#installation)
+- [Usage](#usage)
+  - [Parse a TOML string](#parse-a-toml-string)
+  - [Parse a TOML file](#parse-a-toml-file)
+  - [Handle invalid TOML](#handle-invalid-toml)
+  - [Construct `decimal.Decimal`s from TOML floats](#construct-decimaldecimals-from-toml-floats)
+- [FAQ](#faq)
+  - [Why this parser?](#why-this-parser)
+  - [Is comment preserving round-trip parsing supported?](#is-comment-preserving-round-trip-parsing-supported)
+  - [Is there a `dumps`, `write` or `encode` function?](#is-there-a-dumps-write-or-encode-function)
+  - [How do TOML types map into Python types?](#how-do-toml-types-map-into-python-types)
+- [Performance](#performance)
+
+<!-- mdformat-toc end -->
+
+## Intro<a name="intro"></a>
+
+Tomli is a Python library for parsing [TOML](https://toml.io).
+Tomli is fully compatible with [TOML v1.0.0](https://toml.io/en/v1.0.0).
+
+## Installation<a name="installation"></a>
+
+```bash
+pip install tomli
+```
+
+## Usage<a name="usage"></a>
+
+### Parse a TOML string<a name="parse-a-toml-string"></a>
+
+```python
+import tomli
+
+toml_str = """
+           gretzky = 99
+
+           [kurri]
+           jari = 17
+           """
+
+toml_dict = tomli.loads(toml_str)
+assert toml_dict == {"gretzky": 99, "kurri": {"jari": 17}}
+```
+
+### Parse a TOML file<a name="parse-a-toml-file"></a>
+
+```python
+import tomli
+
+with open("path_to_file/conf.toml", "rb") as f:
+    toml_dict = tomli.load(f)
+```
+
+The file must be opened in binary mode (with the `"rb"` flag).
+Binary mode will enforce decoding the file as UTF-8 with universal newlines disabled,
+both of which are required to correctly parse TOML.
+Support for text file objects is deprecated for removal in the next major release.
+
+### Handle invalid TOML<a name="handle-invalid-toml"></a>
+
+```python
+import tomli
+
+try:
+    toml_dict = tomli.loads("]] this is invalid TOML [[")
+except tomli.TOMLDecodeError:
+    print("Yep, definitely not valid.")
+```
+
+Note that while the `TOMLDecodeError` type is public API, error messages of raised instances of it are not.
+Error messages should not be assumed to stay constant across Tomli versions.
+
+### Construct `decimal.Decimal`s from TOML floats<a name="construct-decimaldecimals-from-toml-floats"></a>
+
+```python
+from decimal import Decimal
+import tomli
+
+toml_dict = tomli.loads("precision-matters = 0.982492", parse_float=Decimal)
+assert toml_dict["precision-matters"] == Decimal("0.982492")
+```
+
+Note that `decimal.Decimal` can be replaced with another callable that converts a TOML float from string to a Python type.
+`decimal.Decimal` is, however, a practical choice for use cases where float inaccuracies cannot be tolerated.
+
+Illegal types include `dict`, `list`, and anything that has the `append` attribute.
+Parsing floats into an illegal type results in undefined behavior.
+
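+Another legal choice is `fractions.Fraction`, which also accepts the raw TOML
+float string (a small illustrative snippet, not from the test suite):
+
+```python
+from fractions import Fraction
+import tomli
+
+toml_dict = tomli.loads("pi-ish = 3.14159", parse_float=Fraction)
+assert toml_dict["pi-ish"] == Fraction("3.14159")
+```
+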
+## FAQ<a name="faq"></a>
+
+### Why this parser?<a name="why-this-parser"></a>
+
+- it's lil'
+- pure Python with zero dependencies
+- the fastest pure Python parser [\*](#performance):
+  15x as fast as [tomlkit](https://pypi.org/project/tomlkit/),
+  2.4x as fast as [toml](https://pypi.org/project/toml/)
+- outputs [basic data types](#how-do-toml-types-map-into-python-types) only
+- 100% spec compliant: passes all tests in
+  [a test set](https://github.com/toml-lang/compliance/pull/8)
+  soon to be merged to the official
+  [compliance tests for TOML](https://github.com/toml-lang/compliance)
+  repository
+- thoroughly tested: 100% branch coverage
+
+### Is comment preserving round-trip parsing supported?<a name="is-comment-preserving-round-trip-parsing-supported"></a>
+
+No.
+
+The `tomli.loads` function returns a plain `dict` that is populated with builtin types and types from the standard library only.
+Preserving comments requires a custom type to be returned, so it will not be supported,
+at least not by the `tomli.loads` and `tomli.load` functions.
+
+Look into [TOML Kit](https://github.com/sdispater/tomlkit) if preservation of style is what you need.
+
+### Is there a `dumps`, `write` or `encode` function?<a name="is-there-a-dumps-write-or-encode-function"></a>
+
+[Tomli-W](https://github.com/hukkin/tomli-w) is the write-only counterpart of Tomli, providing `dump` and `dumps` functions.
+
+The core library does not include write capability, as most TOML use cases are read-only, and Tomli intends to be minimal.
+
+### How do TOML types map into Python types?<a name="how-do-toml-types-map-into-python-types"></a>
+
+| TOML type        | Python type         | Details                                                      |
+| ---------------- | ------------------- | ------------------------------------------------------------ |
+| Document Root    | `dict`              |                                                              |
+| Key              | `str`               |                                                              |
+| String           | `str`               |                                                              |
+| Integer          | `int`               |                                                              |
+| Float            | `float`             |                                                              |
+| Boolean          | `bool`              |                                                              |
+| Offset Date-Time | `datetime.datetime` | `tzinfo` attribute set to an instance of `datetime.timezone` |
+| Local Date-Time  | `datetime.datetime` | `tzinfo` attribute set to `None`                             |
+| Local Date       | `datetime.date`     |                                                              |
+| Local Time       | `datetime.time`     |                                                              |
+| Array            | `list`              |                                                              |
+| Table            | `dict`              |                                                              |
+| Inline Table     | `dict`              |                                                              |
+
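+A quick illustration of the date-time rows above (hypothetical snippet, not
+from the test suite):
+
+```python
+import datetime
+import tomli
+
+doc = tomli.loads("""
+released = 1979-05-27T07:32:00-08:00
+birthday = 1979-05-27
+""")
+assert isinstance(doc["released"], datetime.datetime)
+assert doc["released"].tzinfo is not None  # offset date-time keeps its zone
+assert not isinstance(doc["birthday"], datetime.datetime)  # plain datetime.date
+```
+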
+## Performance<a name="performance"></a>
+
+The `benchmark/` folder in this repository contains a performance benchmark for comparing the various Python TOML parsers.
+The benchmark can be run with `tox -e benchmark-pypi`.
+Running the benchmark on my personal computer output the following:
+
+```console
+foo@bar:~/dev/tomli$ tox -e benchmark-pypi
+benchmark-pypi installed: attrs==19.3.0,click==7.1.2,pytomlpp==1.0.2,qtoml==0.3.0,rtoml==0.7.0,toml==0.10.2,tomli==1.1.0,tomlkit==0.7.2
+benchmark-pypi run-test-pre: PYTHONHASHSEED='2658546909'
+benchmark-pypi run-test: commands[0] | python -c 'import datetime; print(datetime.date.today())'
+2021-07-23
+benchmark-pypi run-test: commands[1] | python --version
+Python 3.8.10
+benchmark-pypi run-test: commands[2] | python benchmark/run.py
+Parsing data.toml 5000 times:
+------------------------------------------------------
+    parser |  exec time | performance (more is better)
+-----------+------------+-----------------------------
+     rtoml |    0.901 s | baseline (100%)
+  pytomlpp |     1.08 s | 83.15%
+     tomli |     3.89 s | 23.15%
+      toml |     9.36 s | 9.63%
+     qtoml |     11.5 s | 7.82%
+   tomlkit |     56.8 s | 1.59%
+```
+
+The parsers are ordered from fastest to slowest, using the fastest parser as baseline.
+Tomli performed the best out of all pure Python TOML parsers,
+losing only to pytomlpp (wraps C++) and rtoml (wraps Rust).
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/mercurial/thirdparty/tomli/__init__.py	Mon Aug 07 11:08:00 2023 +0200
@@ -0,0 +1,9 @@
+"""A lil' TOML parser."""
+
+__all__ = ("loads", "load", "TOMLDecodeError")
+__version__ = "1.2.3"  # DO NOT EDIT THIS LINE MANUALLY. LET bump2version UTILITY DO IT
+
+from ._parser import TOMLDecodeError, load, loads
+
+# Pretend this exception was created here.
+TOMLDecodeError.__module__ = "tomli"
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/mercurial/thirdparty/tomli/_parser.py	Mon Aug 07 11:08:00 2023 +0200
@@ -0,0 +1,663 @@
+import string
+from types import MappingProxyType
+from typing import Any, BinaryIO, Dict, FrozenSet, Iterable, NamedTuple, Optional, Tuple
+import warnings
+
+from ._re import (
+    RE_DATETIME,
+    RE_LOCALTIME,
+    RE_NUMBER,
+    match_to_datetime,
+    match_to_localtime,
+    match_to_number,
+)
+from ._types import Key, ParseFloat, Pos
+
+ASCII_CTRL = frozenset(chr(i) for i in range(32)) | frozenset(chr(127))
+
+# Neither of these sets include quotation mark or backslash. They are
+# currently handled as separate cases in the parser functions.
+ILLEGAL_BASIC_STR_CHARS = ASCII_CTRL - frozenset("\t")
+ILLEGAL_MULTILINE_BASIC_STR_CHARS = ASCII_CTRL - frozenset("\t\n")
+
+ILLEGAL_LITERAL_STR_CHARS = ILLEGAL_BASIC_STR_CHARS
+ILLEGAL_MULTILINE_LITERAL_STR_CHARS = ILLEGAL_MULTILINE_BASIC_STR_CHARS
+
+ILLEGAL_COMMENT_CHARS = ILLEGAL_BASIC_STR_CHARS
+
+TOML_WS = frozenset(" \t")
+TOML_WS_AND_NEWLINE = TOML_WS | frozenset("\n")
+BARE_KEY_CHARS = frozenset(string.ascii_letters + string.digits + "-_")
+KEY_INITIAL_CHARS = BARE_KEY_CHARS | frozenset("\"'")
+HEXDIGIT_CHARS = frozenset(string.hexdigits)
+
+BASIC_STR_ESCAPE_REPLACEMENTS = MappingProxyType(
+    {
+        "\\b": "\u0008",  # backspace
+        "\\t": "\u0009",  # tab
+        "\\n": "\u000A",  # linefeed
+        "\\f": "\u000C",  # form feed
+        "\\r": "\u000D",  # carriage return
+        '\\"': "\u0022",  # quote
+        "\\\\": "\u005C",  # backslash
+    }
+)
+
+
+class TOMLDecodeError(ValueError):
+    """An error raised if a document is not valid TOML."""
+
+
+def load(fp: BinaryIO, *, parse_float: ParseFloat = float) -> Dict[str, Any]:
+    """Parse TOML from a binary file object."""
+    s_bytes = fp.read()
+    try:
+        s = s_bytes.decode()
+    except AttributeError:
+        warnings.warn(
+            "Text file object support is deprecated in favor of binary file objects."
+            ' Use `open("foo.toml", "rb")` to open the file in binary mode.',
+            DeprecationWarning,
+            stacklevel=2,
+        )
+        s = s_bytes  # type: ignore[assignment]
+    return loads(s, parse_float=parse_float)
+
+
+def loads(s: str, *, parse_float: ParseFloat = float) -> Dict[str, Any]:  # noqa: C901
+    """Parse TOML from a string."""
+
+    # The spec allows converting "\r\n" to "\n", even in string
+    # literals. Let's do so to simplify parsing.
+    src = s.replace("\r\n", "\n")
+    pos = 0
+    out = Output(NestedDict(), Flags())
+    header: Key = ()
+
+    # Parse one statement at a time
+    # (typically means one line in TOML source)
+    while True:
+        # 1. Skip line leading whitespace
+        pos = skip_chars(src, pos, TOML_WS)
+
+        # 2. Parse rules. Expect one of the following:
+        #    - end of file
+        #    - end of line
+        #    - comment
+        #    - key/value pair
+        #    - append dict to list (and move to its namespace)
+        #    - create dict (and move to its namespace)
+        # Skip trailing whitespace when applicable.
+        try:
+            char = src[pos]
+        except IndexError:
+            break
+        if char == "\n":
+            pos += 1
+            continue
+        if char in KEY_INITIAL_CHARS:
+            pos = key_value_rule(src, pos, out, header, parse_float)
+            pos = skip_chars(src, pos, TOML_WS)
+        elif char == "[":
+            try:
+                second_char: Optional[str] = src[pos + 1]
+            except IndexError:
+                second_char = None
+            if second_char == "[":
+                pos, header = create_list_rule(src, pos, out)
+            else:
+                pos, header = create_dict_rule(src, pos, out)
+            pos = skip_chars(src, pos, TOML_WS)
+        elif char != "#":
+            raise suffixed_err(src, pos, "Invalid statement")
+
+        # 3. Skip comment
+        pos = skip_comment(src, pos)
+
+        # 4. Expect end of line or end of file
+        try:
+            char = src[pos]
+        except IndexError:
+            break
+        if char != "\n":
+            raise suffixed_err(
+                src, pos, "Expected newline or end of document after a statement"
+            )
+        pos += 1
+
+    return out.data.dict
+
+
+class Flags:
+    """Flags that map to parsed keys/namespaces."""
+
+    # Marks an immutable namespace (inline array or inline table).
+    FROZEN = 0
+    # Marks a nest that has been explicitly created and can no longer
+    # be opened using the "[table]" syntax.
+    EXPLICIT_NEST = 1
+
+    def __init__(self) -> None:
+        self._flags: Dict[str, dict] = {}
+
+    def unset_all(self, key: Key) -> None:
+        cont = self._flags
+        for k in key[:-1]:
+            if k not in cont:
+                return
+            cont = cont[k]["nested"]
+        cont.pop(key[-1], None)
+
+    def set_for_relative_key(self, head_key: Key, rel_key: Key, flag: int) -> None:
+        cont = self._flags
+        for k in head_key:
+            if k not in cont:
+                cont[k] = {"flags": set(), "recursive_flags": set(), "nested": {}}
+            cont = cont[k]["nested"]
+        for k in rel_key:
+            if k in cont:
+                cont[k]["flags"].add(flag)
+            else:
+                cont[k] = {"flags": {flag}, "recursive_flags": set(), "nested": {}}
+            cont = cont[k]["nested"]
+
+    def set(self, key: Key, flag: int, *, recursive: bool) -> None:  # noqa: A003
+        cont = self._flags
+        key_parent, key_stem = key[:-1], key[-1]
+        for k in key_parent:
+            if k not in cont:
+                cont[k] = {"flags": set(), "recursive_flags": set(), "nested": {}}
+            cont = cont[k]["nested"]
+        if key_stem not in cont:
+            cont[key_stem] = {"flags": set(), "recursive_flags": set(), "nested": {}}
+        cont[key_stem]["recursive_flags" if recursive else "flags"].add(flag)
+
+    def is_(self, key: Key, flag: int) -> bool:
+        if not key:
+            return False  # document root has no flags
+        cont = self._flags
+        for k in key[:-1]:
+            if k not in cont:
+                return False
+            inner_cont = cont[k]
+            if flag in inner_cont["recursive_flags"]:
+                return True
+            cont = inner_cont["nested"]
+        key_stem = key[-1]
+        if key_stem in cont:
+            cont = cont[key_stem]
+            return flag in cont["flags"] or flag in cont["recursive_flags"]
+        return False
+
+
+class NestedDict:
+    def __init__(self) -> None:
+        # The parsed content of the TOML document
+        self.dict: Dict[str, Any] = {}
+
+    def get_or_create_nest(
+        self,
+        key: Key,
+        *,
+        access_lists: bool = True,
+    ) -> dict:
+        cont: Any = self.dict
+        for k in key:
+            if k not in cont:
+                cont[k] = {}
+            cont = cont[k]
+            if access_lists and isinstance(cont, list):
+                cont = cont[-1]
+            if not isinstance(cont, dict):
+                raise KeyError("There is no nest behind this key")
+        return cont
+
+    def append_nest_to_list(self, key: Key) -> None:
+        cont = self.get_or_create_nest(key[:-1])
+        last_key = key[-1]
+        if last_key in cont:
+            list_ = cont[last_key]
+            try:
+                list_.append({})
+            except AttributeError:
+                raise KeyError("An object other than list found behind this key")
+        else:
+            cont[last_key] = [{}]
+
+
+class Output(NamedTuple):
+    data: NestedDict
+    flags: Flags
+
+
+def skip_chars(src: str, pos: Pos, chars: Iterable[str]) -> Pos:
+    try:
+        while src[pos] in chars:
+            pos += 1
+    except IndexError:
+        pass
+    return pos
+
+
+def skip_until(
+    src: str,
+    pos: Pos,
+    expect: str,
+    *,
+    error_on: FrozenSet[str],
+    error_on_eof: bool,
+) -> Pos:
+    try:
+        new_pos = src.index(expect, pos)
+    except ValueError:
+        new_pos = len(src)
+        if error_on_eof:
+            raise suffixed_err(src, new_pos, f"Expected {expect!r}") from None
+
+    if not error_on.isdisjoint(src[pos:new_pos]):
+        while src[pos] not in error_on:
+            pos += 1
+        raise suffixed_err(src, pos, f"Found invalid character {src[pos]!r}")
+    return new_pos
+
+
+def skip_comment(src: str, pos: Pos) -> Pos:
+    try:
+        char: Optional[str] = src[pos]
+    except IndexError:
+        char = None
+    if char == "#":
+        return skip_until(
+            src, pos + 1, "\n", error_on=ILLEGAL_COMMENT_CHARS, error_on_eof=False
+        )
+    return pos
+
+
+def skip_comments_and_array_ws(src: str, pos: Pos) -> Pos:
+    while True:
+        pos_before_skip = pos
+        pos = skip_chars(src, pos, TOML_WS_AND_NEWLINE)
+        pos = skip_comment(src, pos)
+        if pos == pos_before_skip:
+            return pos
+
+
+def create_dict_rule(src: str, pos: Pos, out: Output) -> Tuple[Pos, Key]:
+    pos += 1  # Skip "["
+    pos = skip_chars(src, pos, TOML_WS)
+    pos, key = parse_key(src, pos)
+
+    if out.flags.is_(key, Flags.EXPLICIT_NEST) or out.flags.is_(key, Flags.FROZEN):
+        raise suffixed_err(src, pos, f"Can not declare {key} twice")
+    out.flags.set(key, Flags.EXPLICIT_NEST, recursive=False)
+    try:
+        out.data.get_or_create_nest(key)
+    except KeyError:
+        raise suffixed_err(src, pos, "Can not overwrite a value") from None
+
+    if not src.startswith("]", pos):
+        raise suffixed_err(src, pos, 'Expected "]" at the end of a table declaration')
+    return pos + 1, key
+
+
+def create_list_rule(src: str, pos: Pos, out: Output) -> Tuple[Pos, Key]:
+    pos += 2  # Skip "[["
+    pos = skip_chars(src, pos, TOML_WS)
+    pos, key = parse_key(src, pos)
+
+    if out.flags.is_(key, Flags.FROZEN):
+        raise suffixed_err(src, pos, f"Can not mutate immutable namespace {key}")
+    # Free the namespace now that it points to another empty list item...
+    out.flags.unset_all(key)
+    # ...but this key precisely is still prohibited from table declaration
+    out.flags.set(key, Flags.EXPLICIT_NEST, recursive=False)
+    try:
+        out.data.append_nest_to_list(key)
+    except KeyError:
+        raise suffixed_err(src, pos, "Can not overwrite a value") from None
+
+    if not src.startswith("]]", pos):
+        raise suffixed_err(src, pos, 'Expected "]]" at the end of an array declaration')
+    return pos + 2, key
+
+
+def key_value_rule(
+    src: str, pos: Pos, out: Output, header: Key, parse_float: ParseFloat
+) -> Pos:
+    pos, key, value = parse_key_value_pair(src, pos, parse_float)
+    key_parent, key_stem = key[:-1], key[-1]
+    abs_key_parent = header + key_parent
+
+    if out.flags.is_(abs_key_parent, Flags.FROZEN):
+        raise suffixed_err(
+            src, pos, f"Can not mutate immutable namespace {abs_key_parent}"
+        )
+    # Containers in the relative path can't be opened with the table syntax after this
+    out.flags.set_for_relative_key(header, key, Flags.EXPLICIT_NEST)
+    try:
+        nest = out.data.get_or_create_nest(abs_key_parent)
+    except KeyError:
+        raise suffixed_err(src, pos, "Can not overwrite a value") from None
+    if key_stem in nest:
+        raise suffixed_err(src, pos, "Can not overwrite a value")
+    # Mark inline table and array namespaces recursively immutable
+    if isinstance(value, (dict, list)):
+        out.flags.set(header + key, Flags.FROZEN, recursive=True)
+    nest[key_stem] = value
+    return pos
+
+
+def parse_key_value_pair(
+    src: str, pos: Pos, parse_float: ParseFloat
+) -> Tuple[Pos, Key, Any]:
+    pos, key = parse_key(src, pos)
+    try:
+        char: Optional[str] = src[pos]
+    except IndexError:
+        char = None
+    if char != "=":
+        raise suffixed_err(src, pos, 'Expected "=" after a key in a key/value pair')
+    pos += 1
+    pos = skip_chars(src, pos, TOML_WS)
+    pos, value = parse_value(src, pos, parse_float)
+    return pos, key, value
+
+
+def parse_key(src: str, pos: Pos) -> Tuple[Pos, Key]:
+    pos, key_part = parse_key_part(src, pos)
+    key: Key = (key_part,)
+    pos = skip_chars(src, pos, TOML_WS)
+    while True:
+        try:
+            char: Optional[str] = src[pos]
+        except IndexError:
+            char = None
+        if char != ".":
+            return pos, key
+        pos += 1
+        pos = skip_chars(src, pos, TOML_WS)
+        pos, key_part = parse_key_part(src, pos)
+        key += (key_part,)
+        pos = skip_chars(src, pos, TOML_WS)
+
+
+def parse_key_part(src: str, pos: Pos) -> Tuple[Pos, str]:
+    try:
+        char: Optional[str] = src[pos]
+    except IndexError:
+        char = None
+    if char in BARE_KEY_CHARS:
+        start_pos = pos
+        pos = skip_chars(src, pos, BARE_KEY_CHARS)
+        return pos, src[start_pos:pos]
+    if char == "'":
+        return parse_literal_str(src, pos)
+    if char == '"':
+        return parse_one_line_basic_str(src, pos)
+    raise suffixed_err(src, pos, "Invalid initial character for a key part")
+
+
+def parse_one_line_basic_str(src: str, pos: Pos) -> Tuple[Pos, str]:
+    pos += 1
+    return parse_basic_str(src, pos, multiline=False)
+
+
+def parse_array(src: str, pos: Pos, parse_float: ParseFloat) -> Tuple[Pos, list]:
+    pos += 1
+    array: list = []
+
+    pos = skip_comments_and_array_ws(src, pos)
+    if src.startswith("]", pos):
+        return pos + 1, array
+    while True:
+        pos, val = parse_value(src, pos, parse_float)
+        array.append(val)
+        pos = skip_comments_and_array_ws(src, pos)
+
+        c = src[pos : pos + 1]
+        if c == "]":
+            return pos + 1, array
+        if c != ",":
+            raise suffixed_err(src, pos, "Unclosed array")
+        pos += 1
+
+        pos = skip_comments_and_array_ws(src, pos)
+        if src.startswith("]", pos):
+            return pos + 1, array
+
+
+def parse_inline_table(src: str, pos: Pos, parse_float: ParseFloat) -> Tuple[Pos, dict]:
+    pos += 1
+    nested_dict = NestedDict()
+    flags = Flags()
+
+    pos = skip_chars(src, pos, TOML_WS)
+    if src.startswith("}", pos):
+        return pos + 1, nested_dict.dict
+    while True:
+        pos, key, value = parse_key_value_pair(src, pos, parse_float)
+        key_parent, key_stem = key[:-1], key[-1]
+        if flags.is_(key, Flags.FROZEN):
+            raise suffixed_err(src, pos, f"Can not mutate immutable namespace {key}")
+        try:
+            nest = nested_dict.get_or_create_nest(key_parent, access_lists=False)
+        except KeyError:
+            raise suffixed_err(src, pos, "Can not overwrite a value") from None
+        if key_stem in nest:
+            raise suffixed_err(src, pos, f"Duplicate inline table key {key_stem!r}")
+        nest[key_stem] = value
+        pos = skip_chars(src, pos, TOML_WS)
+        c = src[pos : pos + 1]
+        if c == "}":
+            return pos + 1, nested_dict.dict
+        if c != ",":
+            raise suffixed_err(src, pos, "Unclosed inline table")
+        if isinstance(value, (dict, list)):
+            flags.set(key, Flags.FROZEN, recursive=True)
+        pos += 1
+        pos = skip_chars(src, pos, TOML_WS)
+
+
+def parse_basic_str_escape(  # noqa: C901
+    src: str, pos: Pos, *, multiline: bool = False
+) -> Tuple[Pos, str]:
+    escape_id = src[pos : pos + 2]
+    pos += 2
+    if multiline and escape_id in {"\\ ", "\\\t", "\\\n"}:
+        # Skip whitespace until next non-whitespace character or end of
+        # the doc. Error if non-whitespace is found before newline.
+        if escape_id != "\\\n":
+            pos = skip_chars(src, pos, TOML_WS)
+            try:
+                char = src[pos]
+            except IndexError:
+                return pos, ""
+            if char != "\n":
+                raise suffixed_err(src, pos, 'Unescaped "\\" in a string')
+            pos += 1
+        pos = skip_chars(src, pos, TOML_WS_AND_NEWLINE)
+        return pos, ""
+    if escape_id == "\\u":
+        return parse_hex_char(src, pos, 4)
+    if escape_id == "\\U":
+        return parse_hex_char(src, pos, 8)
+    try:
+        return pos, BASIC_STR_ESCAPE_REPLACEMENTS[escape_id]
+    except KeyError:
+        if len(escape_id) != 2:
+            raise suffixed_err(src, pos, "Unterminated string") from None
+        raise suffixed_err(src, pos, 'Unescaped "\\" in a string') from None
+
+
+def parse_basic_str_escape_multiline(src: str, pos: Pos) -> Tuple[Pos, str]:
+    return parse_basic_str_escape(src, pos, multiline=True)
+
+
+def parse_hex_char(src: str, pos: Pos, hex_len: int) -> Tuple[Pos, str]:
+    hex_str = src[pos : pos + hex_len]
+    if len(hex_str) != hex_len or not HEXDIGIT_CHARS.issuperset(hex_str):
+        raise suffixed_err(src, pos, "Invalid hex value")
+    pos += hex_len
+    hex_int = int(hex_str, 16)
+    if not is_unicode_scalar_value(hex_int):
+        raise suffixed_err(src, pos, "Escaped character is not a Unicode scalar value")
+    return pos, chr(hex_int)
+
+
+def parse_literal_str(src: str, pos: Pos) -> Tuple[Pos, str]:
+    pos += 1  # Skip starting apostrophe
+    start_pos = pos
+    pos = skip_until(
+        src, pos, "'", error_on=ILLEGAL_LITERAL_STR_CHARS, error_on_eof=True
+    )
+    return pos + 1, src[start_pos:pos]  # Skip ending apostrophe
+
+
+def parse_multiline_str(src: str, pos: Pos, *, literal: bool) -> Tuple[Pos, str]:
+    pos += 3
+    if src.startswith("\n", pos):
+        pos += 1
+
+    if literal:
+        delim = "'"
+        end_pos = skip_until(
+            src,
+            pos,
+            "'''",
+            error_on=ILLEGAL_MULTILINE_LITERAL_STR_CHARS,
+            error_on_eof=True,
+        )
+        result = src[pos:end_pos]
+        pos = end_pos + 3
+    else:
+        delim = '"'
+        pos, result = parse_basic_str(src, pos, multiline=True)
+
+    # Add at maximum two extra apostrophes/quotes if the end sequence
+    # is 4 or 5 chars long instead of just 3.
+    if not src.startswith(delim, pos):
+        return pos, result
+    pos += 1
+    if not src.startswith(delim, pos):
+        return pos, result + delim
+    pos += 1
+    return pos, result + (delim * 2)
+
+
+def parse_basic_str(src: str, pos: Pos, *, multiline: bool) -> Tuple[Pos, str]:
+    if multiline:
+        error_on = ILLEGAL_MULTILINE_BASIC_STR_CHARS
+        parse_escapes = parse_basic_str_escape_multiline
+    else:
+        error_on = ILLEGAL_BASIC_STR_CHARS
+        parse_escapes = parse_basic_str_escape
+    result = ""
+    start_pos = pos
+    while True:
+        try:
+            char = src[pos]
+        except IndexError:
+            raise suffixed_err(src, pos, "Unterminated string") from None
+        if char == '"':
+            if not multiline:
+                return pos + 1, result + src[start_pos:pos]
+            if src.startswith('"""', pos):
+                return pos + 3, result + src[start_pos:pos]
+            pos += 1
+            continue
+        if char == "\\":
+            result += src[start_pos:pos]
+            pos, parsed_escape = parse_escapes(src, pos)
+            result += parsed_escape
+            start_pos = pos
+            continue
+        if char in error_on:
+            raise suffixed_err(src, pos, f"Illegal character {char!r}")
+        pos += 1
+
+
+def parse_value(  # noqa: C901
+    src: str, pos: Pos, parse_float: ParseFloat
+) -> Tuple[Pos, Any]:
+    try:
+        char: Optional[str] = src[pos]
+    except IndexError:
+        char = None
+
+    # Basic strings
+    if char == '"':
+        if src.startswith('"""', pos):
+            return parse_multiline_str(src, pos, literal=False)
+        return parse_one_line_basic_str(src, pos)
+
+    # Literal strings
+    if char == "'":
+        if src.startswith("'''", pos):
+            return parse_multiline_str(src, pos, literal=True)
+        return parse_literal_str(src, pos)
+
+    # Booleans
+    if char == "t":
+        if src.startswith("true", pos):
+            return pos + 4, True
+    if char == "f":
+        if src.startswith("false", pos):
+            return pos + 5, False
+
+    # Dates and times
+    datetime_match = RE_DATETIME.match(src, pos)
+    if datetime_match:
+        try:
+            datetime_obj = match_to_datetime(datetime_match)
+        except ValueError as e:
+            raise suffixed_err(src, pos, "Invalid date or datetime") from e
+        return datetime_match.end(), datetime_obj
+    localtime_match = RE_LOCALTIME.match(src, pos)
+    if localtime_match:
+        return localtime_match.end(), match_to_localtime(localtime_match)
+
+    # Integers and "normal" floats.
+    # The regex will greedily match any type starting with a decimal
+    # char, so needs to be located after handling of dates and times.
+    number_match = RE_NUMBER.match(src, pos)
+    if number_match:
+        return number_match.end(), match_to_number(number_match, parse_float)
+
+    # Arrays
+    if char == "[":
+        return parse_array(src, pos, parse_float)
+
+    # Inline tables
+    if char == "{":
+        return parse_inline_table(src, pos, parse_float)
+
+    # Special floats
+    first_three = src[pos : pos + 3]
+    if first_three in {"inf", "nan"}:
+        return pos + 3, parse_float(first_three)
+    first_four = src[pos : pos + 4]
+    if first_four in {"-inf", "+inf", "-nan", "+nan"}:
+        return pos + 4, parse_float(first_four)
+
+    raise suffixed_err(src, pos, "Invalid value")
+
+
+def suffixed_err(src: str, pos: Pos, msg: str) -> TOMLDecodeError:
+    """Return a `TOMLDecodeError` where error message is suffixed with
+    coordinates in source."""
+
+    def coord_repr(src: str, pos: Pos) -> str:
+        if pos >= len(src):
+            return "end of document"
+        line = src.count("\n", 0, pos) + 1
+        if line == 1:
+            column = pos + 1
+        else:
+            column = pos - src.rindex("\n", 0, pos)
+        return f"line {line}, column {column}"
+
+    return TOMLDecodeError(f"{msg} (at {coord_repr(src, pos)})")
+
+
+def is_unicode_scalar_value(codepoint: int) -> bool:
+    return (0 <= codepoint <= 55295) or (57344 <= codepoint <= 1114111)
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/mercurial/thirdparty/tomli/_re.py	Mon Aug 07 11:08:00 2023 +0200
@@ -0,0 +1,101 @@
+from datetime import date, datetime, time, timedelta, timezone, tzinfo
+from functools import lru_cache
+import re
+from typing import Any, Optional, Union
+
+from ._types import ParseFloat
+
+# E.g.
+# - 00:32:00.999999
+# - 00:32:00
+_TIME_RE_STR = r"([01][0-9]|2[0-3]):([0-5][0-9]):([0-5][0-9])(?:\.([0-9]{1,6})[0-9]*)?"
+
+RE_NUMBER = re.compile(
+    r"""
+0
+(?:
+    x[0-9A-Fa-f](?:_?[0-9A-Fa-f])*   # hex
+    |
+    b[01](?:_?[01])*                 # bin
+    |
+    o[0-7](?:_?[0-7])*               # oct
+)
+|
+[+-]?(?:0|[1-9](?:_?[0-9])*)         # dec, integer part
+(?P<floatpart>
+    (?:\.[0-9](?:_?[0-9])*)?         # optional fractional part
+    (?:[eE][+-]?[0-9](?:_?[0-9])*)?  # optional exponent part
+)
+""",
+    flags=re.VERBOSE,
+)
+RE_LOCALTIME = re.compile(_TIME_RE_STR)
+RE_DATETIME = re.compile(
+    fr"""
+([0-9]{{4}})-(0[1-9]|1[0-2])-(0[1-9]|[12][0-9]|3[01])  # date, e.g. 1988-10-27
+(?:
+    [Tt ]
+    {_TIME_RE_STR}
+    (?:([Zz])|([+-])([01][0-9]|2[0-3]):([0-5][0-9]))?  # optional time offset
+)?
+""",
+    flags=re.VERBOSE,
+)
+
+
+def match_to_datetime(match: "re.Match") -> Union[datetime, date]:
+    """Convert a `RE_DATETIME` match to `datetime.datetime` or `datetime.date`.
+
+    Raises ValueError if the match does not correspond to a valid date
+    or datetime.
+    """
+    (
+        year_str,
+        month_str,
+        day_str,
+        hour_str,
+        minute_str,
+        sec_str,
+        micros_str,
+        zulu_time,
+        offset_sign_str,
+        offset_hour_str,
+        offset_minute_str,
+    ) = match.groups()
+    year, month, day = int(year_str), int(month_str), int(day_str)
+    if hour_str is None:
+        return date(year, month, day)
+    hour, minute, sec = int(hour_str), int(minute_str), int(sec_str)
+    micros = int(micros_str.ljust(6, "0")) if micros_str else 0
+    if offset_sign_str:
+        tz: Optional[tzinfo] = cached_tz(
+            offset_hour_str, offset_minute_str, offset_sign_str
+        )
+    elif zulu_time:
+        tz = timezone.utc
+    else:  # local date-time
+        tz = None
+    return datetime(year, month, day, hour, minute, sec, micros, tzinfo=tz)
+
+
+@lru_cache(maxsize=None)
+def cached_tz(hour_str: str, minute_str: str, sign_str: str) -> timezone:
+    sign = 1 if sign_str == "+" else -1
+    return timezone(
+        timedelta(
+            hours=sign * int(hour_str),
+            minutes=sign * int(minute_str),
+        )
+    )
+
+
+def match_to_localtime(match: "re.Match") -> time:
+    hour_str, minute_str, sec_str, micros_str = match.groups()
+    micros = int(micros_str.ljust(6, "0")) if micros_str else 0
+    return time(int(hour_str), int(minute_str), int(sec_str), micros)
+
+
+def match_to_number(match: "re.Match", parse_float: "ParseFloat") -> Any:
+    if match.group("floatpart"):
+        return parse_float(match.group())
+    return int(match.group(), 0)
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/mercurial/thirdparty/tomli/_types.py	Mon Aug 07 11:08:00 2023 +0200
@@ -0,0 +1,6 @@
+from typing import Any, Callable, Tuple
+
+# Type annotations
+ParseFloat = Callable[[str], Any]
+Key = Tuple[str, ...]
+Pos = int
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/mercurial/thirdparty/tomli/py.typed	Mon Aug 07 11:08:00 2023 +0200
@@ -0,0 +1,1 @@
+# Marker file for PEP 561
--- a/mercurial/transaction.py	Mon Aug 07 11:05:43 2023 +0200
+++ b/mercurial/transaction.py	Mon Aug 07 11:08:00 2023 +0200
@@ -867,7 +867,7 @@
                 self._vfsmap,
                 entries,
                 self._backupentries,
-                False,
+                unlink=True,
                 checkambigfiles=self._checkambigfiles,
             )
             self._report(_(b"rollback completed\n"))
--- a/mercurial/ui.py	Mon Aug 07 11:05:43 2023 +0200
+++ b/mercurial/ui.py	Mon Aug 07 11:08:00 2023 +0200
@@ -47,6 +47,7 @@
     configitems,
     encoding,
     error,
+    extensions,
     formatter,
     loggingutil,
     progress,
@@ -659,6 +660,12 @@
         item = self._knownconfig.get(section, {}).get(name)
         alternates = [(section, name)]
 
+        if item is not None and item.in_core_extension is not None:
+            # Only return the default for an in-core extension item if said
+            # extension is enabled
+            if item.in_core_extension in extensions.extensions(self):
+                item = None
+
         if item is not None:
             alternates.extend(item.alias)
             if callable(item.default):
--- a/relnotes/next	Mon Aug 07 11:05:43 2023 +0200
+++ b/relnotes/next	Mon Aug 07 11:08:00 2023 +0200
@@ -13,6 +13,8 @@
 
 == Backwards Compatibility Changes ==
 
+* remove the experimental infinitepush extension
+
 == Internal API Changes ==
 
 == Miscellaneous ==
--- a/rust/Cargo.lock	Mon Aug 07 11:05:43 2023 +0200
+++ b/rust/Cargo.lock	Mon Aug 07 11:08:00 2023 +0200
@@ -476,6 +476,12 @@
 
 [[package]]
 name = "hashbrown"
+version = "0.12.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888"
+
+[[package]]
+name = "hashbrown"
 version = "0.13.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "33ff8ae62cd3a9102e5637afc8452c55acf3844001bd5374e0b0bd7b6616c038"
@@ -517,7 +523,7 @@
  "derive_more",
  "flate2",
  "format-bytes",
- "hashbrown",
+ "hashbrown 0.13.1",
  "home",
  "im-rc",
  "itertools",
@@ -535,9 +541,11 @@
  "regex",
  "same-file",
  "self_cell",
+ "serde",
  "sha-1 0.10.0",
  "tempfile",
  "thread_local",
+ "toml",
  "twox-hash",
  "zstd",
 ]
@@ -610,6 +618,16 @@
 ]
 
 [[package]]
+name = "indexmap"
+version = "1.9.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1885e79c1fc4b10f0e172c475f458b7f7b93061064d98c3293e98c5ba0c8b399"
+dependencies = [
+ "autocfg",
+ "hashbrown 0.12.3",
+]
+
+[[package]]
 name = "instant"
 version = "0.1.12"
 source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -749,6 +767,15 @@
 ]
 
 [[package]]
+name = "nom8"
+version = "0.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ae01545c9c7fc4486ab7debaf2aad7003ac19431791868fb2e8066df97fad2f8"
+dependencies = [
+ "memchr",
+]
+
+[[package]]
 name = "num-integer"
 version = "0.1.45"
 source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -1107,6 +1134,35 @@
 checksum = "e25dfac463d778e353db5be2449d1cce89bd6fd23c9f1ea21310ce6e5a1b29c4"
 
 [[package]]
+name = "serde"
+version = "1.0.152"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bb7d1f0d3021d347a83e556fc4683dea2ea09d87bccdf88ff5c12545d89d5efb"
+dependencies = [
+ "serde_derive",
+]
+
+[[package]]
+name = "serde_derive"
+version = "1.0.152"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "af487d118eecd09402d70a5d72551860e788df87b464af30e5ea6a38c75c541e"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+]
+
+[[package]]
+name = "serde_spanned"
+version = "0.6.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0efd8caf556a6cebd3b285caf480045fcc1ac04f6bd786b09a6f11af30c4fcf4"
+dependencies = [
+ "serde",
+]
+
+[[package]]
 name = "sha-1"
 version = "0.9.8"
 source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -1160,9 +1216,9 @@
 
 [[package]]
 name = "syn"
-version = "1.0.103"
+version = "1.0.109"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a864042229133ada95abf3b54fdc62ef5ccabe9515b64717bcb9a1919e59445d"
+checksum = "72b64191b275b66ffe2469e8af2c1cfe3bafa67b529ead792a6d0160888b4237"
 dependencies = [
  "proc-macro2",
  "quote",
@@ -1213,6 +1269,40 @@
 ]
 
 [[package]]
+name = "toml"
+version = "0.6.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4fb9d890e4dc9298b70f740f615f2e05b9db37dce531f6b24fb77ac993f9f217"
+dependencies = [
+ "serde",
+ "serde_spanned",
+ "toml_datetime",
+ "toml_edit",
+]
+
+[[package]]
+name = "toml_datetime"
+version = "0.5.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4553f467ac8e3d374bc9a177a26801e5d0f9b211aa1673fb137a403afd1c9cf5"
+dependencies = [
+ "serde",
+]
+
+[[package]]
+name = "toml_edit"
+version = "0.18.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "56c59d8dd7d0dcbc6428bf7aa2f0e823e26e43b3c9aca15bbc9475d23e5fa12b"
+dependencies = [
+ "indexmap",
+ "nom8",
+ "serde",
+ "serde_spanned",
+ "toml_datetime",
+]
+
+[[package]]
 name = "twox-hash"
 version = "1.6.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
--- a/rust/hg-core/Cargo.toml	Mon Aug 07 11:05:43 2023 +0200
+++ b/rust/hg-core/Cargo.toml	Mon Aug 07 11:08:00 2023 +0200
@@ -26,10 +26,12 @@
 rayon = "1.7.0"
 regex = "1.7.0"
 self_cell = "1.0"
+serde = { version = "1.0", features = ["derive"] }
 sha-1 = "0.10.0"
 twox-hash = "1.6.3"
 same-file = "1.0.6"
 tempfile = "3.3.0"
+toml = "0.6"
 thread_local = "1.1.4"
 crossbeam-channel = "0.5.6"
 log = "0.4.17"
@@ -46,5 +48,5 @@
 default-features = false
 
 [dev-dependencies]
-clap = { version = "4.0.24", features = ["derive"] }
+clap = { version = "~4.0", features = ["derive"] }
 pretty_assertions = "1.1.0"
--- a/rust/hg-core/src/config/config.rs	Mon Aug 07 11:05:43 2023 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,1 +0,0 @@
-
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/rust/hg-core/src/config/config_items.rs	Mon Aug 07 11:08:00 2023 +0200
@@ -0,0 +1,692 @@
+//! Code for parsing default Mercurial config items.
+use itertools::Itertools;
+use serde::Deserialize;
+
+use crate::{errors::HgError, exit_codes, FastHashMap};
+
+/// Corresponds to the structure of `mercurial/configitems.toml`.
+#[derive(Debug, Deserialize)]
+pub struct ConfigItems {
+    items: Vec<DefaultConfigItem>,
+    templates: FastHashMap<String, Vec<TemplateItem>>,
+    #[serde(rename = "template-applications")]
+    template_applications: Vec<TemplateApplication>,
+}
+
+/// Corresponds to a config item declaration in `mercurial/configitems.toml`.
+#[derive(Clone, Debug, PartialEq, Deserialize)]
+#[serde(try_from = "RawDefaultConfigItem")]
+pub struct DefaultConfigItem {
+    /// Section of the config the item is in (e.g. `[merge-tools]`)
+    section: String,
+    /// Name of the item (e.g. `meld.gui`)
+    name: String,
+    /// Default value (can be dynamic, see [`DefaultConfigItemType`])
+    default: Option<DefaultConfigItemType>,
+    /// If the config option is generic (e.g. `merge-tools.*`), defines
+    /// the priority of this item relative to other generic items.
+    /// When an item is looked up, all generic items within the same section
+    /// are sorted by order of priority, and the first one whose `name`
+    /// pattern matches the looked-up item is returned.
+    #[serde(default)]
+    priority: Option<isize>,
+    /// Aliases, if any. Each alias is a tuple of `(section, name)` for each
+    /// option that is aliased to this one.
+    #[serde(default)]
+    alias: Vec<(String, String)>,
+    /// Whether the config item is marked as experimental
+    #[serde(default)]
+    experimental: bool,
+    /// The (possibly empty) docstring for the item
+    #[serde(default)]
+    documentation: String,
+    /// Whether the item is part of an in-core extension. This allows us to
+    /// hide them if the extension is not enabled, to preserve legacy
+    /// behavior.
+    #[serde(default)]
+    in_core_extension: Option<String>,
+}
+
+/// Corresponds to the raw (i.e. on disk) structure of config items. Used as
+/// an intermediate step in deserialization.
+#[derive(Clone, Debug, Deserialize)]
+struct RawDefaultConfigItem {
+    section: String,
+    name: String,
+    default: Option<toml::Value>,
+    #[serde(rename = "default-type")]
+    default_type: Option<String>,
+    #[serde(default)]
+    priority: isize,
+    #[serde(default)]
+    generic: bool,
+    #[serde(default)]
+    alias: Vec<(String, String)>,
+    #[serde(default)]
+    experimental: bool,
+    #[serde(default)]
+    documentation: String,
+    #[serde(default)]
+    in_core_extension: Option<String>,
+}
+
+impl TryFrom<RawDefaultConfigItem> for DefaultConfigItem {
+    type Error = HgError;
+
+    fn try_from(value: RawDefaultConfigItem) -> Result<Self, Self::Error> {
+        Ok(Self {
+            section: value.section,
+            name: value.name,
+            default: raw_default_to_concrete(
+                value.default_type,
+                value.default,
+            )?,
+            priority: if value.generic {
+                Some(value.priority)
+            } else {
+                None
+            },
+            alias: value.alias,
+            experimental: value.experimental,
+            documentation: value.documentation,
+            in_core_extension: value.in_core_extension,
+        })
+    }
+}
+
+impl DefaultConfigItem {
+    fn is_generic(&self) -> bool {
+        self.priority.is_some()
+    }
+
+    pub fn in_core_extension(&self) -> Option<&str> {
+        self.in_core_extension.as_deref()
+    }
+
+    pub fn section(&self) -> &str {
+        self.section.as_ref()
+    }
+}
+
+impl<'a> TryFrom<&'a DefaultConfigItem> for Option<&'a str> {
+    type Error = HgError;
+
+    fn try_from(
+        value: &'a DefaultConfigItem,
+    ) -> Result<Option<&'a str>, Self::Error> {
+        match &value.default {
+            Some(default) => {
+                let err = HgError::abort(
+                    format!(
+                        "programming error: wrong query on config item '{}.{}'",
+                        value.section,
+                        value.name
+                    ),
+                    exit_codes::ABORT,
+                    Some(format!(
+                        "asked for '&str', type of default is '{}'",
+                        default.type_str()
+                    )),
+                );
+                match default {
+                    DefaultConfigItemType::Primitive(toml::Value::String(
+                        s,
+                    )) => Ok(Some(s)),
+                    _ => Err(err),
+                }
+            }
+            None => Ok(None),
+        }
+    }
+}
+
+impl TryFrom<&DefaultConfigItem> for Option<bool> {
+    type Error = HgError;
+
+    fn try_from(value: &DefaultConfigItem) -> Result<Self, Self::Error> {
+        match &value.default {
+            Some(default) => {
+                let err = HgError::abort(
+                    format!(
+                        "programming error: wrong query on config item '{}.{}'",
+                        value.section,
+                        value.name
+                    ),
+                    exit_codes::ABORT,
+                    Some(format!(
+                        "asked for 'bool', type of default is '{}'",
+                        default.type_str()
+                    )),
+                );
+                match default {
+                    DefaultConfigItemType::Primitive(
+                        toml::Value::Boolean(b),
+                    ) => Ok(Some(*b)),
+                    _ => Err(err),
+                }
+            }
+            None => Ok(Some(false)),
+        }
+    }
+}
+
+impl TryFrom<&DefaultConfigItem> for Option<u32> {
+    type Error = HgError;
+
+    fn try_from(value: &DefaultConfigItem) -> Result<Self, Self::Error> {
+        match &value.default {
+            Some(default) => {
+                let err = HgError::abort(
+                    format!(
+                        "programming error: wrong query on config item '{}.{}'",
+                        value.section,
+                        value.name
+                    ),
+                    exit_codes::ABORT,
+                    Some(format!(
+                        "asked for 'u32', type of default is '{}'",
+                        default.type_str()
+                    )),
+                );
+                match default {
+                    DefaultConfigItemType::Primitive(
+                        toml::Value::Integer(b),
+                    ) => {
+                        Ok(Some((*b).try_into().expect("TOML integer to u32")))
+                    }
+                    _ => Err(err),
+                }
+            }
+            None => Ok(None),
+        }
+    }
+}
+
+impl TryFrom<&DefaultConfigItem> for Option<u64> {
+    type Error = HgError;
+
+    fn try_from(value: &DefaultConfigItem) -> Result<Self, Self::Error> {
+        match &value.default {
+            Some(default) => {
+                let err = HgError::abort(
+                    format!(
+                        "programming error: wrong query on config item '{}.{}'",
+                        value.section,
+                        value.name
+                    ),
+                    exit_codes::ABORT,
+                    Some(format!(
+                        "asked for 'u64', type of default is '{}'",
+                        default.type_str()
+                    )),
+                );
+                match default {
+                    DefaultConfigItemType::Primitive(
+                        toml::Value::Integer(b),
+                    ) => {
+                        Ok(Some((*b).try_into().expect("TOML integer to u64")))
+                    }
+                    _ => Err(err),
+                }
+            }
+            None => Ok(None),
+        }
+    }
+}
+
+/// Allows abstracting over more complex default values than just primitives.
+/// The former `configitems.py` contained some dynamic code that is encoded
+/// in this enum.
+#[derive(Debug, PartialEq, Clone, Deserialize)]
+pub enum DefaultConfigItemType {
+    /// Some primitive type (string, integer, boolean)
+    Primitive(toml::Value),
+    /// A dynamic value that will be given by the code at runtime
+    Dynamic,
+    /// A lazily-returned array (possibly only relevant in the Python impl)
+    /// Example: `lambda: [b"zstd", b"zlib"]`
+    Lambda(Vec<String>),
+    /// For now, a special case for `web.encoding` that points to the
+    /// `encoding.encoding` module in the Python impl so that local encoding
+    /// is correctly resolved at runtime
+    LazyModule(String),
+    ListType,
+}
+
+impl DefaultConfigItemType {
+    pub fn type_str(&self) -> &str {
+        match self {
+            DefaultConfigItemType::Primitive(primitive) => {
+                primitive.type_str()
+            }
+            DefaultConfigItemType::Dynamic => "dynamic",
+            DefaultConfigItemType::Lambda(_) => "lambda",
+            DefaultConfigItemType::LazyModule(_) => "lazy_module",
+            DefaultConfigItemType::ListType => "list_type",
+        }
+    }
+}
+
+/// Most of the fields are shared with [`DefaultConfigItem`].
+#[derive(Debug, Clone, Deserialize)]
+#[serde(try_from = "RawTemplateItem")]
+struct TemplateItem {
+    suffix: String,
+    default: Option<DefaultConfigItemType>,
+    priority: Option<isize>,
+    #[serde(default)]
+    alias: Vec<(String, String)>,
+    #[serde(default)]
+    experimental: bool,
+    #[serde(default)]
+    documentation: String,
+}
+
+/// Corresponds to the raw (i.e. on disk) representation of a template item.
+/// Used as an intermediate step in deserialization.
+#[derive(Clone, Debug, Deserialize)]
+struct RawTemplateItem {
+    suffix: String,
+    default: Option<toml::Value>,
+    #[serde(rename = "default-type")]
+    default_type: Option<String>,
+    #[serde(default)]
+    priority: isize,
+    #[serde(default)]
+    generic: bool,
+    #[serde(default)]
+    alias: Vec<(String, String)>,
+    #[serde(default)]
+    experimental: bool,
+    #[serde(default)]
+    documentation: String,
+}
+
+impl TemplateItem {
+    fn into_default_item(
+        self,
+        application: TemplateApplication,
+    ) -> DefaultConfigItem {
+        DefaultConfigItem {
+            section: application.section,
+            name: application
+                .prefix
+                .map(|prefix| format!("{}.{}", prefix, self.suffix))
+                .unwrap_or(self.suffix),
+            default: self.default,
+            priority: self.priority,
+            alias: self.alias,
+            experimental: self.experimental,
+            documentation: self.documentation,
+            in_core_extension: None,
+        }
+    }
+}
+
+impl TryFrom<RawTemplateItem> for TemplateItem {
+    type Error = HgError;
+
+    fn try_from(value: RawTemplateItem) -> Result<Self, Self::Error> {
+        Ok(Self {
+            suffix: value.suffix,
+            default: raw_default_to_concrete(
+                value.default_type,
+                value.default,
+            )?,
+            priority: if value.generic {
+                Some(value.priority)
+            } else {
+                None
+            },
+            alias: value.alias,
+            experimental: value.experimental,
+            documentation: value.documentation,
+        })
+    }
+}
+
+/// Transforms the on-disk string-based representation of complex default types
+/// to the concrete [`DefaultConfigItemType`].
+fn raw_default_to_concrete(
+    default_type: Option<String>,
+    default: Option<toml::Value>,
+) -> Result<Option<DefaultConfigItemType>, HgError> {
+    Ok(match default_type.as_deref() {
+        None => default.as_ref().map(|default| {
+            DefaultConfigItemType::Primitive(default.to_owned())
+        }),
+        Some("dynamic") => Some(DefaultConfigItemType::Dynamic),
+        Some("list_type") => Some(DefaultConfigItemType::ListType),
+        Some("lambda") => match &default {
+            Some(default) => Some(DefaultConfigItemType::Lambda(
+                default.to_owned().try_into().map_err(|e| {
+                    HgError::abort(
+                        e.to_string(),
+                        exit_codes::ABORT,
+                        Some("Check 'mercurial/configitems.toml'".into()),
+                    )
+                })?,
+            )),
+            None => {
+                return Err(HgError::abort(
+                    "lambda defined with no return value".to_string(),
+                    exit_codes::ABORT,
+                    Some("Check 'mercurial/configitems.toml'".into()),
+                ))
+            }
+        },
+        Some("lazy_module") => match &default {
+            Some(default) => {
+                Some(DefaultConfigItemType::LazyModule(match default {
+                    toml::Value::String(module) => module.to_owned(),
+                    _ => {
+                        return Err(HgError::abort(
+                            "lazy_module module name should be a string"
+                                .to_string(),
+                            exit_codes::ABORT,
+                            Some("Check 'mercurial/configitems.toml'".into()),
+                        ))
+                    }
+                }))
+            }
+            None => {
+                return Err(HgError::abort(
+                    "lazy_module should have a default value".to_string(),
+                    exit_codes::ABORT,
+                    Some("Check 'mercurial/configitems.toml'".into()),
+                ))
+            }
+        },
+        Some(invalid) => {
+            return Err(HgError::abort(
+                format!("invalid default_type '{}'", invalid),
+                exit_codes::ABORT,
+                Some("Check 'mercurial/configitems.toml'".into()),
+            ))
+        }
+    })
+}
+
+#[derive(Debug, Clone, Deserialize)]
+struct TemplateApplication {
+    template: String,
+    section: String,
+    #[serde(default)]
+    prefix: Option<String>,
+}
+
+/// Represents the (dynamic) set of default core Mercurial config items from
+/// `mercurial/configitems.toml`.
+#[derive(Clone, Debug, Default)]
+pub struct DefaultConfig {
+    /// Mapping of section -> (mapping of name -> item)
+    items: FastHashMap<String, FastHashMap<String, DefaultConfigItem>>,
+}
+
+impl DefaultConfig {
+    pub fn empty() -> DefaultConfig {
+        Self {
+            items: Default::default(),
+        }
+    }
+
+    /// Returns `Self`, given the contents of `mercurial/configitems.toml`
+    #[logging_timer::time("trace")]
+    pub fn from_contents(contents: &str) -> Result<Self, HgError> {
+        let mut from_file: ConfigItems =
+            toml::from_str(contents).map_err(|e| {
+                HgError::abort(
+                    e.to_string(),
+                    exit_codes::ABORT,
+                    Some("Check 'mercurial/configitems.toml'".into()),
+                )
+            })?;
+
+        let mut flat_items = from_file.items;
+
+        for application in from_file.template_applications.drain(..) {
+            match from_file.templates.get(&application.template) {
+                None => return Err(
+                    HgError::abort(
+                        format!(
+                            "template application refers to undefined template '{}'",
+                            application.template
+                        ),
+                        exit_codes::ABORT,
+                        Some("Check 'mercurial/configitems.toml'".into())
+                    )
+                ),
+                Some(template_items) => {
+                    for template_item in template_items {
+                        flat_items.push(
+                            template_item
+                                .clone()
+                                .into_default_item(application.clone()),
+                        )
+                    }
+                }
+            };
+        }
+
+        let items = flat_items.into_iter().fold(
+            FastHashMap::default(),
+            |mut acc, item| {
+                acc.entry(item.section.to_owned())
+                    .or_insert_with(|| {
+                        let mut section = FastHashMap::default();
+                        section.insert(item.name.to_owned(), item.to_owned());
+                        section
+                    })
+                    .insert(item.name.to_owned(), item);
+                acc
+            },
+        );
+
+        Ok(Self { items })
+    }
+
+    /// Return the default config item that matches `section` and `item`.
+    pub fn get(
+        &self,
+        section: &[u8],
+        item: &[u8],
+    ) -> Option<&DefaultConfigItem> {
+        // Core items must be valid UTF-8
+        let section = String::from_utf8_lossy(section);
+        let section_map = self.items.get(section.as_ref())?;
+        let item_name_lossy = String::from_utf8_lossy(item);
+        match section_map.get(item_name_lossy.as_ref()) {
+            Some(item) => Some(item),
+            None => {
+                for generic_item in section_map
+                    .values()
+                    .filter(|item| item.is_generic())
+                    .sorted_by_key(|item| match item.priority {
+                        Some(priority) => (priority, &item.name),
+                        _ => unreachable!(),
+                    })
+                {
+                    if regex::bytes::Regex::new(&generic_item.name)
+                        .expect("invalid regex in configitems")
+                        .is_match(item)
+                    {
+                        return Some(generic_item);
+                    }
+                }
+                None
+            }
+        }
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use crate::config::config_items::{
+        DefaultConfigItem, DefaultConfigItemType,
+    };
+
+    use super::DefaultConfig;
+
+    #[test]
+    fn test_config_read() {
+        let contents = r#"
+[[items]]
+section = "alias"
+name = "abcd.*"
+default = 3
+generic = true
+priority = -1
+
+[[items]]
+section = "alias"
+name = ".*"
+default-type = "dynamic"
+generic = true
+
+[[items]]
+section = "cmdserver"
+name = "track-log"
+default-type = "lambda"
+default = [ "chgserver", "cmdserver", "repocache",]
+
+[[items]]
+section = "chgserver"
+name = "idletimeout"
+default = 3600
+
+[[items]]
+section = "cmdserver"
+name = "message-encodings"
+default-type = "list_type"
+
+[[items]]
+section = "web"
+name = "encoding"
+default-type = "lazy_module"
+default = "encoding.encoding"
+
+[[items]]
+section = "command-templates"
+name = "graphnode"
+alias = [["ui", "graphnodetemplate"]]
+documentation = """This is a docstring.
+This is another line \
+but this is not."""
+
+[[items]]
+section = "censor"
+name = "policy"
+default = "abort"
+experimental = true
+
+[[template-applications]]
+template = "diff-options"
+section = "commands"
+prefix = "revert.interactive"
+
+[[template-applications]]
+template = "diff-options"
+section = "diff"
+
+[templates]
+[[templates.diff-options]]
+suffix = "nodates"
+default = false
+
+[[templates.diff-options]]
+suffix = "showfunc"
+default = false
+
+[[templates.diff-options]]
+suffix = "unified"
+"#;
+        let res = DefaultConfig::from_contents(contents);
+        let config = match res {
+            Ok(config) => config,
+            Err(e) => panic!("{}", e),
+        };
+        let expected = DefaultConfigItem {
+            section: "censor".into(),
+            name: "policy".into(),
+            default: Some(DefaultConfigItemType::Primitive("abort".into())),
+            priority: None,
+            alias: vec![],
+            experimental: true,
+            documentation: "".into(),
+            in_core_extension: None,
+        };
+        assert_eq!(config.get(b"censor", b"policy"), Some(&expected));
+
+        // Test generic priority. The `.*` pattern is wider than `abcd.*`, but
+        // `abcd.*` has priority, so it should match first.
+        let expected = DefaultConfigItem {
+            section: "alias".into(),
+            name: "abcd.*".into(),
+            default: Some(DefaultConfigItemType::Primitive(3.into())),
+            priority: Some(-1),
+            alias: vec![],
+            experimental: false,
+            documentation: "".into(),
+            in_core_extension: None,
+        };
+        assert_eq!(config.get(b"alias", b"abcdsomething"), Some(&expected));
+
+        // ... but if it doesn't match, we should fall back to `.*`
+        let expected = DefaultConfigItem {
+            section: "alias".into(),
+            name: ".*".into(),
+            default: Some(DefaultConfigItemType::Dynamic),
+            priority: Some(0),
+            alias: vec![],
+            experimental: false,
+            documentation: "".into(),
+            in_core_extension: None,
+        };
+        assert_eq!(config.get(b"alias", b"something"), Some(&expected));
+
+        let expected = DefaultConfigItem {
+            section: "chgserver".into(),
+            name: "idletimeout".into(),
+            default: Some(DefaultConfigItemType::Primitive(3600.into())),
+            priority: None,
+            alias: vec![],
+            experimental: false,
+            documentation: "".into(),
+            in_core_extension: None,
+        };
+        assert_eq!(config.get(b"chgserver", b"idletimeout"), Some(&expected));
+
+        let expected = DefaultConfigItem {
+            section: "cmdserver".into(),
+            name: "track-log".into(),
+            default: Some(DefaultConfigItemType::Lambda(vec![
+                "chgserver".into(),
+                "cmdserver".into(),
+                "repocache".into(),
+            ])),
+            priority: None,
+            alias: vec![],
+            experimental: false,
+            documentation: "".into(),
+            in_core_extension: None,
+        };
+        assert_eq!(config.get(b"cmdserver", b"track-log"), Some(&expected));
+
+        let expected = DefaultConfigItem {
+            section: "command-templates".into(),
+            name: "graphnode".into(),
+            default: None,
+            priority: None,
+            alias: vec![("ui".into(), "graphnodetemplate".into())],
+            experimental: false,
+            documentation:
+                "This is a docstring.\nThis is another line but this is not."
+                    .into(),
+            in_core_extension: None,
+        };
+        assert_eq!(
+            config.get(b"command-templates", b"graphnode"),
+            Some(&expected)
+        );
+    }
+}
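
To make the precedence rules in `DefaultConfig::get` above easier to follow, here is a rough, illustrative Python sketch of the same lookup (the names and the dict-based item representation are placeholders, not part of the change):

    import re

    def lookup(section_items, name):
        # Exact names win outright.
        if name in section_items:
            return section_items[name]
        # Otherwise, generic items are tried in ascending priority order
        # (ties broken by pattern name); the first pattern found anywhere
        # in the requested name wins.
        generics = sorted(
            (i for i in section_items.values() if i.get("priority") is not None),
            key=lambda i: (i["priority"], i["name"]),
        )
        for item in generics:
            if re.search(item["name"], name):
                return item
        return None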
--- a/rust/hg-core/src/config/layer.rs	Mon Aug 07 11:05:43 2023 +0200
+++ b/rust/hg-core/src/config/layer.rs	Mon Aug 07 11:08:00 2023 +0200
@@ -304,8 +304,9 @@
     CommandLineColor,
     /// From environment variables like `$PAGER` or `$EDITOR`
     Environment(Vec<u8>),
-    /* TODO defaults (configitems.py)
-     * TODO extensions
+    /// From configitems.toml
+    Defaults,
+    /* TODO extensions
      * TODO Python resources?
      * Others? */
 }
@@ -323,6 +324,9 @@
             ConfigOrigin::Tweakdefaults => {
                 write_bytes!(out, b"ui.tweakdefaults")
             }
+            ConfigOrigin::Defaults => {
+                write_bytes!(out, b"configitems.toml")
+            }
         }
     }
 }
--- a/rust/hg-core/src/config/mod.rs	Mon Aug 07 11:05:43 2023 +0200
+++ b/rust/hg-core/src/config/mod.rs	Mon Aug 07 11:08:00 2023 +0200
@@ -9,14 +9,19 @@
 
 //! Mercurial config parsing and interfaces.
 
+pub mod config_items;
 mod layer;
 mod plain_info;
 mod values;
 pub use layer::{ConfigError, ConfigOrigin, ConfigParseError};
+use lazy_static::lazy_static;
 pub use plain_info::PlainInfo;
 
+use self::config_items::DefaultConfig;
+use self::config_items::DefaultConfigItem;
 use self::layer::ConfigLayer;
 use self::layer::ConfigValue;
+use crate::errors::HgError;
 use crate::errors::{HgResultExt, IoResultExt};
 use crate::utils::files::get_bytes_from_os_str;
 use format_bytes::{write_bytes, DisplayBytes};
@@ -26,6 +31,14 @@
 use std::path::{Path, PathBuf};
 use std::str;
 
+lazy_static! {
+    static ref DEFAULT_CONFIG: Result<DefaultConfig, HgError> = {
+        DefaultConfig::from_contents(include_str!(
+            "../../../../mercurial/configitems.toml"
+        ))
+    };
+}
+
 /// Holds the config values for the current repository
 /// TODO update this docstring once we support more sources
 #[derive(Clone)]
@@ -347,13 +360,50 @@
         self.plain = plain;
     }
 
+    /// Returns the default value for the given config item, if any.
+    pub fn get_default(
+        &self,
+        section: &[u8],
+        item: &[u8],
+    ) -> Result<Option<&DefaultConfigItem>, HgError> {
+        let default_config = DEFAULT_CONFIG.as_ref().map_err(|e| {
+            HgError::abort(
+                e.to_string(),
+                crate::exit_codes::ABORT,
+                Some("`mercurial/configitems.toml` is not valid".into()),
+            )
+        })?;
+        let default_opt = default_config.get(section, item);
+        Ok(default_opt.filter(|default| {
+            default
+                .in_core_extension()
+                .map(|extension| {
+                    // Only return the default for an in-core extension item
+                    // if said extension is enabled
+                    self.is_extension_enabled(extension.as_bytes())
+                })
+                .unwrap_or(true)
+        }))
+    }
+
+    /// Return the value of the config item that corresponds to a section +
+    /// item pair, parsed from its raw bytes with `parse` into the expected
+    /// type (`expected_type` is a string only to make debugging easier).
+    /// Used by higher-level methods like `get_bool`.
+    ///
+    /// `fallback_to_default` controls whether the default value (if any) is
+    /// returned if nothing is found.
     fn get_parse<'config, T: 'config>(
         &'config self,
         section: &[u8],
         item: &[u8],
         expected_type: &'static str,
         parse: impl Fn(&'config [u8]) -> Option<T>,
-    ) -> Result<Option<T>, ConfigValueParseError> {
+        fallback_to_default: bool,
+    ) -> Result<Option<T>, HgError>
+    where
+        Option<T>: TryFrom<&'config DefaultConfigItem, Error = HgError>,
+    {
         match self.get_inner(section, item) {
             Some((layer, v)) => match parse(&v.bytes) {
                 Some(b) => Ok(Some(b)),
@@ -364,22 +414,82 @@
                     section: section.to_owned(),
                     item: item.to_owned(),
                     expected_type,
-                })),
+                })
+                .into()),
             },
-            None => Ok(None),
+            None => {
+                if !fallback_to_default {
+                    return Ok(None);
+                }
+                match self.get_default(section, item)? {
+                    Some(default) => Ok(default.try_into()?),
+                    None => {
+                        self.print_devel_warning(section, item)?;
+                        Ok(None)
+                    }
+                }
+            }
         }
     }
 
+    fn print_devel_warning(
+        &self,
+        section: &[u8],
+        item: &[u8],
+    ) -> Result<(), HgError> {
+        let warn_all = self.get_bool(b"devel", b"all-warnings")?;
+        let warn_specific = self.get_bool(b"devel", b"warn-config-unknown")?;
+        if warn_all || warn_specific {
+            // We technically shouldn't print anything here since it's not
+            // the concern of `hg-core`.
+            //
+            // We're printing directly to stderr since development warnings
+            // are not on by default and surfacing this to consumer crates
+            // (like `rhg`) would be more difficult, probably requiring
+            // something à la `log` crate.
+            //
+            // TODO maybe figure out a way of exposing a "warnings" channel
+            // that consumer crates can hook into. It would be useful for
+            // all other warnings that `hg-core` could expose.
+            eprintln!(
+                "devel-warn: accessing unregistered config item: '{}.{}'",
+                String::from_utf8_lossy(section),
+                String::from_utf8_lossy(item),
+            );
+        }
+        Ok(())
+    }
+
     /// Returns an `Err` if the first value found is not a valid UTF-8 string.
     /// Otherwise, returns an `Ok(value)` if found, or `None`.
     pub fn get_str(
         &self,
         section: &[u8],
         item: &[u8],
-    ) -> Result<Option<&str>, ConfigValueParseError> {
-        self.get_parse(section, item, "ASCII or UTF-8 string", |value| {
-            str::from_utf8(value).ok()
-        })
+    ) -> Result<Option<&str>, HgError> {
+        self.get_parse(
+            section,
+            item,
+            "ASCII or UTF-8 string",
+            |value| str::from_utf8(value).ok(),
+            true,
+        )
+    }
+
+    /// Same as `get_str`, but doesn't fall back to the default `configitem`
+    /// if not defined in the user config.
+    pub fn get_str_no_default(
+        &self,
+        section: &[u8],
+        item: &[u8],
+    ) -> Result<Option<&str>, HgError> {
+        self.get_parse(
+            section,
+            item,
+            "ASCII or UTF-8 string",
+            |value| str::from_utf8(value).ok(),
+            false,
+        )
     }
 
     /// Returns an `Err` if the first value found is not a valid unsigned
@@ -388,10 +498,14 @@
         &self,
         section: &[u8],
         item: &[u8],
-    ) -> Result<Option<u32>, ConfigValueParseError> {
-        self.get_parse(section, item, "valid integer", |value| {
-            str::from_utf8(value).ok()?.parse().ok()
-        })
+    ) -> Result<Option<u32>, HgError> {
+        self.get_parse(
+            section,
+            item,
+            "valid integer",
+            |value| str::from_utf8(value).ok()?.parse().ok(),
+            true,
+        )
     }
 
     /// Returns an `Err` if the first value found is not a valid file size
@@ -401,8 +515,14 @@
         &self,
         section: &[u8],
         item: &[u8],
-    ) -> Result<Option<u64>, ConfigValueParseError> {
-        self.get_parse(section, item, "byte quantity", values::parse_byte_size)
+    ) -> Result<Option<u64>, HgError> {
+        self.get_parse(
+            section,
+            item,
+            "byte quantity",
+            values::parse_byte_size,
+            true,
+        )
     }
 
     /// Returns an `Err` if the first value found is not a valid boolean.
@@ -412,8 +532,18 @@
         &self,
         section: &[u8],
         item: &[u8],
-    ) -> Result<Option<bool>, ConfigValueParseError> {
-        self.get_parse(section, item, "boolean", values::parse_bool)
+    ) -> Result<Option<bool>, HgError> {
+        self.get_parse(section, item, "boolean", values::parse_bool, true)
+    }
+
+    /// Same as `get_option`, but doesn't fall back to the default `configitem`
+    /// if not defined in the user config.
+    pub fn get_option_no_default(
+        &self,
+        section: &[u8],
+        item: &[u8],
+    ) -> Result<Option<bool>, HgError> {
+        self.get_parse(section, item, "boolean", values::parse_bool, false)
     }
 
     /// Returns the corresponding boolean in the config. Returns `Ok(false)`
@@ -422,10 +552,20 @@
         &self,
         section: &[u8],
         item: &[u8],
-    ) -> Result<bool, ConfigValueParseError> {
+    ) -> Result<bool, HgError> {
         Ok(self.get_option(section, item)?.unwrap_or(false))
     }
 
+    /// Same as `get_bool`, but doesn't fall back to the default `configitem`
+    /// if not defined in the user config.
+    pub fn get_bool_no_default(
+        &self,
+        section: &[u8],
+        item: &[u8],
+    ) -> Result<bool, HgError> {
+        Ok(self.get_option_no_default(section, item)?.unwrap_or(false))
+    }
+
     /// Returns `true` if the extension is enabled, `false` otherwise
     pub fn is_extension_enabled(&self, extension: &[u8]) -> bool {
         let value = self.get(b"extensions", extension);
--- a/rust/rhg/src/blackbox.rs	Mon Aug 07 11:05:43 2023 +0200
+++ b/rust/rhg/src/blackbox.rs	Mon Aug 07 11:08:00 2023 +0200
@@ -7,12 +7,6 @@
 use hg::utils::{files::get_bytes_from_os_str, shell_quote};
 use std::ffi::OsString;
 
-const ONE_MEBIBYTE: u64 = 1 << 20;
-
-// TODO: somehow keep defaults in sync with `configitem` in `hgext/blackbox.py`
-const DEFAULT_MAX_SIZE: u64 = ONE_MEBIBYTE;
-const DEFAULT_MAX_FILES: u32 = 7;
-
 // Python does not support %.3f, only %f
 const DEFAULT_DATE_FORMAT: &str = "%Y-%m-%d %H:%M:%S%.3f";
 
@@ -53,8 +47,7 @@
         process_start_time: &'a ProcessStartTime,
     ) -> Result<Self, HgError> {
         let configured = if let Ok(repo) = invocation.repo {
-            if invocation.config.get(b"extensions", b"blackbox").is_none() {
-                // The extension is not enabled
+            if !invocation.config.is_extension_enabled(b"blackbox") {
                 None
             } else {
                 Some(ConfiguredBlackbox {
@@ -62,15 +55,28 @@
                     max_size: invocation
                         .config
                         .get_byte_size(b"blackbox", b"maxsize")?
-                        .unwrap_or(DEFAULT_MAX_SIZE),
+                        .expect(
+                            "blackbox.maxsize should have a default value",
+                        ),
                     max_files: invocation
                         .config
                         .get_u32(b"blackbox", b"maxfiles")?
-                        .unwrap_or(DEFAULT_MAX_FILES),
+                        .expect(
+                            "blackbox.maxfiles should have a default value",
+                        ),
                     date_format: invocation
                         .config
                         .get_str(b"blackbox", b"date-format")?
-                        .unwrap_or(DEFAULT_DATE_FORMAT),
+                        .map(|f| {
+                            if f.is_empty() {
+                                DEFAULT_DATE_FORMAT
+                            } else {
+                                f
+                            }
+                        })
+                        .expect(
+                            "blackbox.date-format should have a default value",
+                        ),
                 })
             }
         } else {
--- a/rust/rhg/src/commands/cat.rs	Mon Aug 07 11:05:43 2023 +0200
+++ b/rust/rhg/src/commands/cat.rs	Mon Aug 07 11:08:00 2023 +0200
@@ -32,9 +32,8 @@
 
 #[logging_timer::time("trace")]
 pub fn run(invocation: &crate::CliInvocation) -> Result<(), CommandError> {
-    let cat_enabled_default = true;
-    let cat_enabled = invocation.config.get_option(b"rhg", b"cat")?;
-    if !cat_enabled.unwrap_or(cat_enabled_default) {
+    let cat_enabled = invocation.config.get_bool(b"rhg", b"cat")?;
+    if !cat_enabled {
         return Err(CommandError::unsupported(
             "cat is disabled in rhg (enable it with 'rhg.cat = true' \
             or enable fallback with 'rhg.on-unsupported = fallback')",
--- a/rust/rhg/src/commands/status.rs	Mon Aug 07 11:05:43 2023 +0200
+++ b/rust/rhg/src/commands/status.rs	Mon Aug 07 11:08:00 2023 +0200
@@ -7,7 +7,8 @@
 
 use crate::error::CommandError;
 use crate::ui::{
-    format_pattern_file_warning, print_narrow_sparse_warnings, Ui,
+    format_pattern_file_warning, print_narrow_sparse_warnings, relative_paths,
+    RelativePaths, Ui,
 };
 use crate::utils::path_utils::RelativizePaths;
 use clap::Arg;
@@ -360,13 +361,26 @@
                 }
             }
         }
-        let relative_paths = config
+
+        let relative_status = config
             .get_option(b"commands", b"status.relative")?
-            .unwrap_or(config.get_bool(b"ui", b"relative-paths")?);
+            .expect("commands.status.relative should have a default value");
+
+        let relativize_paths = relative_status || {
+            // TODO should be dependent on whether patterns are passed once
+            // we support those.
+            // See in Python code with `getuipathfn` usage in `commands.py`.
+            let legacy_relative_behavior = false;
+            match relative_paths(invocation.config)? {
+                RelativePaths::Legacy => legacy_relative_behavior,
+                RelativePaths::Bool(v) => v,
+            }
+        };
+
         let output = DisplayStatusPaths {
             ui,
             no_status,
-            relativize: if relative_paths {
+            relativize: if relativize_paths {
                 Some(RelativizePaths::new(repo)?)
             } else {
                 None
--- a/rust/rhg/src/main.rs	Mon Aug 07 11:05:43 2023 +0200
+++ b/rust/rhg/src/main.rs	Mon Aug 07 11:08:00 2023 +0200
@@ -86,7 +86,8 @@
         // Mercurial allows users to define generic hooks for commands,
         // fallback if any are detected
         let item = format!("{}-{}", prefix, subcommand_name);
-        let hook_for_command = config.get_str(b"hooks", item.as_bytes())?;
+        let hook_for_command =
+            config.get_str_no_default(b"hooks", item.as_bytes())?;
         if hook_for_command.is_some() {
             let msg = format!("{}-{} hook defined", prefix, subcommand_name);
             return Err(CommandError::unsupported(msg));
@@ -349,11 +350,7 @@
             &argv,
             &initial_current_dir,
             &ui,
-            OnUnsupported::Fallback {
-                executable: config
-                    .get(b"rhg", b"fallback-executable")
-                    .map(ToOwned::to_owned),
-            },
+            OnUnsupported::fallback(config),
             Err(CommandError::unsupported(
                 "`rhg.fallback-immediately is true`",
             )),
@@ -662,6 +659,18 @@
 impl OnUnsupported {
     const DEFAULT: Self = OnUnsupported::Abort;
 
+    fn fallback_executable(config: &Config) -> Option<Vec<u8>> {
+        config
+            .get(b"rhg", b"fallback-executable")
+            .map(|x| x.to_owned())
+    }
+
+    fn fallback(config: &Config) -> Self {
+        OnUnsupported::Fallback {
+            executable: Self::fallback_executable(config),
+        }
+    }
+
     fn from_config(config: &Config) -> Self {
         match config
             .get(b"rhg", b"on-unsupported")
@@ -670,11 +679,7 @@
         {
             Some(b"abort") => OnUnsupported::Abort,
             Some(b"abort-silent") => OnUnsupported::AbortSilent,
-            Some(b"fallback") => OnUnsupported::Fallback {
-                executable: config
-                    .get(b"rhg", b"fallback-executable")
-                    .map(|x| x.to_owned()),
-            },
+            Some(b"fallback") => Self::fallback(config),
             None => Self::DEFAULT,
             Some(_) => {
                 // TODO: warn about unknown config value
--- a/setup.py	Mon Aug 07 11:05:43 2023 +0200
+++ b/setup.py	Mon Aug 07 11:08:00 2023 +0200
@@ -1306,6 +1306,7 @@
     'mercurial.templates',
     'mercurial.thirdparty',
     'mercurial.thirdparty.attr',
+    'mercurial.thirdparty.tomli',
     'mercurial.thirdparty.zope',
     'mercurial.thirdparty.zope.interface',
     'mercurial.upgrade_utils',
@@ -1320,7 +1321,6 @@
     'hgext.git',
     'hgext.highlight',
     'hgext.hooklib',
-    'hgext.infinitepush',
     'hgext.largefiles',
     'hgext.lfs',
     'hgext.narrow',
@@ -1643,6 +1643,7 @@
 
 packagedata = {
     'mercurial': [
+        'configitems.toml',
         'locale/*/LC_MESSAGES/hg.mo',
         'dummycert.pem',
     ],
--- a/tests/library-infinitepush.sh	Mon Aug 07 11:05:43 2023 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,33 +0,0 @@
-scratchnodes() {
-  for node in `find ../repo/.hg/scratchbranches/index/nodemap/* | sort`; do
-     echo ${node##*/} `cat $node`
-  done
-}
-
-scratchbookmarks() {
-  for bookmark in `find ../repo/.hg/scratchbranches/index/bookmarkmap/* -type f | sort`; do
-     echo "${bookmark##*/bookmarkmap/} `cat $bookmark`"
-  done
-}
-
-setupcommon() {
-  cat >> $HGRCPATH << EOF
-[extensions]
-infinitepush=
-[infinitepush]
-branchpattern=re:scratch/.*
-deprecation-abort=no
-deprecation-message=yes
-
-EOF
-}
-
-setupserver() {
-cat >> .hg/hgrc << EOF
-[infinitepush]
-server=yes
-indextype=disk
-storetype=disk
-reponame=babar
-EOF
-}
--- a/tests/test-bundle2-exchange.t	Mon Aug 07 11:05:43 2023 +0200
+++ b/tests/test-bundle2-exchange.t	Mon Aug 07 11:08:00 2023 +0200
@@ -917,7 +917,7 @@
   >         raise error.Abort(b"Lock should not be taken")
   >     return orig(repo, *args, **kwargs)
   > def extsetup(ui):
-  >    extensions.wrapfunction(bundle2, b'processbundle', checklock)
+  >    extensions.wrapfunction(bundle2, 'processbundle', checklock)
   > EOF
 
   $ hg init lazylock
--- a/tests/test-byteify-strings.t	Mon Aug 07 11:05:43 2023 +0200
+++ b/tests/test-byteify-strings.t	Mon Aug 07 11:08:00 2023 +0200
@@ -110,19 +110,6 @@
   > def f():
   >     pass
   > EOF
-  $ byteify_strings testfile.py --allow-attr-methods
-  setattr(o, 'a', 1)
-  util.setattr(o, 'ae', 1)
-  util.getattr(o, 'alksjdf', b'default')
-  util.addattr(o, 'asdf')
-  util.hasattr(o, 'lksjdf', b'default')
-  util.safehasattr(o, 'lksjdf', b'default')
-  @eh.wrapfunction(func, 'lksjdf')
-  def f():
-      pass
-  @eh.wrapclass(klass, 'lksjdf')
-  def f():
-      pass
 
 Test without attr*() as methods
 
@@ -142,15 +129,15 @@
   > EOF
   $ byteify_strings testfile.py
   setattr(o, 'a', 1)
-  util.setattr(o, b'ae', 1)
-  util.getattr(o, b'alksjdf', b'default')
-  util.addattr(o, b'asdf')
-  util.hasattr(o, b'lksjdf', b'default')
-  util.safehasattr(o, b'lksjdf', b'default')
-  @eh.wrapfunction(func, b'lksjdf')
+  util.setattr(o, 'ae', 1)
+  util.getattr(o, 'alksjdf', b'default')
+  util.addattr(o, 'asdf')
+  util.hasattr(o, 'lksjdf', b'default')
+  util.safehasattr(o, 'lksjdf', b'default')
+  @eh.wrapfunction(func, 'lksjdf')
   def f():
       pass
-  @eh.wrapclass(klass, b'lksjdf')
+  @eh.wrapclass(klass, 'lksjdf')
   def f():
       pass
 
--- a/tests/test-check-py3-compat.t	Mon Aug 07 11:05:43 2023 +0200
+++ b/tests/test-check-py3-compat.t	Mon Aug 07 11:08:00 2023 +0200
@@ -10,7 +10,6 @@
   > | sed 's|\\|/|g' | xargs "$PYTHON" contrib/check-py3-compat.py \
   > | sed 's/[0-9][0-9]*)$/*)/'
   hgext/convert/transport.py: error importing: <*Error> No module named 'svn.client' (error at transport.py:*) (glob) (?)
-  hgext/infinitepush/sqlindexapi.py: error importing: <*Error> No module named 'mysql' (error at sqlindexapi.py:*) (glob) (?)
   mercurial/scmwindows.py: error importing: <ValueError> _type_ 'v' not supported (error at win32.py:*) (no-windows !)
   mercurial/win32.py: error importing: <ValueError> _type_ 'v' not supported (error at win32.py:*) (no-windows !)
   mercurial/windows.py: error importing: <*Error> No module named 'msvcrt' (error at windows.py:*) (glob) (no-windows !)
--- a/tests/test-commandserver.t	Mon Aug 07 11:05:43 2023 +0200
+++ b/tests/test-commandserver.t	Mon Aug 07 11:08:00 2023 +0200
@@ -923,7 +923,7 @@
   >         raise Exception('crash')
   >     return orig(ui, repo, conn, createcmdserver, prereposetups)
   > def extsetup(ui):
-  >     extensions.wrapfunction(commandserver, b'_serverequest', _serverequest)
+  >     extensions.wrapfunction(commandserver, '_serverequest', _serverequest)
   > EOF
   $ cat <<EOF >> .hg/hgrc
   > [extensions]
--- a/tests/test-contrib-perf.t	Mon Aug 07 11:05:43 2023 +0200
+++ b/tests/test-contrib-perf.t	Mon Aug 07 11:08:00 2023 +0200
@@ -301,23 +301,38 @@
 
   $ hg perfparents --config perf.stub=no --config perf.run-limits='0.000000001-15'
   ! wall * comb * user * sys * (best of 15) (glob)
+  ! wall * comb * user * sys * (max of 15) (glob)
+  ! wall * comb * user * sys * (avg of 15) (glob)
+  ! wall * comb * user * sys * (median of 15) (glob)
 
 Multiple entries
 
   $ hg perfparents --config perf.stub=no --config perf.run-limits='500000-1, 0.000000001-50'
   ! wall * comb * user * sys * (best of 50) (glob)
+  ! wall * comb * user * sys 0.000000 (max of 50) (glob)
+  ! wall * comb * user * sys 0.000000 (avg of 50) (glob)
+  ! wall * comb * user * sys 0.000000 (median of 50) (glob)
 
 error cases are ignored
 
   $ hg perfparents --config perf.stub=no --config perf.run-limits='500, 0.000000001-50'
   malformatted run limit entry, missing "-": 500
   ! wall * comb * user * sys * (best of 50) (glob)
+  ! wall * comb * user * sys * (max of 50) (glob)
+  ! wall * comb * user * sys * (avg of 50) (glob)
+  ! wall * comb * user * sys * (median of 50) (glob)
   $ hg perfparents --config perf.stub=no --config perf.run-limits='aaa-120, 0.000000001-50'
   malformatted run limit entry, could not convert string to float: 'aaa': aaa-120
   ! wall * comb * user * sys * (best of 50) (glob)
+  ! wall * comb * user * sys * (max of 50) (glob)
+  ! wall * comb * user * sys * (avg of 50) (glob)
+  ! wall * comb * user * sys * (median of 50) (glob)
   $ hg perfparents --config perf.stub=no --config perf.run-limits='120-aaaaaa, 0.000000001-50'
   malformatted run limit entry, invalid literal for int() with base 10: 'aaaaaa': 120-aaaaaa
   ! wall * comb * user * sys * (best of 50) (glob)
+  ! wall * comb * user * sys * (max of 50) (glob)
+  ! wall * comb * user * sys * (avg of 50) (glob)
+  ! wall * comb * user * sys * (median of 50) (glob)
 
 test actual output
 ------------------
@@ -326,6 +341,9 @@
 
   $ hg perfheads --config perf.stub=no
   ! wall * comb * user * sys * (best of *) (glob)
+  ! wall * comb * user * sys * (max of *) (glob)
+  ! wall * comb * user * sys * (avg of *) (glob)
+  ! wall * comb * user * sys * (median of *) (glob)
 
 detailed output:
 
@@ -343,8 +361,23 @@
   $ hg perfheads --template json --config perf.stub=no
   [
    {
+    "avg.comb": *, (glob)
+    "avg.count": *, (glob)
+    "avg.sys": *, (glob)
+    "avg.user": *, (glob)
+    "avg.wall": *, (glob)
     "comb": *, (glob)
     "count": *, (glob)
+    "max.comb": *, (glob)
+    "max.count": *, (glob)
+    "max.sys": *, (glob)
+    "max.user": *, (glob)
+    "max.wall": *, (glob)
+    "median.comb": *, (glob)
+    "median.count": *, (glob)
+    "median.sys": *, (glob)
+    "median.user": *, (glob)
+    "median.wall": *, (glob)
     "sys": *, (glob)
     "user": *, (glob)
     "wall": * (glob)
@@ -386,13 +419,22 @@
 
   $ hg perfdiscovery . --config perf.stub=no --config perf.run-limits='0.000000001-1' --config perf.pre-run=0
   ! wall * comb * user * sys * (best of 1) (glob)
+  ! wall * comb * user * sys * (max of 1) (glob)
+  ! wall * comb * user * sys * (avg of 1) (glob)
+  ! wall * comb * user * sys * (median of 1) (glob)
   searching for changes
   $ hg perfdiscovery . --config perf.stub=no --config perf.run-limits='0.000000001-1' --config perf.pre-run=1
   ! wall * comb * user * sys * (best of 1) (glob)
+  ! wall * comb * user * sys * (max of 1) (glob)
+  ! wall * comb * user * sys * (avg of 1) (glob)
+  ! wall * comb * user * sys * (median of 1) (glob)
   searching for changes
   searching for changes
   $ hg perfdiscovery . --config perf.stub=no --config perf.run-limits='0.000000001-1' --config perf.pre-run=3
   ! wall * comb * user * sys * (best of 1) (glob)
+  ! wall * comb * user * sys * (max of 1) (glob)
+  ! wall * comb * user * sys * (avg of 1) (glob)
+  ! wall * comb * user * sys * (median of 1) (glob)
   searching for changes
   searching for changes
   searching for changes
--- a/tests/test-devel-warnings.t	Mon Aug 07 11:05:43 2023 +0200
+++ b/tests/test-devel-warnings.t	Mon Aug 07 11:08:00 2023 +0200
@@ -455,10 +455,10 @@
   > EOF
 
   $ hg --config "extensions.buggyconfig=${TESTTMP}/buggyconfig.py" buggyconfig
-  devel-warn: extension 'buggyconfig' overwrite config item 'ui.interactive' at: */mercurial/extensions.py:* (_loadextra) (glob) (no-pyoxidizer !)
-  devel-warn: extension 'buggyconfig' overwrite config item 'ui.quiet' at: */mercurial/extensions.py:* (_loadextra) (glob) (no-pyoxidizer !)
-  devel-warn: extension 'buggyconfig' overwrite config item 'ui.interactive' at: mercurial.extensions:* (_loadextra) (glob) (pyoxidizer !)
-  devel-warn: extension 'buggyconfig' overwrite config item 'ui.quiet' at: mercurial.extensions:* (_loadextra) (glob) (pyoxidizer !)
+  devel-warn: extension 'buggyconfig' overwrites config item 'ui.interactive' at: */mercurial/extensions.py:* (_loadextra) (glob) (no-pyoxidizer !)
+  devel-warn: extension 'buggyconfig' overwrites config item 'ui.quiet' at: */mercurial/extensions.py:* (_loadextra) (glob) (no-pyoxidizer !)
+  devel-warn: extension 'buggyconfig' overwrites config item 'ui.interactive' at: mercurial.extensions:* (_loadextra) (glob) (pyoxidizer !)
+  devel-warn: extension 'buggyconfig' overwrites config item 'ui.quiet' at: mercurial.extensions:* (_loadextra) (glob) (pyoxidizer !)
   devel-warn: specifying a mismatched default value for a registered config item: 'ui.quiet' 'True' at: $TESTTMP/buggyconfig.py:* (cmdbuggyconfig) (glob)
   devel-warn: specifying a mismatched default value for a registered config item: 'ui.interactive' 'False' at: $TESTTMP/buggyconfig.py:* (cmdbuggyconfig) (glob)
   devel-warn: specifying a mismatched default value for a registered config item: 'test.some' 'bar' at: $TESTTMP/buggyconfig.py:* (cmdbuggyconfig) (glob)
--- a/tests/test-fncache.t	Mon Aug 07 11:05:43 2023 +0200
+++ b/tests/test-fncache.t	Mon Aug 07 11:08:00 2023 +0200
@@ -275,7 +275,7 @@
   > 
   > def uisetup(ui):
   >     extensions.wrapfunction(
-  >         localrepo.localrepository, b'transaction', wrapper)
+  >         localrepo.localrepository, 'transaction', wrapper)
   > 
   > cmdtable = {}
   > 
--- a/tests/test-hgweb.t	Mon Aug 07 11:05:43 2023 +0200
+++ b/tests/test-hgweb.t	Mon Aug 07 11:08:00 2023 +0200
@@ -876,7 +876,7 @@
   >     except ValueError:
   >         raise error.Abort(b'signal.signal() called in thread?')
   > def uisetup(ui):
-  >    extensions.wrapfunction(signal, b'signal', disabledsig)
+  >    extensions.wrapfunction(signal, 'signal', disabledsig)
   > EOF
 
  by default, signal interrupt should be disabled while making a lock file
--- a/tests/test-infinitepush-bundlestore.t	Mon Aug 07 11:05:43 2023 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,755 +0,0 @@
-#require no-reposimplestore no-chg
-
-XXX-CHG this test hangs if `hg` is really `chg`. This was hidden by the use of
-`alias hg=chg` by run-tests.py. With such alias removed, this test is revealed
-buggy. This needs to be resolved sooner rather than later.
-
-
-Testing the infinitepush extension and the config options provided by it
-
-Create an ondisk bundlestore in .hg/scratchbranches
-  $ . "$TESTDIR/library-infinitepush.sh"
-  $ cp $HGRCPATH $TESTTMP/defaulthgrc
-  $ setupcommon
-  $ mkcommit() {
-  >    echo "$1" > "$1"
-  >    hg add "$1"
-  >    hg ci -m "$1"
-  > }
-  $ hg init repo
-  IMPORTANT: if you use this extension, please contact
-  mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  unused and barring learning of users of this functionality, we drop this
-  extension in Mercurial 6.6.
-  $ cd repo
-
-Check that we can send a scratch commit to the server; it does not show up in
-the history there but is stored on disk
-  $ setupserver
-  $ cd ..
-  $ hg clone ssh://user@dummy/repo client -q
-  remote: IMPORTANT: if you use this extension, please contact
-  remote: mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  remote: unused and barring learning of users of this functionality, we drop this
-  remote: extension in Mercurial 6.6.
-  IMPORTANT: if you use this extension, please contact
-  mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  unused and barring learning of users of this functionality, we drop this
-  extension in Mercurial 6.6.
-  IMPORTANT: if you use this extension, please contact
-  mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  unused and barring learning of users of this functionality, we drop this
-  extension in Mercurial 6.6.
-  $ cd client
-  $ mkcommit initialcommit
-  IMPORTANT: if you use this extension, please contact
-  mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  unused and barring learning of users of this functionality, we drop this
-  extension in Mercurial 6.6.
-  IMPORTANT: if you use this extension, please contact
-  mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  unused and barring learning of users of this functionality, we drop this
-  extension in Mercurial 6.6.
-  $ hg push -r .
-  IMPORTANT: if you use this extension, please contact
-  mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  unused and barring learning of users of this functionality, we drop this
-  extension in Mercurial 6.6.
-  pushing to ssh://user@dummy/repo
-  remote: IMPORTANT: if you use this extension, please contact
-  remote: mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  remote: unused and barring learning of users of this functionality, we drop this
-  remote: extension in Mercurial 6.6.
-  IMPORTANT: if you use this extension, please contact
-  mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  unused and barring learning of users of this functionality, we drop this
-  extension in Mercurial 6.6.
-  searching for changes
-  remote: adding changesets
-  remote: adding manifests
-  remote: adding file changes
-  remote: added 1 changesets with 1 changes to 1 files
-  $ mkcommit scratchcommit
-  IMPORTANT: if you use this extension, please contact
-  mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  unused and barring learning of users of this functionality, we drop this
-  extension in Mercurial 6.6.
-  IMPORTANT: if you use this extension, please contact
-  mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  unused and barring learning of users of this functionality, we drop this
-  extension in Mercurial 6.6.
-  $ hg push -r . -B scratch/mybranch
-  IMPORTANT: if you use this extension, please contact
-  mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  unused and barring learning of users of this functionality, we drop this
-  extension in Mercurial 6.6.
-  pushing to ssh://user@dummy/repo
-  remote: IMPORTANT: if you use this extension, please contact
-  remote: mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  remote: unused and barring learning of users of this functionality, we drop this
-  remote: extension in Mercurial 6.6.
-  IMPORTANT: if you use this extension, please contact
-  mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  unused and barring learning of users of this functionality, we drop this
-  extension in Mercurial 6.6.
-  searching for changes
-  remote: IMPORTANT: if you use this extension, please contact
-  remote: mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  remote: unused and barring learning of users of this functionality, we drop this
-  remote: extension in Mercurial 6.6.
-  remote: pushing 1 commit:
-  remote:     20759b6926ce  scratchcommit
-  $ hg log -G
-  IMPORTANT: if you use this extension, please contact
-  mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  unused and barring learning of users of this functionality, we drop this
-  extension in Mercurial 6.6.
-  @  changeset:   1:20759b6926ce
-  |  bookmark:    scratch/mybranch
-  |  tag:         tip
-  |  user:        test
-  |  date:        Thu Jan 01 00:00:00 1970 +0000
-  |  summary:     scratchcommit
-  |
-  o  changeset:   0:67145f466344
-     user:        test
-     date:        Thu Jan 01 00:00:00 1970 +0000
-     summary:     initialcommit
-  
-  $ hg log -G -R ../repo
-  IMPORTANT: if you use this extension, please contact
-  mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  unused and barring learning of users of this functionality, we drop this
-  extension in Mercurial 6.6.
-  o  changeset:   0:67145f466344
-     tag:         tip
-     user:        test
-     date:        Thu Jan 01 00:00:00 1970 +0000
-     summary:     initialcommit
-  
-  $ find ../repo/.hg/scratchbranches | sort
-  ../repo/.hg/scratchbranches
-  ../repo/.hg/scratchbranches/filebundlestore
-  ../repo/.hg/scratchbranches/filebundlestore/b9
-  ../repo/.hg/scratchbranches/filebundlestore/b9/e1
-  ../repo/.hg/scratchbranches/filebundlestore/b9/e1/b9e1ee5f93fb6d7c42496fc176c09839639dd9cc
-  ../repo/.hg/scratchbranches/index
-  ../repo/.hg/scratchbranches/index/bookmarkmap
-  ../repo/.hg/scratchbranches/index/bookmarkmap/scratch
-  ../repo/.hg/scratchbranches/index/bookmarkmap/scratch/mybranch
-  ../repo/.hg/scratchbranches/index/nodemap
-  ../repo/.hg/scratchbranches/index/nodemap/20759b6926ce827d5a8c73eb1fa9726d6f7defb2
-
-From another client we can get the scratchbranch if we ask for it explicitly
-
-  $ cd ..
-  $ hg clone ssh://user@dummy/repo client2 -q
-  remote: IMPORTANT: if you use this extension, please contact
-  remote: mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  remote: unused and barring learning of users of this functionality, we drop this
-  remote: extension in Mercurial 6.6.
-  IMPORTANT: if you use this extension, please contact
-  mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  unused and barring learning of users of this functionality, we drop this
-  extension in Mercurial 6.6.
-  IMPORTANT: if you use this extension, please contact
-  mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  unused and barring learning of users of this functionality, we drop this
-  extension in Mercurial 6.6.
-  $ cd client2
-  $ hg pull -B scratch/mybranch --traceback
-  IMPORTANT: if you use this extension, please contact
-  mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  unused and barring learning of users of this functionality, we drop this
-  extension in Mercurial 6.6.
-  remote: IMPORTANT: if you use this extension, please contact
-  remote: mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  remote: unused and barring learning of users of this functionality, we drop this
-  remote: extension in Mercurial 6.6.
-  IMPORTANT: if you use this extension, please contact
-  mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  unused and barring learning of users of this functionality, we drop this
-  extension in Mercurial 6.6.
-  pulling from ssh://user@dummy/repo
-  remote: IMPORTANT: if you use this extension, please contact
-  remote: mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  remote: unused and barring learning of users of this functionality, we drop this
-  remote: extension in Mercurial 6.6.
-  IMPORTANT: if you use this extension, please contact
-  mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  unused and barring learning of users of this functionality, we drop this
-  extension in Mercurial 6.6.
-  searching for changes
-  remote: IMPORTANT: if you use this extension, please contact
-  remote: mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  remote: unused and barring learning of users of this functionality, we drop this
-  remote: extension in Mercurial 6.6.
-  adding changesets
-  adding manifests
-  adding file changes
-  added 1 changesets with 1 changes to 1 files
-  new changesets 20759b6926ce (1 drafts)
-  (run 'hg update' to get a working copy)
-  $ hg log -G
-  IMPORTANT: if you use this extension, please contact
-  mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  unused and barring learning of users of this functionality, we drop this
-  extension in Mercurial 6.6.
-  o  changeset:   1:20759b6926ce
-  |  bookmark:    scratch/mybranch
-  |  tag:         tip
-  |  user:        test
-  |  date:        Thu Jan 01 00:00:00 1970 +0000
-  |  summary:     scratchcommit
-  |
-  @  changeset:   0:67145f466344
-     user:        test
-     date:        Thu Jan 01 00:00:00 1970 +0000
-     summary:     initialcommit
-  
-  $ cd ..
-
-Push to non-scratch bookmark
-
-  $ cd client
-  $ hg up 0
-  IMPORTANT: if you use this extension, please contact
-  mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  unused and barring learning of users of this functionality, we drop this
-  extension in Mercurial 6.6.
-  0 files updated, 0 files merged, 1 files removed, 0 files unresolved
-  $ mkcommit newcommit
-  IMPORTANT: if you use this extension, please contact
-  mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  unused and barring learning of users of this functionality, we drop this
-  extension in Mercurial 6.6.
-  IMPORTANT: if you use this extension, please contact
-  mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  unused and barring learning of users of this functionality, we drop this
-  extension in Mercurial 6.6.
-  created new head
-  $ hg push -r .
-  IMPORTANT: if you use this extension, please contact
-  mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  unused and barring learning of users of this functionality, we drop this
-  extension in Mercurial 6.6.
-  pushing to ssh://user@dummy/repo
-  remote: IMPORTANT: if you use this extension, please contact
-  remote: mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  remote: unused and barring learning of users of this functionality, we drop this
-  remote: extension in Mercurial 6.6.
-  IMPORTANT: if you use this extension, please contact
-  mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  unused and barring learning of users of this functionality, we drop this
-  extension in Mercurial 6.6.
-  searching for changes
-  remote: adding changesets
-  remote: adding manifests
-  remote: adding file changes
-  remote: added 1 changesets with 1 changes to 1 files
-  $ hg log -G -T '{desc} {phase} {bookmarks}'
-  IMPORTANT: if you use this extension, please contact
-  mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  unused and barring learning of users of this functionality, we drop this
-  extension in Mercurial 6.6.
-  @  newcommit public
-  |
-  | o  scratchcommit draft scratch/mybranch
-  |/
-  o  initialcommit public
-  
-
-Push to scratch branch
-  $ cd ../client2
-  $ hg up -q scratch/mybranch
-  IMPORTANT: if you use this extension, please contact
-  mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  unused and barring learning of users of this functionality, we drop this
-  extension in Mercurial 6.6.
-  $ mkcommit 'new scratch commit'
-  IMPORTANT: if you use this extension, please contact
-  mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  unused and barring learning of users of this functionality, we drop this
-  extension in Mercurial 6.6.
-  IMPORTANT: if you use this extension, please contact
-  mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  unused and barring learning of users of this functionality, we drop this
-  extension in Mercurial 6.6.
-  $ hg push -r . -B scratch/mybranch
-  IMPORTANT: if you use this extension, please contact
-  mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  unused and barring learning of users of this functionality, we drop this
-  extension in Mercurial 6.6.
-  pushing to ssh://user@dummy/repo
-  remote: IMPORTANT: if you use this extension, please contact
-  remote: mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  remote: unused and barring learning of users of this functionality, we drop this
-  remote: extension in Mercurial 6.6.
-  IMPORTANT: if you use this extension, please contact
-  mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  unused and barring learning of users of this functionality, we drop this
-  extension in Mercurial 6.6.
-  searching for changes
-  remote: IMPORTANT: if you use this extension, please contact
-  remote: mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  remote: unused and barring learning of users of this functionality, we drop this
-  remote: extension in Mercurial 6.6.
-  remote: pushing 2 commits:
-  remote:     20759b6926ce  scratchcommit
-  remote:     1de1d7d92f89  new scratch commit
-  $ hg log -G -T '{desc} {phase} {bookmarks}'
-  IMPORTANT: if you use this extension, please contact
-  mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  unused and barring learning of users of this functionality, we drop this
-  extension in Mercurial 6.6.
-  @  new scratch commit draft scratch/mybranch
-  |
-  o  scratchcommit draft
-  |
-  o  initialcommit public
-  
-  $ scratchnodes
-  1de1d7d92f8965260391d0513fe8a8d5973d3042 bed63daed3beba97fff2e819a148cf415c217a85
-  20759b6926ce827d5a8c73eb1fa9726d6f7defb2 bed63daed3beba97fff2e819a148cf415c217a85
-
-  $ scratchbookmarks
-  scratch/mybranch 1de1d7d92f8965260391d0513fe8a8d5973d3042
-
-Push scratch bookmark with no new revs
-  $ hg push -r . -B scratch/anotherbranch
-  IMPORTANT: if you use this extension, please contact
-  mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  unused and barring learning of users of this functionality, we drop this
-  extension in Mercurial 6.6.
-  pushing to ssh://user@dummy/repo
-  remote: IMPORTANT: if you use this extension, please contact
-  remote: mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  remote: unused and barring learning of users of this functionality, we drop this
-  remote: extension in Mercurial 6.6.
-  IMPORTANT: if you use this extension, please contact
-  mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  unused and barring learning of users of this functionality, we drop this
-  extension in Mercurial 6.6.
-  searching for changes
-  remote: IMPORTANT: if you use this extension, please contact
-  remote: mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  remote: unused and barring learning of users of this functionality, we drop this
-  remote: extension in Mercurial 6.6.
-  remote: pushing 2 commits:
-  remote:     20759b6926ce  scratchcommit
-  remote:     1de1d7d92f89  new scratch commit
-  $ hg log -G -T '{desc} {phase} {bookmarks}'
-  IMPORTANT: if you use this extension, please contact
-  mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  unused and barring learning of users of this functionality, we drop this
-  extension in Mercurial 6.6.
-  @  new scratch commit draft scratch/anotherbranch scratch/mybranch
-  |
-  o  scratchcommit draft
-  |
-  o  initialcommit public
-  
-  $ scratchbookmarks
-  scratch/anotherbranch 1de1d7d92f8965260391d0513fe8a8d5973d3042
-  scratch/mybranch 1de1d7d92f8965260391d0513fe8a8d5973d3042
-
-Pull scratch and non-scratch bookmark at the same time
-
-  $ hg -R ../repo book newbook
-  IMPORTANT: if you use this extension, please contact
-  mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  unused and barring learning of users of this functionality, we drop this
-  extension in Mercurial 6.6.
-  $ cd ../client
-  $ hg pull -B newbook -B scratch/mybranch --traceback
-  IMPORTANT: if you use this extension, please contact
-  mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  unused and barring learning of users of this functionality, we drop this
-  extension in Mercurial 6.6.
-  remote: IMPORTANT: if you use this extension, please contact
-  remote: mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  remote: unused and barring learning of users of this functionality, we drop this
-  remote: extension in Mercurial 6.6.
-  IMPORTANT: if you use this extension, please contact
-  mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  unused and barring learning of users of this functionality, we drop this
-  extension in Mercurial 6.6.
-  pulling from ssh://user@dummy/repo
-  remote: IMPORTANT: if you use this extension, please contact
-  remote: mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  remote: unused and barring learning of users of this functionality, we drop this
-  remote: extension in Mercurial 6.6.
-  IMPORTANT: if you use this extension, please contact
-  mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  unused and barring learning of users of this functionality, we drop this
-  extension in Mercurial 6.6.
-  searching for changes
-  remote: IMPORTANT: if you use this extension, please contact
-  remote: mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  remote: unused and barring learning of users of this functionality, we drop this
-  remote: extension in Mercurial 6.6.
-  adding changesets
-  adding manifests
-  adding file changes
-  adding remote bookmark newbook
-  added 1 changesets with 1 changes to 2 files
-  new changesets 1de1d7d92f89 (1 drafts)
-  (run 'hg update' to get a working copy)
-  $ hg log -G -T '{desc} {phase} {bookmarks}'
-  IMPORTANT: if you use this extension, please contact
-  mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  unused and barring learning of users of this functionality, we drop this
-  extension in Mercurial 6.6.
-  o  new scratch commit draft scratch/mybranch
-  |
-  | @  newcommit public
-  | |
-  o |  scratchcommit draft
-  |/
-  o  initialcommit public
-  
-
-Push scratch revision without bookmark with --bundle-store
-
-  $ hg up -q tip
-  IMPORTANT: if you use this extension, please contact
-  mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  unused and barring learning of users of this functionality, we drop this
-  extension in Mercurial 6.6.
-  $ mkcommit scratchcommitnobook
-  IMPORTANT: if you use this extension, please contact
-  mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  unused and barring learning of users of this functionality, we drop this
-  extension in Mercurial 6.6.
-  IMPORTANT: if you use this extension, please contact
-  mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  unused and barring learning of users of this functionality, we drop this
-  extension in Mercurial 6.6.
-  $ hg log -G -T '{desc} {phase} {bookmarks}'
-  IMPORTANT: if you use this extension, please contact
-  mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  unused and barring learning of users of this functionality, we drop this
-  extension in Mercurial 6.6.
-  @  scratchcommitnobook draft
-  |
-  o  new scratch commit draft scratch/mybranch
-  |
-  | o  newcommit public
-  | |
-  o |  scratchcommit draft
-  |/
-  o  initialcommit public
-  
-  $ hg push -r . --bundle-store
-  IMPORTANT: if you use this extension, please contact
-  mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  unused and barring learning of users of this functionality, we drop this
-  extension in Mercurial 6.6.
-  pushing to ssh://user@dummy/repo
-  remote: IMPORTANT: if you use this extension, please contact
-  remote: mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  remote: unused and barring learning of users of this functionality, we drop this
-  remote: extension in Mercurial 6.6.
-  IMPORTANT: if you use this extension, please contact
-  mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  unused and barring learning of users of this functionality, we drop this
-  extension in Mercurial 6.6.
-  searching for changes
-  remote: IMPORTANT: if you use this extension, please contact
-  remote: mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  remote: unused and barring learning of users of this functionality, we drop this
-  remote: extension in Mercurial 6.6.
-  remote: pushing 3 commits:
-  remote:     20759b6926ce  scratchcommit
-  remote:     1de1d7d92f89  new scratch commit
-  remote:     2b5d271c7e0d  scratchcommitnobook
-  $ hg -R ../repo log -G -T '{desc} {phase}'
-  IMPORTANT: if you use this extension, please contact
-  mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  unused and barring learning of users of this functionality, we drop this
-  extension in Mercurial 6.6.
-  o  newcommit public
-  |
-  o  initialcommit public
-  
-
-  $ scratchnodes
-  1de1d7d92f8965260391d0513fe8a8d5973d3042 66fa08ff107451320512817bed42b7f467a1bec3
-  20759b6926ce827d5a8c73eb1fa9726d6f7defb2 66fa08ff107451320512817bed42b7f467a1bec3
-  2b5d271c7e0d25d811359a314d413ebcc75c9524 66fa08ff107451320512817bed42b7f467a1bec3
-
-Test with pushrebase
-  $ mkcommit scratchcommitwithpushrebase
-  IMPORTANT: if you use this extension, please contact
-  mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  unused and barring learning of users of this functionality, we drop this
-  extension in Mercurial 6.6.
-  IMPORTANT: if you use this extension, please contact
-  mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  unused and barring learning of users of this functionality, we drop this
-  extension in Mercurial 6.6.
-  $ hg push -r . -B scratch/mybranch
-  IMPORTANT: if you use this extension, please contact
-  mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  unused and barring learning of users of this functionality, we drop this
-  extension in Mercurial 6.6.
-  pushing to ssh://user@dummy/repo
-  remote: IMPORTANT: if you use this extension, please contact
-  remote: mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  remote: unused and barring learning of users of this functionality, we drop this
-  remote: extension in Mercurial 6.6.
-  IMPORTANT: if you use this extension, please contact
-  mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  unused and barring learning of users of this functionality, we drop this
-  extension in Mercurial 6.6.
-  searching for changes
-  remote: IMPORTANT: if you use this extension, please contact
-  remote: mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  remote: unused and barring learning of users of this functionality, we drop this
-  remote: extension in Mercurial 6.6.
-  remote: pushing 4 commits:
-  remote:     20759b6926ce  scratchcommit
-  remote:     1de1d7d92f89  new scratch commit
-  remote:     2b5d271c7e0d  scratchcommitnobook
-  remote:     d8c4f54ab678  scratchcommitwithpushrebase
-  $ hg -R ../repo log -G -T '{desc} {phase}'
-  IMPORTANT: if you use this extension, please contact
-  mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  unused and barring learning of users of this functionality, we drop this
-  extension in Mercurial 6.6.
-  o  newcommit public
-  |
-  o  initialcommit public
-  
-  $ scratchnodes
-  1de1d7d92f8965260391d0513fe8a8d5973d3042 e3cb2ac50f9e1e6a5ead3217fc21236c84af4397
-  20759b6926ce827d5a8c73eb1fa9726d6f7defb2 e3cb2ac50f9e1e6a5ead3217fc21236c84af4397
-  2b5d271c7e0d25d811359a314d413ebcc75c9524 e3cb2ac50f9e1e6a5ead3217fc21236c84af4397
-  d8c4f54ab678fd67cb90bb3f272a2dc6513a59a7 e3cb2ac50f9e1e6a5ead3217fc21236c84af4397
-
-Change the order of pushrebase and infinitepush
-  $ mkcommit scratchcommitwithpushrebase2
-  IMPORTANT: if you use this extension, please contact
-  mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  unused and barring learning of users of this functionality, we drop this
-  extension in Mercurial 6.6.
-  IMPORTANT: if you use this extension, please contact
-  mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  unused and barring learning of users of this functionality, we drop this
-  extension in Mercurial 6.6.
-  $ hg push -r . -B scratch/mybranch
-  IMPORTANT: if you use this extension, please contact
-  mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  unused and barring learning of users of this functionality, we drop this
-  extension in Mercurial 6.6.
-  pushing to ssh://user@dummy/repo
-  remote: IMPORTANT: if you use this extension, please contact
-  remote: mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  remote: unused and barring learning of users of this functionality, we drop this
-  remote: extension in Mercurial 6.6.
-  IMPORTANT: if you use this extension, please contact
-  mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  unused and barring learning of users of this functionality, we drop this
-  extension in Mercurial 6.6.
-  searching for changes
-  remote: IMPORTANT: if you use this extension, please contact
-  remote: mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  remote: unused and barring learning of users of this functionality, we drop this
-  remote: extension in Mercurial 6.6.
-  remote: pushing 5 commits:
-  remote:     20759b6926ce  scratchcommit
-  remote:     1de1d7d92f89  new scratch commit
-  remote:     2b5d271c7e0d  scratchcommitnobook
-  remote:     d8c4f54ab678  scratchcommitwithpushrebase
-  remote:     6c10d49fe927  scratchcommitwithpushrebase2
-  $ hg -R ../repo log -G -T '{desc} {phase}'
-  IMPORTANT: if you use this extension, please contact
-  mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  unused and barring learning of users of this functionality, we drop this
-  extension in Mercurial 6.6.
-  o  newcommit public
-  |
-  o  initialcommit public
-  
-  $ scratchnodes
-  1de1d7d92f8965260391d0513fe8a8d5973d3042 cd0586065eaf8b483698518f5fc32531e36fd8e0
-  20759b6926ce827d5a8c73eb1fa9726d6f7defb2 cd0586065eaf8b483698518f5fc32531e36fd8e0
-  2b5d271c7e0d25d811359a314d413ebcc75c9524 cd0586065eaf8b483698518f5fc32531e36fd8e0
-  6c10d49fe92751666c40263f96721b918170d3da cd0586065eaf8b483698518f5fc32531e36fd8e0
-  d8c4f54ab678fd67cb90bb3f272a2dc6513a59a7 cd0586065eaf8b483698518f5fc32531e36fd8e0
-
-Non-fastforward scratch bookmark push
-
-  $ hg log -GT "{rev}:{node} {desc}\n"
-  IMPORTANT: if you use this extension, please contact
-  mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  unused and barring learning of users of this functionality, we drop this
-  extension in Mercurial 6.6.
-  @  6:6c10d49fe92751666c40263f96721b918170d3da scratchcommitwithpushrebase2
-  |
-  o  5:d8c4f54ab678fd67cb90bb3f272a2dc6513a59a7 scratchcommitwithpushrebase
-  |
-  o  4:2b5d271c7e0d25d811359a314d413ebcc75c9524 scratchcommitnobook
-  |
-  o  3:1de1d7d92f8965260391d0513fe8a8d5973d3042 new scratch commit
-  |
-  | o  2:91894e11e8255bf41aa5434b7b98e8b2aa2786eb newcommit
-  | |
-  o |  1:20759b6926ce827d5a8c73eb1fa9726d6f7defb2 scratchcommit
-  |/
-  o  0:67145f4663446a9580364f70034fea6e21293b6f initialcommit
-  
-  $ hg up 6c10d49fe927
-  IMPORTANT: if you use this extension, please contact
-  mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  unused and barring learning of users of this functionality, we drop this
-  extension in Mercurial 6.6.
-  0 files updated, 0 files merged, 0 files removed, 0 files unresolved
-  $ echo 1 > amend
-  $ hg add amend
-  IMPORTANT: if you use this extension, please contact
-  mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  unused and barring learning of users of this functionality, we drop this
-  extension in Mercurial 6.6.
-  $ hg ci --amend -m 'scratch amended commit'
-  IMPORTANT: if you use this extension, please contact
-  mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  unused and barring learning of users of this functionality, we drop this
-  extension in Mercurial 6.6.
-  saved backup bundle to $TESTTMP/client/.hg/strip-backup/6c10d49fe927-c99ffec5-amend.hg
-  $ hg log -G -T '{desc} {phase} {bookmarks}'
-  IMPORTANT: if you use this extension, please contact
-  mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  unused and barring learning of users of this functionality, we drop this
-  extension in Mercurial 6.6.
-  @  scratch amended commit draft scratch/mybranch
-  |
-  o  scratchcommitwithpushrebase draft
-  |
-  o  scratchcommitnobook draft
-  |
-  o  new scratch commit draft
-  |
-  | o  newcommit public
-  | |
-  o |  scratchcommit draft
-  |/
-  o  initialcommit public
-  
-
-  $ scratchbookmarks
-  scratch/anotherbranch 1de1d7d92f8965260391d0513fe8a8d5973d3042
-  scratch/mybranch 6c10d49fe92751666c40263f96721b918170d3da
-  $ hg push -r . -B scratch/mybranch
-  IMPORTANT: if you use this extension, please contact
-  mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  unused and barring learning of users of this functionality, we drop this
-  extension in Mercurial 6.6.
-  pushing to ssh://user@dummy/repo
-  remote: IMPORTANT: if you use this extension, please contact
-  remote: mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  remote: unused and barring learning of users of this functionality, we drop this
-  remote: extension in Mercurial 6.6.
-  IMPORTANT: if you use this extension, please contact
-  mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  unused and barring learning of users of this functionality, we drop this
-  extension in Mercurial 6.6.
-  searching for changes
-  remote: IMPORTANT: if you use this extension, please contact
-  remote: mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  remote: unused and barring learning of users of this functionality, we drop this
-  remote: extension in Mercurial 6.6.
-  remote: pushing 5 commits:
-  remote:     20759b6926ce  scratchcommit
-  remote:     1de1d7d92f89  new scratch commit
-  remote:     2b5d271c7e0d  scratchcommitnobook
-  remote:     d8c4f54ab678  scratchcommitwithpushrebase
-  remote:     8872775dd97a  scratch amended commit
-  $ scratchbookmarks
-  scratch/anotherbranch 1de1d7d92f8965260391d0513fe8a8d5973d3042
-  scratch/mybranch 8872775dd97a750e1533dc1fbbca665644b32547
-  $ hg log -G -T '{desc} {phase} {bookmarks}'
-  IMPORTANT: if you use this extension, please contact
-  mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  unused and barring learning of users of this functionality, we drop this
-  extension in Mercurial 6.6.
-  @  scratch amended commit draft scratch/mybranch
-  |
-  o  scratchcommitwithpushrebase draft
-  |
-  o  scratchcommitnobook draft
-  |
-  o  new scratch commit draft
-  |
-  | o  newcommit public
-  | |
-  o |  scratchcommit draft
-  |/
-  o  initialcommit public
-  
-Check that the push path is not ignored. Add a new path to the hgrc
-  $ cat >> .hg/hgrc << EOF
-  > [paths]
-  > peer=ssh://user@dummy/client2
-  > EOF
-
-Check out the last non-scratch commit
-  $ hg up 91894e11e8255
-  IMPORTANT: if you use this extension, please contact
-  mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  unused and barring learning of users of this functionality, we drop this
-  extension in Mercurial 6.6.
-  1 files updated, 0 files merged, 6 files removed, 0 files unresolved
-  $ mkcommit peercommit
-  IMPORTANT: if you use this extension, please contact
-  mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  unused and barring learning of users of this functionality, we drop this
-  extension in Mercurial 6.6.
-  IMPORTANT: if you use this extension, please contact
-  mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  unused and barring learning of users of this functionality, we drop this
-  extension in Mercurial 6.6.
-Use --force because this push creates a new head
-  $ hg push peer -r . -f
-  IMPORTANT: if you use this extension, please contact
-  mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  unused and barring learning of users of this functionality, we drop this
-  extension in Mercurial 6.6.
-  pushing to ssh://user@dummy/client2
-  remote: IMPORTANT: if you use this extension, please contact
-  remote: mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  remote: unused and barring learning of users of this functionality, we drop this
-  remote: extension in Mercurial 6.6.
-  IMPORTANT: if you use this extension, please contact
-  mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  unused and barring learning of users of this functionality, we drop this
-  extension in Mercurial 6.6.
-  searching for changes
-  remote: adding changesets
-  remote: adding manifests
-  remote: adding file changes
-  remote: added 2 changesets with 2 changes to 2 files (+1 heads)
-  $ hg -R ../repo log -G -T '{desc} {phase} {bookmarks}'
-  IMPORTANT: if you use this extension, please contact
-  mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  unused and barring learning of users of this functionality, we drop this
-  extension in Mercurial 6.6.
-  o  newcommit public
-  |
-  o  initialcommit public
-  
-  $ hg -R ../client2 log -G -T '{desc} {phase} {bookmarks}'
-  IMPORTANT: if you use this extension, please contact
-  mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  unused and barring learning of users of this functionality, we drop this
-  extension in Mercurial 6.6.
-  o  peercommit public
-  |
-  o  newcommit public
-  |
-  | @  new scratch commit draft scratch/anotherbranch scratch/mybranch
-  | |
-  | o  scratchcommit draft
-  |/
-  o  initialcommit public
-  
--- a/tests/test-infinitepush-ci.t	Mon Aug 07 11:05:43 2023 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,579 +0,0 @@
-#require no-reposimplestore
-
-Testing the case when there is no infinitepush extension present on the client
-side and the server routes each push to the bundlestore. This case is very
-similar to the CI use case.
-
-Setup
------
-
-  $ . "$TESTDIR/library-infinitepush.sh"
-  $ cat >> $HGRCPATH <<EOF
-  > [alias]
-  > glog = log -GT "{rev}:{node|short} {desc}\n{phase}"
-  > EOF
-  $ cp $HGRCPATH $TESTTMP/defaulthgrc
-  $ hg init repo
-  $ cd repo
-  $ setupserver
-  $ echo "pushtobundlestore = True" >> .hg/hgrc
-  $ echo "[extensions]" >> .hg/hgrc
-  $ echo "infinitepush=" >> .hg/hgrc
-  $ echo "[infinitepush]" >> .hg/hgrc
-  $ echo "deprecation-abort=no" >> .hg/hgrc
-  $ echo initialcommit > initialcommit
-  $ hg ci -Aqm "initialcommit"
-  IMPORTANT: if you use this extension, please contact
-  mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  unused and barring learning of users of this functionality, we drop this
-  extension in Mercurial 6.6.
-  IMPORTANT: if you use this extension, please contact (chg !)
-  mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be (chg !)
-  unused and barring learning of users of this functionality, we drop this (chg !)
-  extension in Mercurial 6.6. (chg !)
-  $ hg phase --public .
-  IMPORTANT: if you use this extension, please contact
-  mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  unused and barring learning of users of this functionality, we drop this
-  extension in Mercurial 6.6.
-
-  $ cd ..
-  $ hg clone repo client -q
-  IMPORTANT: if you use this extension, please contact
-  mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  unused and barring learning of users of this functionality, we drop this
-  extension in Mercurial 6.6.
-  $ hg clone repo client2 -q
-  IMPORTANT: if you use this extension, please contact
-  mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  unused and barring learning of users of this functionality, we drop this
-  extension in Mercurial 6.6.
-  $ hg clone ssh://user@dummy/repo client3 -q
-  remote: IMPORTANT: if you use this extension, please contact
-  remote: mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  remote: unused and barring learning of users of this functionality, we drop this
-  remote: extension in Mercurial 6.6.
-  $ cd client
-
-Pushing a new commit from the client to the server
------------------------------------------------------
-
-  $ echo foobar > a
-  $ hg ci -Aqm "added a"
-  $ hg glog
-  @  1:6cb0989601f1 added a
-  |  draft
-  o  0:67145f466344 initialcommit
-     public
-
-  $ hg push
-  pushing to $TESTTMP/repo
-  IMPORTANT: if you use this extension, please contact
-  mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  unused and barring learning of users of this functionality, we drop this
-  extension in Mercurial 6.6.
-  searching for changes
-  storing changesets on the bundlestore
-  IMPORTANT: if you use this extension, please contact
-  mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  unused and barring learning of users of this functionality, we drop this
-  extension in Mercurial 6.6.
-  pushing 1 commit:
-      6cb0989601f1  added a
-
-  $ scratchnodes
-  6cb0989601f1fb5805238edfb16f3606713d9a0b 3b414252ff8acab801318445d88ff48faf4a28c3
-
-Understanding how data is stored in the bundlestore on the server
--------------------------------------------------------------
-
-There are two things: filebundlestore and index
-  $ ls ../repo/.hg/scratchbranches
-  filebundlestore
-  index
-
-filebundlestore stores the bundles
-  $ ls ../repo/.hg/scratchbranches/filebundlestore/3b/41/
-  3b414252ff8acab801318445d88ff48faf4a28c3
-
-index/nodemap stores a map from node id to the file in which the bundle is stored in filebundlestore
-  $ ls ../repo/.hg/scratchbranches/index/
-  nodemap
-  $ ls ../repo/.hg/scratchbranches/index/nodemap/
-  6cb0989601f1fb5805238edfb16f3606713d9a0b
-
-  $ cd ../repo
-
-Checking that the commit was not applied to the revlog on the server
-------------------------------------------------------------------
-
-  $ hg glog
-  IMPORTANT: if you use this extension, please contact
-  mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  unused and barring learning of users of this functionality, we drop this
-  extension in Mercurial 6.6.
-  @  0:67145f466344 initialcommit
-     public
-
-Applying the changeset from the bundlestore
---------------------------------------------
-
-  $ hg unbundle .hg/scratchbranches/filebundlestore/3b/41/3b414252ff8acab801318445d88ff48faf4a28c3
-  IMPORTANT: if you use this extension, please contact
-  mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  unused and barring learning of users of this functionality, we drop this
-  extension in Mercurial 6.6.
-  adding changesets
-  adding manifests
-  adding file changes
-  added 1 changesets with 1 changes to 1 files
-  new changesets 6cb0989601f1
-  (run 'hg update' to get a working copy)
-
-  $ hg glog
-  IMPORTANT: if you use this extension, please contact
-  mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  unused and barring learning of users of this functionality, we drop this
-  extension in Mercurial 6.6.
-  o  1:6cb0989601f1 added a
-  |  public
-  @  0:67145f466344 initialcommit
-     public
-
-Pushing more changesets from the local repo
---------------------------------------------
-
-  $ cd ../client
-  $ echo b > b
-  $ hg ci -Aqm "added b"
-  $ echo c > c
-  $ hg ci -Aqm "added c"
-  $ hg glog
-  @  3:bf8a6e3011b3 added c
-  |  draft
-  o  2:eaba929e866c added b
-  |  draft
-  o  1:6cb0989601f1 added a
-  |  public
-  o  0:67145f466344 initialcommit
-     public
-
-  $ hg push
-  pushing to $TESTTMP/repo
-  IMPORTANT: if you use this extension, please contact
-  mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  unused and barring learning of users of this functionality, we drop this
-  extension in Mercurial 6.6.
-  searching for changes
-  storing changesets on the bundlestore
-  IMPORTANT: if you use this extension, please contact
-  mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  unused and barring learning of users of this functionality, we drop this
-  extension in Mercurial 6.6.
-  pushing 2 commits:
-      eaba929e866c  added b
-      bf8a6e3011b3  added c
-
-Checking that changesets are not applied on the server
-------------------------------------------------------
-
-  $ hg glog -R ../repo
-  IMPORTANT: if you use this extension, please contact
-  mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  unused and barring learning of users of this functionality, we drop this
-  extension in Mercurial 6.6.
-  o  1:6cb0989601f1 added a
-  |  public
-  @  0:67145f466344 initialcommit
-     public
-
-Both of the new changesets are stored in a single bundle-file
-  $ scratchnodes
-  6cb0989601f1fb5805238edfb16f3606713d9a0b 3b414252ff8acab801318445d88ff48faf4a28c3
-  bf8a6e3011b345146bbbedbcb1ebd4837571492a 239585f5e61f0c09ce7106bdc1097bff731738f4
-  eaba929e866c59bc9a6aada5a9dd2f6990db83c0 239585f5e61f0c09ce7106bdc1097bff731738f4
-
-Pushing more changesets to the server
--------------------------------------
-
-  $ echo d > d
-  $ hg ci -Aqm "added d"
-  $ echo e > e
-  $ hg ci -Aqm "added e"
-
-XXX: we should have pushed only the parts which are not in bundlestore
-  $ hg push
-  pushing to $TESTTMP/repo
-  IMPORTANT: if you use this extension, please contact
-  mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  unused and barring learning of users of this functionality, we drop this
-  extension in Mercurial 6.6.
-  searching for changes
-  storing changesets on the bundlestore
-  IMPORTANT: if you use this extension, please contact
-  mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  unused and barring learning of users of this functionality, we drop this
-  extension in Mercurial 6.6.
-  pushing 4 commits:
-      eaba929e866c  added b
-      bf8a6e3011b3  added c
-      1bb96358eda2  added d
-      b4e4bce66051  added e
-
-Sneak peek into the bundlestore at the server
-  $ scratchnodes
-  1bb96358eda285b536c6d1c66846a7cdb2336cea 98fbae0016662521b0007da1b7bc349cd3caacd1
-  6cb0989601f1fb5805238edfb16f3606713d9a0b 3b414252ff8acab801318445d88ff48faf4a28c3
-  b4e4bce660512ad3e71189e14588a70ac8e31fef 98fbae0016662521b0007da1b7bc349cd3caacd1
-  bf8a6e3011b345146bbbedbcb1ebd4837571492a 98fbae0016662521b0007da1b7bc349cd3caacd1
-  eaba929e866c59bc9a6aada5a9dd2f6990db83c0 98fbae0016662521b0007da1b7bc349cd3caacd1
-
-Checking if `hg pull` pulls something or `hg incoming` shows something
------------------------------------------------------------------------
-
-  $ hg incoming
-  IMPORTANT: if you use this extension, please contact
-  mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  unused and barring learning of users of this functionality, we drop this
-  extension in Mercurial 6.6.
-  comparing with $TESTTMP/repo
-  searching for changes
-  no changes found
-  [1]
-
-  $ hg pull
-  pulling from $TESTTMP/repo
-  IMPORTANT: if you use this extension, please contact
-  mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  unused and barring learning of users of this functionality, we drop this
-  extension in Mercurial 6.6.
-  searching for changes
-  no changes found
-
-Pulling from the second client, which is a localpeer, to test `hg pull -r <rev>`
---------------------------------------------------------------------------
-
-Pulling the revision which is applied
-
-  $ cd ../client2
-  $ hg pull -r 6cb0989601f1
-  pulling from $TESTTMP/repo
-  IMPORTANT: if you use this extension, please contact
-  mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  unused and barring learning of users of this functionality, we drop this
-  extension in Mercurial 6.6.
-  searching for changes
-  adding changesets
-  adding manifests
-  adding file changes
-  added 1 changesets with 1 changes to 1 files
-  new changesets 6cb0989601f1
-  (run 'hg update' to get a working copy)
-  $ hg glog
-  o  1:6cb0989601f1 added a
-  |  public
-  @  0:67145f466344 initialcommit
-     public
-
-Pulling the revision which is in bundlestore
-XXX: we should support pulling revisions from a local peer's bundlestore without
-client-side wrapping
-
-  $ hg pull -r b4e4bce660512ad3e71189e14588a70ac8e31fef
-  pulling from $TESTTMP/repo
-  IMPORTANT: if you use this extension, please contact
-  mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  unused and barring learning of users of this functionality, we drop this
-  extension in Mercurial 6.6.
-  abort: unknown revision 'b4e4bce660512ad3e71189e14588a70ac8e31fef'
-  [10]
-  $ hg glog
-  o  1:6cb0989601f1 added a
-  |  public
-  @  0:67145f466344 initialcommit
-     public
-
-  $ cd ../client
-
-Pulling from the third client, which is not a localpeer
----------------------------------------------------
-
-Pulling the revision which is applied
-
-  $ cd ../client3
-  $ hg pull -r 6cb0989601f1
-  pulling from ssh://user@dummy/repo
-  remote: IMPORTANT: if you use this extension, please contact
-  remote: mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  remote: unused and barring learning of users of this functionality, we drop this
-  remote: extension in Mercurial 6.6.
-  searching for changes
-  adding changesets
-  adding manifests
-  adding file changes
-  added 1 changesets with 1 changes to 1 files
-  new changesets 6cb0989601f1
-  (run 'hg update' to get a working copy)
-  $ hg glog
-  o  1:6cb0989601f1 added a
-  |  public
-  @  0:67145f466344 initialcommit
-     public
-
-Pulling the revision which is in bundlestore
-
-Trying to specify short hash
-XXX: we should support this
-  $ hg pull -r b4e4bce660512
-  pulling from ssh://user@dummy/repo
-  remote: IMPORTANT: if you use this extension, please contact
-  remote: mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  remote: unused and barring learning of users of this functionality, we drop this
-  remote: extension in Mercurial 6.6.
-  abort: unknown revision 'b4e4bce660512'
-  [255]
-
-XXX: we should show a better message when the pull is happening from the bundlestore
-  $ hg pull -r b4e4bce660512ad3e71189e14588a70ac8e31fef
-  pulling from ssh://user@dummy/repo
-  remote: IMPORTANT: if you use this extension, please contact
-  remote: mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  remote: unused and barring learning of users of this functionality, we drop this
-  remote: extension in Mercurial 6.6.
-  searching for changes
-  remote: IMPORTANT: if you use this extension, please contact
-  remote: mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  remote: unused and barring learning of users of this functionality, we drop this
-  remote: extension in Mercurial 6.6.
-  adding changesets
-  adding manifests
-  adding file changes
-  added 4 changesets with 4 changes to 4 files
-  new changesets eaba929e866c:b4e4bce66051
-  (run 'hg update' to get a working copy)
-  $ hg glog
-  o  5:b4e4bce66051 added e
-  |  public
-  o  4:1bb96358eda2 added d
-  |  public
-  o  3:bf8a6e3011b3 added c
-  |  public
-  o  2:eaba929e866c added b
-  |  public
-  o  1:6cb0989601f1 added a
-  |  public
-  @  0:67145f466344 initialcommit
-     public
-
-  $ cd ../client
-
-Checking storage of phase information with the bundle on bundlestore
----------------------------------------------------------------------
-
-creating a draft commit
-  $ cat >> $HGRCPATH <<EOF
-  > [phases]
-  > publish = False
-  > EOF
-  $ echo f > f
-  $ hg ci -Aqm "added f"
-  $ hg glog -r '.^::'
-  @  6:9b42578d4447 added f
-  |  draft
-  o  5:b4e4bce66051 added e
-  |  public
-  ~
-
-  $ hg push
-  pushing to $TESTTMP/repo
-  IMPORTANT: if you use this extension, please contact
-  mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  unused and barring learning of users of this functionality, we drop this
-  extension in Mercurial 6.6.
-  searching for changes
-  storing changesets on the bundlestore
-  IMPORTANT: if you use this extension, please contact
-  mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  unused and barring learning of users of this functionality, we drop this
-  extension in Mercurial 6.6.
-  pushing 5 commits:
-      eaba929e866c  added b
-      bf8a6e3011b3  added c
-      1bb96358eda2  added d
-      b4e4bce66051  added e
-      9b42578d4447  added f
-
-XXX: the phase of 9b42578d4447 should not be changed here
-  $ hg glog -r .
-  @  6:9b42578d4447 added f
-  |  public
-  ~
-
-applying the bundle on the server to check preservation of phase-information
-
-  $ cd ../repo
-  $ scratchnodes
-  1bb96358eda285b536c6d1c66846a7cdb2336cea 280a46a259a268f0e740c81c5a7751bdbfaec85f
-  6cb0989601f1fb5805238edfb16f3606713d9a0b 3b414252ff8acab801318445d88ff48faf4a28c3
-  9b42578d44473575994109161430d65dd147d16d 280a46a259a268f0e740c81c5a7751bdbfaec85f
-  b4e4bce660512ad3e71189e14588a70ac8e31fef 280a46a259a268f0e740c81c5a7751bdbfaec85f
-  bf8a6e3011b345146bbbedbcb1ebd4837571492a 280a46a259a268f0e740c81c5a7751bdbfaec85f
-  eaba929e866c59bc9a6aada5a9dd2f6990db83c0 280a46a259a268f0e740c81c5a7751bdbfaec85f
-
-  $ hg unbundle .hg/scratchbranches/filebundlestore/28/0a/280a46a259a268f0e740c81c5a7751bdbfaec85f
-  IMPORTANT: if you use this extension, please contact
-  mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  unused and barring learning of users of this functionality, we drop this
-  extension in Mercurial 6.6.
-  adding changesets
-  adding manifests
-  adding file changes
-  added 5 changesets with 5 changes to 5 files
-  new changesets eaba929e866c:9b42578d4447 (1 drafts)
-  (run 'hg update' to get a working copy)
-
-  $ hg glog
-  IMPORTANT: if you use this extension, please contact
-  mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  unused and barring learning of users of this functionality, we drop this
-  extension in Mercurial 6.6.
-  o  6:9b42578d4447 added f
-  |  draft
-  o  5:b4e4bce66051 added e
-  |  public
-  o  4:1bb96358eda2 added d
-  |  public
-  o  3:bf8a6e3011b3 added c
-  |  public
-  o  2:eaba929e866c added b
-  |  public
-  o  1:6cb0989601f1 added a
-  |  public
-  @  0:67145f466344 initialcommit
-     public
-
-Checking storage of obsmarkers in the bundlestore
---------------------------------------------------
-
-enabling obsmarkers and rebase extension
-
-  $ cat >> $HGRCPATH << EOF
-  > [experimental]
-  > evolution = all
-  > [extensions]
-  > rebase =
-  > EOF
-
-  $ cd ../client
-
-  $ hg phase -r . --draft --force
-  $ hg rebase -r 6 -d 3
-  rebasing 6:9b42578d4447 tip "added f"
-
-  $ hg glog
-  @  7:99949238d9ac added f
-  |  draft
-  | o  5:b4e4bce66051 added e
-  | |  public
-  | o  4:1bb96358eda2 added d
-  |/   public
-  o  3:bf8a6e3011b3 added c
-  |  public
-  o  2:eaba929e866c added b
-  |  public
-  o  1:6cb0989601f1 added a
-  |  public
-  o  0:67145f466344 initialcommit
-     public
-
-  $ hg push -f
-  pushing to $TESTTMP/repo
-  IMPORTANT: if you use this extension, please contact
-  mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  unused and barring learning of users of this functionality, we drop this
-  extension in Mercurial 6.6.
-  searching for changes
-  storing changesets on the bundlestore
-  IMPORTANT: if you use this extension, please contact
-  mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  unused and barring learning of users of this functionality, we drop this
-  extension in Mercurial 6.6.
-  pushing 1 commit:
-      99949238d9ac  added f
-
-XXX: the phase should not have changed here
-  $ hg glog -r .
-  @  7:99949238d9ac added f
-  |  public
-  ~
-
-Unbundling on server to see obsmarkers being applied
-
-  $ cd ../repo
-
-  $ scratchnodes
-  1bb96358eda285b536c6d1c66846a7cdb2336cea 280a46a259a268f0e740c81c5a7751bdbfaec85f
-  6cb0989601f1fb5805238edfb16f3606713d9a0b 3b414252ff8acab801318445d88ff48faf4a28c3
-  99949238d9ac7f2424a33a46dface6f866afd059 090a24fe63f31d3b4bee714447f835c8c362ff57
-  9b42578d44473575994109161430d65dd147d16d 280a46a259a268f0e740c81c5a7751bdbfaec85f
-  b4e4bce660512ad3e71189e14588a70ac8e31fef 280a46a259a268f0e740c81c5a7751bdbfaec85f
-  bf8a6e3011b345146bbbedbcb1ebd4837571492a 280a46a259a268f0e740c81c5a7751bdbfaec85f
-  eaba929e866c59bc9a6aada5a9dd2f6990db83c0 280a46a259a268f0e740c81c5a7751bdbfaec85f
-
-  $ hg glog
-  IMPORTANT: if you use this extension, please contact
-  mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  unused and barring learning of users of this functionality, we drop this
-  extension in Mercurial 6.6.
-  IMPORTANT: if you use this extension, please contact (chg !)
-  mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be (chg !)
-  unused and barring learning of users of this functionality, we drop this (chg !)
-  extension in Mercurial 6.6. (chg !)
-  o  6:9b42578d4447 added f
-  |  draft
-  o  5:b4e4bce66051 added e
-  |  public
-  o  4:1bb96358eda2 added d
-  |  public
-  o  3:bf8a6e3011b3 added c
-  |  public
-  o  2:eaba929e866c added b
-  |  public
-  o  1:6cb0989601f1 added a
-  |  public
-  @  0:67145f466344 initialcommit
-     public
-
-  $ hg unbundle .hg/scratchbranches/filebundlestore/09/0a/090a24fe63f31d3b4bee714447f835c8c362ff57
-  IMPORTANT: if you use this extension, please contact
-  mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  unused and barring learning of users of this functionality, we drop this
-  extension in Mercurial 6.6.
-  adding changesets
-  adding manifests
-  adding file changes
-  added 1 changesets with 0 changes to 1 files (+1 heads)
-  1 new obsolescence markers
-  obsoleted 1 changesets
-  new changesets 99949238d9ac (1 drafts)
-  (run 'hg heads' to see heads, 'hg merge' to merge)
-
-  $ hg glog
-  IMPORTANT: if you use this extension, please contact
-  mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  unused and barring learning of users of this functionality, we drop this
-  extension in Mercurial 6.6.
-  o  7:99949238d9ac added f
-  |  draft
-  | o  5:b4e4bce66051 added e
-  | |  public
-  | o  4:1bb96358eda2 added d
-  |/   public
-  o  3:bf8a6e3011b3 added c
-  |  public
-  o  2:eaba929e866c added b
-  |  public
-  o  1:6cb0989601f1 added a
-  |  public
-  @  0:67145f466344 initialcommit
-     public
--- a/tests/test-infinitepush.t	Mon Aug 07 11:05:43 2023 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,472 +0,0 @@
-#require no-reposimplestore no-chg
-
-XXX-CHG this test hangs if `hg` is really `chg`. This was hidden by the use of
-`alias hg=chg` by run-tests.py. With that alias removed, this test is revealed
-to be buggy. This needs to be resolved sooner rather than later.
-
-
-Testing the infinitepush extension and the config options provided by it
-
-Setup
-
-  $ . "$TESTDIR/library-infinitepush.sh"
-  $ cp $HGRCPATH $TESTTMP/defaulthgrc
-  $ setupcommon
-  $ hg init repo
-  IMPORTANT: if you use this extension, please contact
-  mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  unused and barring learning of users of this functionality, we drop this
-  extension in Mercurial 6.6.
-  $ cd repo
-  $ setupserver
-  $ echo initialcommit > initialcommit
-  $ hg ci -Aqm "initialcommit"
-  IMPORTANT: if you use this extension, please contact
-  mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  unused and barring learning of users of this functionality, we drop this
-  extension in Mercurial 6.6.
-  $ hg phase --public .
-  IMPORTANT: if you use this extension, please contact
-  mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  unused and barring learning of users of this functionality, we drop this
-  extension in Mercurial 6.6.
-
-  $ cd ..
-  $ hg clone ssh://user@dummy/repo client -q
-  remote: IMPORTANT: if you use this extension, please contact
-  remote: mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  remote: unused and barring learning of users of this functionality, we drop this
-  remote: extension in Mercurial 6.6.
-  IMPORTANT: if you use this extension, please contact
-  mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  unused and barring learning of users of this functionality, we drop this
-  extension in Mercurial 6.6.
-  IMPORTANT: if you use this extension, please contact
-  mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  unused and barring learning of users of this functionality, we drop this
-  extension in Mercurial 6.6.
-
-Create two heads. Push first head alone, then two heads together. Make sure that
-multihead push works.
-  $ cd client
-  $ echo multihead1 > multihead1
-  $ hg add multihead1
-  IMPORTANT: if you use this extension, please contact
-  mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  unused and barring learning of users of this functionality, we drop this
-  extension in Mercurial 6.6.
-  $ hg ci -m "multihead1"
-  IMPORTANT: if you use this extension, please contact
-  mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  unused and barring learning of users of this functionality, we drop this
-  extension in Mercurial 6.6.
-  $ hg up null
-  IMPORTANT: if you use this extension, please contact
-  mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  unused and barring learning of users of this functionality, we drop this
-  extension in Mercurial 6.6.
-  0 files updated, 0 files merged, 2 files removed, 0 files unresolved
-  $ echo multihead2 > multihead2
-  $ hg ci -Am "multihead2"
-  IMPORTANT: if you use this extension, please contact
-  mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  unused and barring learning of users of this functionality, we drop this
-  extension in Mercurial 6.6.
-  adding multihead2
-  created new head
-  $ hg push -r . --bundle-store
-  IMPORTANT: if you use this extension, please contact
-  mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  unused and barring learning of users of this functionality, we drop this
-  extension in Mercurial 6.6.
-  pushing to ssh://user@dummy/repo
-  remote: IMPORTANT: if you use this extension, please contact
-  remote: mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  remote: unused and barring learning of users of this functionality, we drop this
-  remote: extension in Mercurial 6.6.
-  IMPORTANT: if you use this extension, please contact
-  mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  unused and barring learning of users of this functionality, we drop this
-  extension in Mercurial 6.6.
-  searching for changes
-  remote: IMPORTANT: if you use this extension, please contact
-  remote: mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  remote: unused and barring learning of users of this functionality, we drop this
-  remote: extension in Mercurial 6.6.
-  remote: pushing 1 commit:
-  remote:     ee4802bf6864  multihead2
-  $ hg push -r '1:2' --bundle-store
-  IMPORTANT: if you use this extension, please contact
-  mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  unused and barring learning of users of this functionality, we drop this
-  extension in Mercurial 6.6.
-  pushing to ssh://user@dummy/repo
-  remote: IMPORTANT: if you use this extension, please contact
-  remote: mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  remote: unused and barring learning of users of this functionality, we drop this
-  remote: extension in Mercurial 6.6.
-  IMPORTANT: if you use this extension, please contact
-  mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  unused and barring learning of users of this functionality, we drop this
-  extension in Mercurial 6.6.
-  searching for changes
-  remote: IMPORTANT: if you use this extension, please contact
-  remote: mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  remote: unused and barring learning of users of this functionality, we drop this
-  remote: extension in Mercurial 6.6.
-  remote: pushing 2 commits:
-  remote:     bc22f9a30a82  multihead1
-  remote:     ee4802bf6864  multihead2
-  $ scratchnodes
-  bc22f9a30a821118244deacbd732e394ed0b686c de1b7d132ba98f0172cd974e3e69dfa80faa335c
-  ee4802bf6864326a6b3dcfff5a03abc2a0a69b8f de1b7d132ba98f0172cd974e3e69dfa80faa335c
-
-Create two new scratch bookmarks
-  $ hg up 0
-  IMPORTANT: if you use this extension, please contact
-  mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  unused and barring learning of users of this functionality, we drop this
-  extension in Mercurial 6.6.
-  1 files updated, 0 files merged, 1 files removed, 0 files unresolved
-  $ echo scratchfirstpart > scratchfirstpart
-  $ hg ci -Am "scratchfirstpart"
-  IMPORTANT: if you use this extension, please contact
-  mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  unused and barring learning of users of this functionality, we drop this
-  extension in Mercurial 6.6.
-  adding scratchfirstpart
-  created new head
-  $ hg push -r . -B scratch/firstpart
-  IMPORTANT: if you use this extension, please contact
-  mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  unused and barring learning of users of this functionality, we drop this
-  extension in Mercurial 6.6.
-  pushing to ssh://user@dummy/repo
-  remote: IMPORTANT: if you use this extension, please contact
-  remote: mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  remote: unused and barring learning of users of this functionality, we drop this
-  remote: extension in Mercurial 6.6.
-  IMPORTANT: if you use this extension, please contact
-  mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  unused and barring learning of users of this functionality, we drop this
-  extension in Mercurial 6.6.
-  searching for changes
-  remote: IMPORTANT: if you use this extension, please contact
-  remote: mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  remote: unused and barring learning of users of this functionality, we drop this
-  remote: extension in Mercurial 6.6.
-  remote: pushing 1 commit:
-  remote:     176993b87e39  scratchfirstpart
-  $ hg up 0
-  IMPORTANT: if you use this extension, please contact
-  mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  unused and barring learning of users of this functionality, we drop this
-  extension in Mercurial 6.6.
-  0 files updated, 0 files merged, 1 files removed, 0 files unresolved
-  $ echo scratchsecondpart > scratchsecondpart
-  $ hg ci -Am "scratchsecondpart"
-  IMPORTANT: if you use this extension, please contact
-  mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  unused and barring learning of users of this functionality, we drop this
-  extension in Mercurial 6.6.
-  adding scratchsecondpart
-  created new head
-  $ hg push -r . -B scratch/secondpart
-  IMPORTANT: if you use this extension, please contact
-  mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  unused and barring learning of users of this functionality, we drop this
-  extension in Mercurial 6.6.
-  pushing to ssh://user@dummy/repo
-  remote: IMPORTANT: if you use this extension, please contact
-  remote: mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  remote: unused and barring learning of users of this functionality, we drop this
-  remote: extension in Mercurial 6.6.
-  IMPORTANT: if you use this extension, please contact
-  mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  unused and barring learning of users of this functionality, we drop this
-  extension in Mercurial 6.6.
-  searching for changes
-  remote: IMPORTANT: if you use this extension, please contact
-  remote: mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  remote: unused and barring learning of users of this functionality, we drop this
-  remote: extension in Mercurial 6.6.
-  remote: pushing 1 commit:
-  remote:     8db3891c220e  scratchsecondpart
-
-Pull two bookmarks from the second client
-  $ cd ..
-  $ hg clone ssh://user@dummy/repo client2 -q
-  remote: IMPORTANT: if you use this extension, please contact
-  remote: mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  remote: unused and barring learning of users of this functionality, we drop this
-  remote: extension in Mercurial 6.6.
-  IMPORTANT: if you use this extension, please contact
-  mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  unused and barring learning of users of this functionality, we drop this
-  extension in Mercurial 6.6.
-  IMPORTANT: if you use this extension, please contact
-  mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  unused and barring learning of users of this functionality, we drop this
-  extension in Mercurial 6.6.
-  $ cd client2
-  $ hg pull -B scratch/firstpart -B scratch/secondpart
-  IMPORTANT: if you use this extension, please contact
-  mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  unused and barring learning of users of this functionality, we drop this
-  extension in Mercurial 6.6.
-  remote: IMPORTANT: if you use this extension, please contact
-  remote: mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  remote: unused and barring learning of users of this functionality, we drop this
-  remote: extension in Mercurial 6.6.
-  IMPORTANT: if you use this extension, please contact
-  mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  unused and barring learning of users of this functionality, we drop this
-  extension in Mercurial 6.6.
-  pulling from ssh://user@dummy/repo
-  remote: IMPORTANT: if you use this extension, please contact
-  remote: mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  remote: unused and barring learning of users of this functionality, we drop this
-  remote: extension in Mercurial 6.6.
-  IMPORTANT: if you use this extension, please contact
-  mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  unused and barring learning of users of this functionality, we drop this
-  extension in Mercurial 6.6.
-  searching for changes
-  remote: IMPORTANT: if you use this extension, please contact
-  remote: mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  remote: unused and barring learning of users of this functionality, we drop this
-  remote: extension in Mercurial 6.6.
-  remote: IMPORTANT: if you use this extension, please contact
-  remote: mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  remote: unused and barring learning of users of this functionality, we drop this
-  remote: extension in Mercurial 6.6.
-  adding changesets
-  adding manifests
-  adding file changes
-  adding changesets
-  adding manifests
-  adding file changes
-  added 2 changesets with 2 changes to 2 files (+1 heads)
-  new changesets * (glob)
-  (run 'hg heads' to see heads, 'hg merge' to merge)
-  $ hg log -r scratch/secondpart -T '{node}'
-  IMPORTANT: if you use this extension, please contact
-  mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  unused and barring learning of users of this functionality, we drop this
-  extension in Mercurial 6.6.
-  8db3891c220e216f6da214e8254bd4371f55efca (no-eol)
-  $ hg log -r scratch/firstpart -T '{node}'
-  IMPORTANT: if you use this extension, please contact
-  mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  unused and barring learning of users of this functionality, we drop this
-  extension in Mercurial 6.6.
-  176993b87e39bd88d66a2cccadabe33f0b346339 (no-eol)
-Make two commits to the scratch branch
-
-  $ echo testpullbycommithash1 > testpullbycommithash1
-  $ hg ci -Am "testpullbycommithash1"
-  IMPORTANT: if you use this extension, please contact
-  mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  unused and barring learning of users of this functionality, we drop this
-  extension in Mercurial 6.6.
-  adding testpullbycommithash1
-  created new head
-  $ hg log -r '.' -T '{node}\n' > ../testpullbycommithash1
-  IMPORTANT: if you use this extension, please contact
-  mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  unused and barring learning of users of this functionality, we drop this
-  extension in Mercurial 6.6.
-  $ echo testpullbycommithash2 > testpullbycommithash2
-  $ hg ci -Aqm "testpullbycommithash2"
-  IMPORTANT: if you use this extension, please contact
-  mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  unused and barring learning of users of this functionality, we drop this
-  extension in Mercurial 6.6.
-  $ hg push -r . -B scratch/mybranch -q
-  IMPORTANT: if you use this extension, please contact
-  mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  unused and barring learning of users of this functionality, we drop this
-  extension in Mercurial 6.6.
-  remote: IMPORTANT: if you use this extension, please contact
-  remote: mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  remote: unused and barring learning of users of this functionality, we drop this
-  remote: extension in Mercurial 6.6.
-  IMPORTANT: if you use this extension, please contact
-  mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  unused and barring learning of users of this functionality, we drop this
-  extension in Mercurial 6.6.
-
-Create a third client and pull by commit hash.
-Make sure testpullbycommithash2 has not been fetched
-  $ cd ..
-  $ hg clone ssh://user@dummy/repo client3 -q
-  remote: IMPORTANT: if you use this extension, please contact
-  remote: mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  remote: unused and barring learning of users of this functionality, we drop this
-  remote: extension in Mercurial 6.6.
-  IMPORTANT: if you use this extension, please contact
-  mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  unused and barring learning of users of this functionality, we drop this
-  extension in Mercurial 6.6.
-  IMPORTANT: if you use this extension, please contact
-  mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  unused and barring learning of users of this functionality, we drop this
-  extension in Mercurial 6.6.
-  $ cd client3
-  $ hg pull -r `cat ../testpullbycommithash1`
-  IMPORTANT: if you use this extension, please contact
-  mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  unused and barring learning of users of this functionality, we drop this
-  extension in Mercurial 6.6.
-  pulling from ssh://user@dummy/repo
-  remote: IMPORTANT: if you use this extension, please contact
-  remote: mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  remote: unused and barring learning of users of this functionality, we drop this
-  remote: extension in Mercurial 6.6.
-  IMPORTANT: if you use this extension, please contact
-  mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  unused and barring learning of users of this functionality, we drop this
-  extension in Mercurial 6.6.
-  searching for changes
-  remote: IMPORTANT: if you use this extension, please contact
-  remote: mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  remote: unused and barring learning of users of this functionality, we drop this
-  remote: extension in Mercurial 6.6.
-  adding changesets
-  adding manifests
-  adding file changes
-  added 1 changesets with 1 changes to 1 files
-  new changesets 33910bfe6ffe (1 drafts)
-  (run 'hg update' to get a working copy)
-  $ hg log -G -T '{desc} {phase} {bookmarks}'
-  IMPORTANT: if you use this extension, please contact
-  mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  unused and barring learning of users of this functionality, we drop this
-  extension in Mercurial 6.6.
-  o  testpullbycommithash1 draft
-  |
-  @  initialcommit public
-  
-Make public commit in the repo and pull it.
-Make sure phase on the client is public.
-  $ cd ../repo
-  $ echo publiccommit > publiccommit
-  $ hg ci -Aqm "publiccommit"
-  IMPORTANT: if you use this extension, please contact
-  mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  unused and barring learning of users of this functionality, we drop this
-  extension in Mercurial 6.6.
-  $ hg phase --public .
-  IMPORTANT: if you use this extension, please contact
-  mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  unused and barring learning of users of this functionality, we drop this
-  extension in Mercurial 6.6.
-  $ cd ../client3
-  $ hg pull
-  IMPORTANT: if you use this extension, please contact
-  mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  unused and barring learning of users of this functionality, we drop this
-  extension in Mercurial 6.6.
-  pulling from ssh://user@dummy/repo
-  remote: IMPORTANT: if you use this extension, please contact
-  remote: mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  remote: unused and barring learning of users of this functionality, we drop this
-  remote: extension in Mercurial 6.6.
-  IMPORTANT: if you use this extension, please contact
-  mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  unused and barring learning of users of this functionality, we drop this
-  extension in Mercurial 6.6.
-  searching for changes
-  adding changesets
-  adding manifests
-  adding file changes
-  added 1 changesets with 1 changes to 1 files (+1 heads)
-  new changesets a79b6597f322
-  (run 'hg heads' to see heads, 'hg merge' to merge)
-  $ hg log -G -T '{desc} {phase} {bookmarks} {node|short}'
-  IMPORTANT: if you use this extension, please contact
-  mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  unused and barring learning of users of this functionality, we drop this
-  extension in Mercurial 6.6.
-  o  publiccommit public  a79b6597f322
-  |
-  | o  testpullbycommithash1 draft  33910bfe6ffe
-  |/
-  @  initialcommit public  67145f466344
-  
-  $ hg up a79b6597f322
-  IMPORTANT: if you use this extension, please contact
-  mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  unused and barring learning of users of this functionality, we drop this
-  extension in Mercurial 6.6.
-  1 files updated, 0 files merged, 0 files removed, 0 files unresolved
-  $ echo scratchontopofpublic > scratchontopofpublic
-  $ hg ci -Aqm "scratchontopofpublic"
-  IMPORTANT: if you use this extension, please contact
-  mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  unused and barring learning of users of this functionality, we drop this
-  extension in Mercurial 6.6.
-  $ hg push -r . -B scratch/scratchontopofpublic
-  IMPORTANT: if you use this extension, please contact
-  mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  unused and barring learning of users of this functionality, we drop this
-  extension in Mercurial 6.6.
-  pushing to ssh://user@dummy/repo
-  remote: IMPORTANT: if you use this extension, please contact
-  remote: mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  remote: unused and barring learning of users of this functionality, we drop this
-  remote: extension in Mercurial 6.6.
-  IMPORTANT: if you use this extension, please contact
-  mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  unused and barring learning of users of this functionality, we drop this
-  extension in Mercurial 6.6.
-  searching for changes
-  remote: IMPORTANT: if you use this extension, please contact
-  remote: mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  remote: unused and barring learning of users of this functionality, we drop this
-  remote: extension in Mercurial 6.6.
-  remote: pushing 1 commit:
-  remote:     c70aee6da07d  scratchontopofpublic
-  $ cd ../client2
-  $ hg pull -B scratch/scratchontopofpublic
-  IMPORTANT: if you use this extension, please contact
-  mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  unused and barring learning of users of this functionality, we drop this
-  extension in Mercurial 6.6.
-  remote: IMPORTANT: if you use this extension, please contact
-  remote: mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  remote: unused and barring learning of users of this functionality, we drop this
-  remote: extension in Mercurial 6.6.
-  IMPORTANT: if you use this extension, please contact
-  mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  unused and barring learning of users of this functionality, we drop this
-  extension in Mercurial 6.6.
-  pulling from ssh://user@dummy/repo
-  remote: IMPORTANT: if you use this extension, please contact
-  remote: mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  remote: unused and barring learning of users of this functionality, we drop this
-  remote: extension in Mercurial 6.6.
-  IMPORTANT: if you use this extension, please contact
-  mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  unused and barring learning of users of this functionality, we drop this
-  extension in Mercurial 6.6.
-  searching for changes
-  remote: IMPORTANT: if you use this extension, please contact
-  remote: mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  remote: unused and barring learning of users of this functionality, we drop this
-  remote: extension in Mercurial 6.6.
-  adding changesets
-  adding manifests
-  adding file changes
-  adding changesets
-  adding manifests
-  adding file changes
-  added 2 changesets with 2 changes to 2 files (+1 heads)
-  new changesets a79b6597f322:c70aee6da07d (1 drafts)
-  (run 'hg heads .' to see heads, 'hg merge' to merge)
-  $ hg log -r scratch/scratchontopofpublic -T '{phase}'
-  IMPORTANT: if you use this extension, please contact
-  mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  unused and barring learning of users of this functionality, we drop this
-  extension in Mercurial 6.6.
-  draft (no-eol)
--- a/tests/test-journal.t	Mon Aug 07 11:05:43 2023 +0200
+++ b/tests/test-journal.t	Mon Aug 07 11:08:00 2023 +0200
@@ -84,6 +84,17 @@
   cb9a9f314b8b  book -f bar
   1e6c11564562  book -r tip bar
 
+Test that we track bookmark deletion
+
+  $ hg book -r . babar
+  $ hg book -f -r .~1 babar
+  $ hg book -d babar
+  $ hg journal babar
+  previous locations of 'babar':
+  000000000000  book -d babar
+  cb9a9f314b8b  book -f -r '.~1' babar
+  1e6c11564562  book -r . babar
+
 Test that bookmarks and working copy tracking is not mixed
 
   $ hg journal
@@ -99,6 +110,9 @@
   $ hg journal --all
   previous locations of the working copy and bookmarks:
   1e6c11564562  baz       book -r tip baz
+  000000000000  babar     book -d babar
+  cb9a9f314b8b  babar     book -f -r '.~1' babar
+  1e6c11564562  babar     book -r . babar
   1e6c11564562  bar       up
   1e6c11564562  .         up
   cb9a9f314b8b  bar       book -f bar
@@ -127,6 +141,9 @@
   $ hg journal "re:ba."
   previous locations of 're:ba.':
   1e6c11564562  baz       book -r tip baz
+  000000000000  babar     book -d babar
+  cb9a9f314b8b  babar     book -f -r '.~1' babar
+  1e6c11564562  babar     book -r . babar
   1e6c11564562  bar       up
   cb9a9f314b8b  bar       book -f bar
   1e6c11564562  bar       book -r tip bar
@@ -136,6 +153,9 @@
   $ hg journal --verbose --all
   previous locations of the working copy and bookmarks:
   000000000000 -> 1e6c11564562 foobar    baz      1970-01-01 00:00 +0000  book -r tip baz
+  cb9a9f314b8b -> 000000000000 foobar    babar    1970-01-01 00:00 +0000  book -d babar
+  1e6c11564562 -> cb9a9f314b8b foobar    babar    1970-01-01 00:00 +0000  book -f -r '.~1' babar
+  000000000000 -> 1e6c11564562 foobar    babar    1970-01-01 00:00 +0000  book -r . babar
   cb9a9f314b8b -> 1e6c11564562 foobar    bar      1970-01-01 00:00 +0000  up
   cb9a9f314b8b -> 1e6c11564562 foobar    .        1970-01-01 00:00 +0000  up
   1e6c11564562 -> cb9a9f314b8b foobar    bar      1970-01-01 00:00 +0000  book -f bar
--- a/tests/test-lfs.t	Mon Aug 07 11:05:43 2023 +0200
+++ b/tests/test-lfs.t	Mon Aug 07 11:08:00 2023 +0200
@@ -829,7 +829,7 @@
   > eh = exthelper.exthelper()
   > uisetup = eh.finaluisetup
   > 
-  > @eh.wrapfunction(wrapper, b'filelogrenamed')
+  > @eh.wrapfunction(wrapper, 'filelogrenamed')
   > def filelogrenamed(orig, orig1, self, node):
   >     ret = orig(orig1, self, node)
   >     if wrapper._islfs(self._revlog, node) and ret:
--- a/tests/test-narrow-expanddirstate.t	Mon Aug 07 11:05:43 2023 +0200
+++ b/tests/test-narrow-expanddirstate.t	Mon Aug 07 11:08:00 2023 +0200
@@ -99,7 +99,7 @@
   >       expandnarrowspec(ui, repo, encoding.environ.get(b'PATCHINCLUDES'))
   >       return orig(ui, repo, *args, **kwargs)
   > 
-  >   extensions.wrapfunction(patch, b'patch', overridepatch)
+  >   extensions.wrapfunction(patch, 'patch', overridepatch)
   > EOF
   $ cat >> ".hg/hgrc" <<EOF
   > [extensions]
--- a/tests/test-push-race.t	Mon Aug 07 11:05:43 2023 +0200
+++ b/tests/test-push-race.t	Mon Aug 07 11:08:00 2023 +0200
@@ -76,7 +76,7 @@
   >     return orig(pushop)
   > 
   > def uisetup(ui):
-  >     extensions.wrapfunction(exchange, b'_pushbundle2', delaypush)
+  >     extensions.wrapfunction(exchange, '_pushbundle2', delaypush)
   > EOF
 
   $ waiton () {
--- a/tests/test-ssh-bundle1.t	Mon Aug 07 11:05:43 2023 +0200
+++ b/tests/test-ssh-bundle1.t	Mon Aug 07 11:08:00 2023 +0200
@@ -427,7 +427,7 @@
   >     return res
   > 
   > def extsetup(ui):
-  >     extensions.wrapfunction(exchange, b'push', wrappedpush)
+  >     extensions.wrapfunction(exchange, 'push', wrappedpush)
   > EOF
 
   $ cat >> .hg/hgrc << EOF
--- a/tests/test-ssh.t	Mon Aug 07 11:05:43 2023 +0200
+++ b/tests/test-ssh.t	Mon Aug 07 11:08:00 2023 +0200
@@ -479,7 +479,7 @@
   >     return res
   > 
   > def extsetup(ui):
-  >     extensions.wrapfunction(exchange, b'push', wrappedpush)
+  >     extensions.wrapfunction(exchange, 'push', wrappedpush)
   > EOF
 
   $ cat >> .hg/hgrc << EOF
--- a/tests/test-strip.t	Mon Aug 07 11:05:43 2023 +0200
+++ b/tests/test-strip.t	Mon Aug 07 11:08:00 2023 +0200
@@ -970,7 +970,7 @@
   >          transaction.addpostclose(b"phase invalidation test", test)
   >     return transaction
   > def extsetup(ui):
-  >     extensions.wrapfunction(localrepo.localrepository, b"transaction",
+  >     extensions.wrapfunction(localrepo.localrepository, "transaction",
   >                             transactioncallback)
   > EOF
   $ hg up -C 2
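
The hunks above in tests/test-lfs.t, tests/test-narrow-expanddirstate.t,
tests/test-push-race.t, tests/test-ssh-bundle1.t, tests/test-ssh.t and
tests/test-strip.t all make the same mechanical change: the attribute name
passed to extensions.wrapfunction() is now a native str rather than a bytes
literal. As a rough sketch of the calling convention these tests now exercise
(the wrapper body is illustrative only and not part of this changeset):

  from mercurial import exchange, extensions

  def wrappedpush(orig, *args, **kwargs):
      # hypothetical wrapper: do nothing extra, just delegate to the
      # original exchange.push that was wrapped
      return orig(*args, **kwargs)

  def extsetup(ui):
      # the wrapped attribute is named with a str ('push'), not b'push'
      extensions.wrapfunction(exchange, 'push', wrappedpush)

With that registration in place, calls to exchange.push() in the process are
routed through wrappedpush before reaching the original function.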
--- a/tests/test-transaction-rollback-on-revlog-split.t	Mon Aug 07 11:05:43 2023 +0200
+++ b/tests/test-transaction-rollback-on-revlog-split.t	Mon Aug 07 11:08:00 2023 +0200
@@ -400,7 +400,6 @@
 The split was rollback
 
   $ f -s .hg/store/data*/file*
-  .hg/store/data/file.d: size=0
   .hg/store/data/file.i: size=1174
 
   $ hg tip