changeset 48479:715e4e81e39a

branching: merge stable into default
author Raphaël Gomès <rgomes@octobus.net>
date Wed, 08 Dec 2021 10:14:37 +0100
parents 0994125a31e5 (diff) 6e4999cb085e (current diff)
children 9cf5ac8c7109
files
diffstat 153 files changed, 2476 insertions(+), 1350 deletions(-)
--- a/contrib/automation/hgautomation/cli.py	Mon Dec 06 10:08:04 2021 +0100
+++ b/contrib/automation/hgautomation/cli.py	Wed Dec 08 10:14:37 2021 +0100
@@ -158,7 +158,7 @@
 
         windows.synchronize_hg(SOURCE_ROOT, revision, instance)
 
-        for py_version in ("2.7", "3.7", "3.8", "3.9"):
+        for py_version in ("2.7", "3.7", "3.8", "3.9", "3.10"):
             for arch in ("x86", "x64"):
                 windows.purge_hg(winrm_client)
                 windows.build_wheel(
@@ -377,7 +377,7 @@
     sp.add_argument(
         '--python-version',
         help='Python version to build for',
-        choices={'2.7', '3.7', '3.8', '3.9'},
+        choices={'2.7', '3.7', '3.8', '3.9', '3.10'},
         nargs='*',
         default=['3.8'],
     )
@@ -501,7 +501,7 @@
     sp.add_argument(
         '--python-version',
         help='Python version to use',
-        choices={'2.7', '3.5', '3.6', '3.7', '3.8', '3.9'},
+        choices={'2.7', '3.5', '3.6', '3.7', '3.8', '3.9', '3.10'},
         default='2.7',
     )
     sp.add_argument(
--- a/contrib/automation/hgautomation/windows.py	Mon Dec 06 10:08:04 2021 +0100
+++ b/contrib/automation/hgautomation/windows.py	Wed Dec 08 10:14:37 2021 +0100
@@ -129,6 +129,8 @@
 WHEEL_FILENAME_PYTHON38_X64 = 'mercurial-{version}-cp38-cp38-win_amd64.whl'
 WHEEL_FILENAME_PYTHON39_X86 = 'mercurial-{version}-cp39-cp39-win32.whl'
 WHEEL_FILENAME_PYTHON39_X64 = 'mercurial-{version}-cp39-cp39-win_amd64.whl'
+WHEEL_FILENAME_PYTHON310_X86 = 'mercurial-{version}-cp310-cp310-win32.whl'
+WHEEL_FILENAME_PYTHON310_X64 = 'mercurial-{version}-cp310-cp310-win_amd64.whl'
 
 EXE_FILENAME_PYTHON2_X86 = 'Mercurial-{version}-x86-python2.exe'
 EXE_FILENAME_PYTHON2_X64 = 'Mercurial-{version}-x64-python2.exe'
@@ -480,6 +482,8 @@
         dist_path / WHEEL_FILENAME_PYTHON38_X64.format(version=version),
         dist_path / WHEEL_FILENAME_PYTHON39_X86.format(version=version),
         dist_path / WHEEL_FILENAME_PYTHON39_X64.format(version=version),
+        dist_path / WHEEL_FILENAME_PYTHON310_X86.format(version=version),
+        dist_path / WHEEL_FILENAME_PYTHON310_X64.format(version=version),
     )
 
 
@@ -493,6 +497,8 @@
         dist_path / WHEEL_FILENAME_PYTHON38_X64.format(version=version),
         dist_path / WHEEL_FILENAME_PYTHON39_X86.format(version=version),
         dist_path / WHEEL_FILENAME_PYTHON39_X64.format(version=version),
+        dist_path / WHEEL_FILENAME_PYTHON310_X86.format(version=version),
+        dist_path / WHEEL_FILENAME_PYTHON310_X64.format(version=version),
         dist_path / EXE_FILENAME_PYTHON2_X86.format(version=version),
         dist_path / EXE_FILENAME_PYTHON2_X64.format(version=version),
         dist_path / EXE_FILENAME_PYTHON3_X86.format(version=version),
--- a/contrib/install-windows-dependencies.ps1	Mon Dec 06 10:08:04 2021 +0100
+++ b/contrib/install-windows-dependencies.ps1	Wed Dec 08 10:14:37 2021 +0100
@@ -29,10 +29,15 @@
 $PYTHON38_x64_URL = "https://www.python.org/ftp/python/3.8.10/python-3.8.10-amd64.exe"
 $PYTHON38_x64_SHA256 = "7628244cb53408b50639d2c1287c659f4e29d3dfdb9084b11aed5870c0c6a48a"
 
-$PYTHON39_x86_URL = "https://www.python.org/ftp/python/3.9.5/python-3.9.5.exe"
-$PYTHON39_x86_SHA256 = "505129081a839b699a6ab9064b441ad922ef03767b5dd4241fd0c2166baf64de"
-$PYTHON39_x64_URL = "https://www.python.org/ftp/python/3.9.5/python-3.9.5-amd64.exe"
-$PYTHON39_x64_SHA256 = "84d5243088ba00c11e51905c704dbe041040dfff044f4e1ce5476844ee2e6eac"
+$PYTHON39_x86_URL = "https://www.python.org/ftp/python/3.9.9/python-3.9.9.exe"
+$PYTHON39_x86_SHA256 = "6646a5683adf14d35e8c53aab946895bc0f0b825f7acac3a62cc85ee7d0dc71a"
+$PYTHON39_X64_URL = "https://www.python.org/ftp/python/3.9.9/python-3.9.9-amd64.exe"
+$PYTHON39_x64_SHA256 = "137d59e5c0b01a8f1bdcba08344402ae658c81c6bf03b6602bd8b4e951ad0714"
+
+$PYTHON310_x86_URL = "https://www.python.org/ftp/python/3.10.0/python-3.10.0.exe"
+$PYTHON310_x86_SHA256 = "ea896eeefb1db9e12fb89ec77a6e28c9fe52b4a162a34c85d9688be2ec2392e8"
+$PYTHON310_X64_URL = "https://www.python.org/ftp/python/3.10.0/python-3.10.0-amd64.exe"
+$PYTHON310_x64_SHA256 = "cb580eb7dc55f9198e650f016645023e8b2224cf7d033857d12880b46c5c94ef"
 
 # PIP 19.2.3.
 $PIP_URL = "https://github.com/pypa/get-pip/raw/309a56c5fd94bd1134053a541cb4657a4e47e09d/get-pip.py"
@@ -132,6 +137,8 @@
     Secure-Download $PYTHON38_x64_URL ${prefix}\assets\python38-x64.exe $PYTHON38_x64_SHA256
     Secure-Download $PYTHON39_x86_URL ${prefix}\assets\python39-x86.exe $PYTHON39_x86_SHA256
     Secure-Download $PYTHON39_x64_URL ${prefix}\assets\python39-x64.exe $PYTHON39_x64_SHA256
+    Secure-Download $PYTHON310_x86_URL ${prefix}\assets\python310-x86.exe $PYTHON310_x86_SHA256
+    Secure-Download $PYTHON310_x64_URL ${prefix}\assets\python310-x64.exe $PYTHON310_x64_SHA256
     Secure-Download $PIP_URL ${pip} $PIP_SHA256
     Secure-Download $VS_BUILD_TOOLS_URL ${prefix}\assets\vs_buildtools.exe $VS_BUILD_TOOLS_SHA256
     Secure-Download $INNO_SETUP_URL ${prefix}\assets\InnoSetup.exe $INNO_SETUP_SHA256
@@ -146,6 +153,8 @@
 #    Install-Python3 "Python 3.8 64-bit" ${prefix}\assets\python38-x64.exe ${prefix}\python38-x64 ${pip}
     Install-Python3 "Python 3.9 32-bit" ${prefix}\assets\python39-x86.exe ${prefix}\python39-x86 ${pip}
     Install-Python3 "Python 3.9 64-bit" ${prefix}\assets\python39-x64.exe ${prefix}\python39-x64 ${pip}
+    Install-Python3 "Python 3.10 32-bit" ${prefix}\assets\python310-x86.exe ${prefix}\python310-x86 ${pip}
+    Install-Python3 "Python 3.10 64-bit" ${prefix}\assets\python310-x64.exe ${prefix}\python310-x64 ${pip}
 
     Write-Output "installing Visual Studio 2017 Build Tools and SDKs"
     Invoke-Process ${prefix}\assets\vs_buildtools.exe "--quiet --wait --norestart --nocache --channelUri https://aka.ms/vs/15/release/channel --add Microsoft.VisualStudio.Workload.MSBuildTools --add Microsoft.VisualStudio.Component.Windows10SDK.17763 --add Microsoft.VisualStudio.Workload.VCTools --add Microsoft.VisualStudio.Component.Windows10SDK --add Microsoft.VisualStudio.Component.VC.140"
--- a/contrib/packaging/requirements-windows-py3.txt	Mon Dec 06 10:08:04 2021 +0100
+++ b/contrib/packaging/requirements-windows-py3.txt	Wed Dec 08 10:14:37 2021 +0100
@@ -1,68 +1,84 @@
 #
-# This file is autogenerated by pip-compile
+# This file is autogenerated by pip-compile with python 3.7
 # To update, run:
 #
 #    pip-compile --generate-hashes --output-file=contrib/packaging/requirements-windows-py3.txt contrib/packaging/requirements-windows.txt.in
 #
 atomicwrites==1.4.0 \
     --hash=sha256:6d1784dea7c0c8d4a5172b6c620f40b6e4cbfdf96d783691f2e1302a7b88e197 \
-    --hash=sha256:ae70396ad1a434f9c7046fd2dd196fc04b12f9e91ffb859164193be8b6168a7a \
+    --hash=sha256:ae70396ad1a434f9c7046fd2dd196fc04b12f9e91ffb859164193be8b6168a7a
     # via pytest
 attrs==21.2.0 \
     --hash=sha256:149e90d6d8ac20db7a955ad60cf0e6881a3f20d37096140088356da6c716b0b1 \
-    --hash=sha256:ef6aaac3ca6cd92904cdd0d83f629a15f18053ec84e6432106f7a4d04ae4f5fb \
+    --hash=sha256:ef6aaac3ca6cd92904cdd0d83f629a15f18053ec84e6432106f7a4d04ae4f5fb
     # via pytest
 cached-property==1.5.2 \
     --hash=sha256:9fa5755838eecbb2d234c3aa390bd80fbd3ac6b6869109bfc1b499f7bd89a130 \
-    --hash=sha256:df4f613cf7ad9a588cc381aaf4a512d26265ecebd5eb9e1ba12f1319eb85a6a0 \
+    --hash=sha256:df4f613cf7ad9a588cc381aaf4a512d26265ecebd5eb9e1ba12f1319eb85a6a0
     # via pygit2
 certifi==2021.5.30 \
     --hash=sha256:2bbf76fd432960138b3ef6dda3dde0544f27cbf8546c458e60baf371917ba9ee \
-    --hash=sha256:50b1e4f8446b06f41be7dd6338db18e0990601dce795c2b1686458aa7e8fa7d8 \
+    --hash=sha256:50b1e4f8446b06f41be7dd6338db18e0990601dce795c2b1686458aa7e8fa7d8
     # via dulwich
-cffi==1.14.4 \
-    --hash=sha256:00a1ba5e2e95684448de9b89888ccd02c98d512064b4cb987d48f4b40aa0421e \
-    --hash=sha256:00e28066507bfc3fe865a31f325c8391a1ac2916219340f87dfad602c3e48e5d \
-    --hash=sha256:045d792900a75e8b1e1b0ab6787dd733a8190ffcf80e8c8ceb2fb10a29ff238a \
-    --hash=sha256:0638c3ae1a0edfb77c6765d487fee624d2b1ee1bdfeffc1f0b58c64d149e7eec \
-    --hash=sha256:105abaf8a6075dc96c1fe5ae7aae073f4696f2905fde6aeada4c9d2926752362 \
-    --hash=sha256:155136b51fd733fa94e1c2ea5211dcd4c8879869008fc811648f16541bf99668 \
-    --hash=sha256:1a465cbe98a7fd391d47dce4b8f7e5b921e6cd805ef421d04f5f66ba8f06086c \
-    --hash=sha256:1d2c4994f515e5b485fd6d3a73d05526aa0fcf248eb135996b088d25dfa1865b \
-    --hash=sha256:2c24d61263f511551f740d1a065eb0212db1dbbbbd241db758f5244281590c06 \
-    --hash=sha256:51a8b381b16ddd370178a65360ebe15fbc1c71cf6f584613a7ea08bfad946698 \
-    --hash=sha256:594234691ac0e9b770aee9fcdb8fa02c22e43e5c619456efd0d6c2bf276f3eb2 \
-    --hash=sha256:5cf4be6c304ad0b6602f5c4e90e2f59b47653ac1ed9c662ed379fe48a8f26b0c \
-    --hash=sha256:64081b3f8f6f3c3de6191ec89d7dc6c86a8a43911f7ecb422c60e90c70be41c7 \
-    --hash=sha256:6bc25fc545a6b3d57b5f8618e59fc13d3a3a68431e8ca5fd4c13241cd70d0009 \
-    --hash=sha256:798caa2a2384b1cbe8a2a139d80734c9db54f9cc155c99d7cc92441a23871c03 \
-    --hash=sha256:7c6b1dece89874d9541fc974917b631406233ea0440d0bdfbb8e03bf39a49b3b \
-    --hash=sha256:840793c68105fe031f34d6a086eaea153a0cd5c491cde82a74b420edd0a2b909 \
-    --hash=sha256:8d6603078baf4e11edc4168a514c5ce5b3ba6e3e9c374298cb88437957960a53 \
-    --hash=sha256:9cc46bc107224ff5b6d04369e7c595acb700c3613ad7bcf2e2012f62ece80c35 \
-    --hash=sha256:9f7a31251289b2ab6d4012f6e83e58bc3b96bd151f5b5262467f4bb6b34a7c26 \
-    --hash=sha256:9ffb888f19d54a4d4dfd4b3f29bc2c16aa4972f1c2ab9c4ab09b8ab8685b9c2b \
-    --hash=sha256:a7711edca4dcef1a75257b50a2fbfe92a65187c47dab5a0f1b9b332c5919a3fb \
-    --hash=sha256:af5c59122a011049aad5dd87424b8e65a80e4a6477419c0c1015f73fb5ea0293 \
-    --hash=sha256:b18e0a9ef57d2b41f5c68beefa32317d286c3d6ac0484efd10d6e07491bb95dd \
-    --hash=sha256:b4e248d1087abf9f4c10f3c398896c87ce82a9856494a7155823eb45a892395d \
-    --hash=sha256:ba4e9e0ae13fc41c6b23299545e5ef73055213e466bd107953e4a013a5ddd7e3 \
-    --hash=sha256:c6332685306b6417a91b1ff9fae889b3ba65c2292d64bd9245c093b1b284809d \
-    --hash=sha256:d9efd8b7a3ef378dd61a1e77367f1924375befc2eba06168b6ebfa903a5e59ca \
-    --hash=sha256:df5169c4396adc04f9b0a05f13c074df878b6052430e03f50e68adf3a57aa28d \
-    --hash=sha256:ebb253464a5d0482b191274f1c8bf00e33f7e0b9c66405fbffc61ed2c839c775 \
-    --hash=sha256:ec80dc47f54e6e9a78181ce05feb71a0353854cc26999db963695f950b5fb375 \
-    --hash=sha256:f032b34669220030f905152045dfa27741ce1a6db3324a5bc0b96b6c7420c87b \
-    --hash=sha256:f60567825f791c6f8a592f3c6e3bd93dd2934e3f9dac189308426bd76b00ef3b \
-    --hash=sha256:f803eaa94c2fcda012c047e62bc7a51b0bdabda1cad7a92a522694ea2d76e49f \
+cffi==1.15.0 \
+    --hash=sha256:00c878c90cb53ccfaae6b8bc18ad05d2036553e6d9d1d9dbcf323bbe83854ca3 \
+    --hash=sha256:0104fb5ae2391d46a4cb082abdd5c69ea4eab79d8d44eaaf79f1b1fd806ee4c2 \
+    --hash=sha256:06c48159c1abed75c2e721b1715c379fa3200c7784271b3c46df01383b593636 \
+    --hash=sha256:0808014eb713677ec1292301ea4c81ad277b6cdf2fdd90fd540af98c0b101d20 \
+    --hash=sha256:10dffb601ccfb65262a27233ac273d552ddc4d8ae1bf93b21c94b8511bffe728 \
+    --hash=sha256:14cd121ea63ecdae71efa69c15c5543a4b5fbcd0bbe2aad864baca0063cecf27 \
+    --hash=sha256:17771976e82e9f94976180f76468546834d22a7cc404b17c22df2a2c81db0c66 \
+    --hash=sha256:181dee03b1170ff1969489acf1c26533710231c58f95534e3edac87fff06c443 \
+    --hash=sha256:23cfe892bd5dd8941608f93348c0737e369e51c100d03718f108bf1add7bd6d0 \
+    --hash=sha256:263cc3d821c4ab2213cbe8cd8b355a7f72a8324577dc865ef98487c1aeee2bc7 \
+    --hash=sha256:2756c88cbb94231c7a147402476be2c4df2f6078099a6f4a480d239a8817ae39 \
+    --hash=sha256:27c219baf94952ae9d50ec19651a687b826792055353d07648a5695413e0c605 \
+    --hash=sha256:2a23af14f408d53d5e6cd4e3d9a24ff9e05906ad574822a10563efcef137979a \
+    --hash=sha256:31fb708d9d7c3f49a60f04cf5b119aeefe5644daba1cd2a0fe389b674fd1de37 \
+    --hash=sha256:3415c89f9204ee60cd09b235810be700e993e343a408693e80ce7f6a40108029 \
+    --hash=sha256:3773c4d81e6e818df2efbc7dd77325ca0dcb688116050fb2b3011218eda36139 \
+    --hash=sha256:3b96a311ac60a3f6be21d2572e46ce67f09abcf4d09344c49274eb9e0bf345fc \
+    --hash=sha256:3f7d084648d77af029acb79a0ff49a0ad7e9d09057a9bf46596dac9514dc07df \
+    --hash=sha256:41d45de54cd277a7878919867c0f08b0cf817605e4eb94093e7516505d3c8d14 \
+    --hash=sha256:4238e6dab5d6a8ba812de994bbb0a79bddbdf80994e4ce802b6f6f3142fcc880 \
+    --hash=sha256:45db3a33139e9c8f7c09234b5784a5e33d31fd6907800b316decad50af323ff2 \
+    --hash=sha256:45e8636704eacc432a206ac7345a5d3d2c62d95a507ec70d62f23cd91770482a \
+    --hash=sha256:4958391dbd6249d7ad855b9ca88fae690783a6be9e86df65865058ed81fc860e \
+    --hash=sha256:4a306fa632e8f0928956a41fa8e1d6243c71e7eb59ffbd165fc0b41e316b2474 \
+    --hash=sha256:57e9ac9ccc3101fac9d6014fba037473e4358ef4e89f8e181f8951a2c0162024 \
+    --hash=sha256:59888172256cac5629e60e72e86598027aca6bf01fa2465bdb676d37636573e8 \
+    --hash=sha256:5e069f72d497312b24fcc02073d70cb989045d1c91cbd53979366077959933e0 \
+    --hash=sha256:64d4ec9f448dfe041705426000cc13e34e6e5bb13736e9fd62e34a0b0c41566e \
+    --hash=sha256:6dc2737a3674b3e344847c8686cf29e500584ccad76204efea14f451d4cc669a \
+    --hash=sha256:74fdfdbfdc48d3f47148976f49fab3251e550a8720bebc99bf1483f5bfb5db3e \
+    --hash=sha256:75e4024375654472cc27e91cbe9eaa08567f7fbdf822638be2814ce059f58032 \
+    --hash=sha256:786902fb9ba7433aae840e0ed609f45c7bcd4e225ebb9c753aa39725bb3e6ad6 \
+    --hash=sha256:8b6c2ea03845c9f501ed1313e78de148cd3f6cad741a75d43a29b43da27f2e1e \
+    --hash=sha256:91d77d2a782be4274da750752bb1650a97bfd8f291022b379bb8e01c66b4e96b \
+    --hash=sha256:91ec59c33514b7c7559a6acda53bbfe1b283949c34fe7440bcf917f96ac0723e \
+    --hash=sha256:920f0d66a896c2d99f0adbb391f990a84091179542c205fa53ce5787aff87954 \
+    --hash=sha256:a5263e363c27b653a90078143adb3d076c1a748ec9ecc78ea2fb916f9b861962 \
+    --hash=sha256:abb9a20a72ac4e0fdb50dae135ba5e77880518e742077ced47eb1499e29a443c \
+    --hash=sha256:c2051981a968d7de9dd2d7b87bcb9c939c74a34626a6e2f8181455dd49ed69e4 \
+    --hash=sha256:c21c9e3896c23007803a875460fb786118f0cdd4434359577ea25eb556e34c55 \
+    --hash=sha256:c2502a1a03b6312837279c8c1bd3ebedf6c12c4228ddbad40912d671ccc8a962 \
+    --hash=sha256:d4d692a89c5cf08a8557fdeb329b82e7bf609aadfaed6c0d79f5a449a3c7c023 \
+    --hash=sha256:da5db4e883f1ce37f55c667e5c0de439df76ac4cb55964655906306918e7363c \
+    --hash=sha256:e7022a66d9b55e93e1a845d8c9eba2a1bebd4966cd8bfc25d9cd07d515b33fa6 \
+    --hash=sha256:ef1f279350da2c586a69d32fc8733092fd32cc8ac95139a00377841f59a3f8d8 \
+    --hash=sha256:f54a64f8b0c8ff0b64d18aa76675262e1700f3995182267998c31ae974fbc382 \
+    --hash=sha256:f5c7150ad32ba43a07c4479f40241756145a1f03b43480e058cfd862bf5041c7 \
+    --hash=sha256:f6f824dc3bce0edab5f427efcfb1d63ee75b6fcb7282900ccaf925be84efb0fc \
+    --hash=sha256:fd8a250edc26254fe5b33be00402e6d287f562b6a5b2152dec302fa15bb3e997 \
+    --hash=sha256:ffaa5c925128e29efbde7301d8ecaf35c8c60ffbcd6a1ffd3a552177c8e5e796
     # via pygit2
 colorama==0.4.4 \
     --hash=sha256:5941b2b48a20143d2267e95b1c2a7603ce057ee39fd88e7329b0c292aa16869b \
-    --hash=sha256:9f47eda37229f68eee03b24b9748937c7dc3868f906e8ba69fbcbdd3bc5dc3e2 \
+    --hash=sha256:9f47eda37229f68eee03b24b9748937c7dc3868f906e8ba69fbcbdd3bc5dc3e2
     # via pytest
 docutils==0.16 \
     --hash=sha256:0c5b78adfbf7762415433f5515cd5c9e762339e23369dbe8000d84a4bf4ab3af \
-    --hash=sha256:c2de3a60e9e7d07be26b7f2b00ca0309c207e06c100f9cc2a94931fc75a478fc \
+    --hash=sha256:c2de3a60e9e7d07be26b7f2b00ca0309c207e06c100f9cc2a94931fc75a478fc
     # via -r contrib/packaging/requirements-windows.txt.in
 dulwich==0.20.6 ; python_version >= "3" \
     --hash=sha256:1ccd55e38fa9f169290f93e027ab4508202f5bdd6ef534facac4edd3f6903f0d \
@@ -77,26 +93,29 @@
     --hash=sha256:8f7a7f973be2beedfb10dd8d3eb6bdf9ec466c72ad555704897cbd6357fe5021 \
     --hash=sha256:bea6e6caffc6c73bfd1647714c5715ab96ac49deb8beb8b67511529afa25685a \
     --hash=sha256:e5871b86a079e9e290f52ab14559cea1b694a0b8ed2b9ebb898f6ced7f14a406 \
-    --hash=sha256:e593f514b8ac740b4ceeb047745b4719bfc9f334904245c6edcb3a9d002f577b \
+    --hash=sha256:e593f514b8ac740b4ceeb047745b4719bfc9f334904245c6edcb3a9d002f577b
     # via -r contrib/packaging/requirements-windows.txt.in
 fuzzywuzzy==0.18.0 \
-    --hash=sha256:45016e92264780e58972dca1b3d939ac864b78437422beecebb3095f8efd00e8 \
+    --hash=sha256:45016e92264780e58972dca1b3d939ac864b78437422beecebb3095f8efd00e8
     # via -r contrib/packaging/requirements-windows.txt.in
 idna==3.2 \
     --hash=sha256:14475042e284991034cb48e06f6851428fb14c4dc953acd9be9a5e95c7b6dd7a \
-    --hash=sha256:467fbad99067910785144ce333826c71fb0e63a425657295239737f7ecd125f3 \
+    --hash=sha256:467fbad99067910785144ce333826c71fb0e63a425657295239737f7ecd125f3
     # via yarl
 importlib-metadata==3.1.0 \
     --hash=sha256:590690d61efdd716ff82c39ca9a9d4209252adfe288a4b5721181050acbd4175 \
-    --hash=sha256:d9b8a46a0885337627a6430db287176970fff18ad421becec1d64cfc763c2099 \
-    # via keyring, pluggy, pytest
+    --hash=sha256:d9b8a46a0885337627a6430db287176970fff18ad421becec1d64cfc763c2099
+    # via
+    #   keyring
+    #   pluggy
+    #   pytest
 iniconfig==1.1.1 \
     --hash=sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3 \
-    --hash=sha256:bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32 \
+    --hash=sha256:bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32
     # via pytest
 keyring==21.4.0 \
     --hash=sha256:4e34ea2fdec90c1c43d6610b5a5fafa1b9097db1802948e90caf5763974b8f8d \
-    --hash=sha256:9aeadd006a852b78f4b4ef7c7556c2774d2432bbef8ee538a3e9089ac8b11466 \
+    --hash=sha256:9aeadd006a852b78f4b4ef7c7556c2774d2432bbef8ee538a3e9089ac8b11466
     # via -r contrib/packaging/requirements-windows.txt.in
 multidict==5.1.0 \
     --hash=sha256:018132dbd8688c7a69ad89c4a3f39ea2f9f33302ebe567a879da8f4ca73f0d0a \
@@ -135,62 +154,68 @@
     --hash=sha256:ecc771ab628ea281517e24fd2c52e8f31c41e66652d07599ad8818abaad38cda \
     --hash=sha256:f200755768dc19c6f4e2b672421e0ebb3dd54c38d5a4f262b872d8cfcc9e93b5 \
     --hash=sha256:f21756997ad8ef815d8ef3d34edd98804ab5ea337feedcd62fb52d22bf531281 \
-    --hash=sha256:fc13a9524bc18b6fb6e0dbec3533ba0496bbed167c56d0aabefd965584557d80 \
+    --hash=sha256:fc13a9524bc18b6fb6e0dbec3533ba0496bbed167c56d0aabefd965584557d80
     # via yarl
 packaging==21.0 \
     --hash=sha256:7dc96269f53a4ccec5c0670940a4281106dd0bb343f47b7471f779df49c2fbe7 \
-    --hash=sha256:c86254f9220d55e31cc94d69bade760f0847da8000def4dfe1c6b872fd14ff14 \
+    --hash=sha256:c86254f9220d55e31cc94d69bade760f0847da8000def4dfe1c6b872fd14ff14
     # via pytest
 pluggy==0.13.1 \
     --hash=sha256:15b2acde666561e1298d71b523007ed7364de07029219b604cf808bfa1c765b0 \
-    --hash=sha256:966c145cd83c96502c3c3868f50408687b38434af77734af1e9ca461a4081d2d \
+    --hash=sha256:966c145cd83c96502c3c3868f50408687b38434af77734af1e9ca461a4081d2d
     # via pytest
 py==1.10.0 \
     --hash=sha256:21b81bda15b66ef5e1a777a21c4dcd9c20ad3efd0b3f817e7a809035269e1bd3 \
-    --hash=sha256:3b80836aa6d1feeaa108e046da6423ab8f6ceda6468545ae8d02d9d58d18818a \
+    --hash=sha256:3b80836aa6d1feeaa108e046da6423ab8f6ceda6468545ae8d02d9d58d18818a
     # via pytest
-pycparser==2.20 \
-    --hash=sha256:2d475327684562c3a96cc71adf7dc8c4f0565175cf86b6d7a404ff4c771f15f0 \
-    --hash=sha256:7582ad22678f0fcd81102833f60ef8d0e57288b6b5fb00323d101be910e35705 \
+pycparser==2.21 \
+    --hash=sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9 \
+    --hash=sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206
     # via cffi
-pygit2==1.4.0 ; python_version >= "3" \
-    --hash=sha256:0d298098e286eeda000e49ca7e1b41f87300e10dd8b9d06b32b008bd61f50b83 \
-    --hash=sha256:0ee135eb2cd8b07ce1374f3596cc5c3213472d6389bad6a4c5d87d8e267e93e9 \
-    --hash=sha256:32eb863d6651d4890ced318505ea8dc229bd9637deaf29c898de1ab574d727a0 \
-    --hash=sha256:37d6d7d6d7804c42a0fe23425c72e38093488525092fc5e51a05684e63503ce7 \
-    --hash=sha256:41204b6f3406d9f53147710f3cc485d77181ba67f57c34d36b7c86de1c14a18c \
-    --hash=sha256:818c91b582109d90580c5da74af783738838353f15eb12eeb734d80a974b05a3 \
-    --hash=sha256:8306a302487dac67df7af6a064bb37e8a8eb4138958f9560ff49ff162e185dab \
-    --hash=sha256:9c2f2d9ef59513007b66f6534b000792b614de3faf60313a0a68f6b8571aea85 \
-    --hash=sha256:9c8d5881eb709e2e2e13000b507a131bd5fb91a879581030088d0ddffbcd19af \
-    --hash=sha256:b422e417739def0a136a6355723dfe8a5ffc83db5098076f28a14f1d139779c1 \
-    --hash=sha256:cbeb38ab1df9b5d8896548a11e63aae8a064763ab5f1eabe4475e6b8a78ee1c8 \
-    --hash=sha256:cf00481ddf053e549a6edd0216bdc267b292d261eae02a67bb3737de920cbf88 \
-    --hash=sha256:d0d889144e9487d926fecea947c3f39ce5f477e521d7d467d2e66907e4cd657d \
-    --hash=sha256:ddb7a1f6d38063e8724abfa1cfdfb0f9b25014b8bca0546274b7a84b873a3888 \
-    --hash=sha256:e9037a7d810750fe23c9f5641ef14a0af2525ff03e14752cd4f73e1870ecfcb0 \
-    --hash=sha256:ec5c0365a9bdfcac1609d20868507b28685ec5ea7cc3a2c903c9b62ef2e0bbc0 \
-    --hash=sha256:fdd8ba30cda277290e000322f505132f590cf89bd7d31829b45a3cb57447ec32 \
+pygit2==1.7.1 ; python_version >= "3" \
+    --hash=sha256:2c9e95efb86c0b32cc07c26be3d179e851ca4a7899c47fef63c4203963144f5e \
+    --hash=sha256:3ddacbf461652d3d4900382f821d9fbd5ae2dedecd7862b5245842419ad0ccba \
+    --hash=sha256:4cb0414df6089d0072ebe93ff2f34730737172dd5f0e72289567d06a6caf09c0 \
+    --hash=sha256:56e960dc74f4582bfa3ca17a1a9d542732fc93b5cf8f82574c235d06b2d61eae \
+    --hash=sha256:6b17ab922c2a2d99b30ab9222472b07732bf7261d9f9655a4ea23b4c700049d8 \
+    --hash=sha256:73a7b471f22cb59e8729016de1f447c472b3b2c1cc2b622194e5e3b48a7f5776 \
+    --hash=sha256:761a8850e33822796c1c24d411d5cc2460c04e1a74b04ae8560efd3596bbd6bd \
+    --hash=sha256:7c467e81158f5827b3bca6362e5cc9b92857eff9de65034d338c1f18524b09be \
+    --hash=sha256:7c56e10592e62610a19bd3e2a633aafe3488c57b906c7c2fde0299937f0f0b2f \
+    --hash=sha256:7cc2a8e29cc9598310a78cf58b70d9331277cf374802be8f97d97c4a9e5d8387 \
+    --hash=sha256:812670f7994f31778e873a9eced29d2bbfa91674e8be0ab1e974c8a4bda9cbab \
+    --hash=sha256:8cdb0b1d6c3d24b44f340fed143b16e64ba23fe2a449f1a5db87aaf9339a9dbe \
+    --hash=sha256:91b77a305d8d18b649396e66e832d654cd593a3d29b5728f753f254a04533812 \
+    --hash=sha256:a75bcde32238c77eb0cf7d9698a5aa899408d7ad999a5920a29a7c4b80fdeaa7 \
+    --hash=sha256:b060240cf3038e7a0706bbfc5436dd03b8d5ac797ac1d512b613f4d04b974c80 \
+    --hash=sha256:cdfa61c0428a8182e5a6a1161c017b824cd511574f080a40b10d6413774eb0ca \
+    --hash=sha256:d7faa29558436decc2e78110f38d6677eb366b683ba5cdc2803d47195711165d \
+    --hash=sha256:d831825ad9c3b3c28e6b3ef8a2401ad2d3fd4db5455427ff27175a7e254e2592 \
+    --hash=sha256:df4c477bdfac85d32a1e3180282cd829a0980aa69be9bd0f7cbd4db1778ca72b \
+    --hash=sha256:eced3529bafcaaac015d08dfaa743b3cbad37fcd5b13ae9d280b8b7f716ec5ce \
+    --hash=sha256:fec17e2da668e6bb192d777417aad9c7ca924a166d0a0b9a81a11e00362b1bc7
     # via -r contrib/packaging/requirements-windows.txt.in
 pygments==2.7.1 \
     --hash=sha256:307543fe65c0947b126e83dd5a61bd8acbd84abec11f43caebaf5534cbc17998 \
-    --hash=sha256:926c3f319eda178d1bd90851e4317e6d8cdb5e292a3386aac9bd75eca29cf9c7 \
+    --hash=sha256:926c3f319eda178d1bd90851e4317e6d8cdb5e292a3386aac9bd75eca29cf9c7
     # via -r contrib/packaging/requirements-windows.txt.in
 pyparsing==2.4.7 \
     --hash=sha256:c203ec8783bf771a155b207279b9bccb8dea02d8f0c9e5f8ead507bc3246ecc1 \
-    --hash=sha256:ef9d7589ef3c200abe66653d3f1ab1033c3c419ae9b9bdb1240a85b024efc88b \
+    --hash=sha256:ef9d7589ef3c200abe66653d3f1ab1033c3c419ae9b9bdb1240a85b024efc88b
     # via packaging
-pytest-vcr==1.0.2 \
-    --hash=sha256:23ee51b75abbcc43d926272773aae4f39f93aceb75ed56852d0bf618f92e1896 \
-    # via -r contrib/packaging/requirements-windows.txt.in
 pytest==6.2.4 \
     --hash=sha256:50bcad0a0b9c5a72c8e4e7c9855a3ad496ca6a881a3641b4260605450772c54b \
-    --hash=sha256:91ef2131a9bd6be8f76f1f08eac5c5317221d6ad1e143ae03894b862e8976890 \
+    --hash=sha256:91ef2131a9bd6be8f76f1f08eac5c5317221d6ad1e143ae03894b862e8976890
     # via pytest-vcr
+pytest-vcr==1.0.2 \
+    --hash=sha256:23ee51b75abbcc43d926272773aae4f39f93aceb75ed56852d0bf618f92e1896
+    # via -r contrib/packaging/requirements-windows.txt.in
 pywin32-ctypes==0.2.0 \
     --hash=sha256:24ffc3b341d457d48e8922352130cf2644024a4ff09762a2261fd34c36ee5942 \
-    --hash=sha256:9dc2d991b3479cc2df15930958b674a48a227d5361d413827a4cfd0b5876fc98 \
-    # via -r contrib/packaging/requirements-windows.txt.in, keyring
+    --hash=sha256:9dc2d991b3479cc2df15930958b674a48a227d5361d413827a4cfd0b5876fc98
+    # via
+    #   -r contrib/packaging/requirements-windows.txt.in
+    #   keyring
 pyyaml==5.4.1 \
     --hash=sha256:08682f6b72c722394747bddaf0aa62277e02557c0fd1c42cb853016a38f8dedf \
     --hash=sha256:0f5f5786c0e09baddcd8b4b45f20a7b5d61a7e7e99846e3c799b05c7c53fa696 \
@@ -220,41 +245,43 @@
     --hash=sha256:fa5ae20527d8e831e8230cbffd9f8fe952815b2b7dae6ffec25318803a7528fc \
     --hash=sha256:fd7f6999a8070df521b6384004ef42833b9bd62cfee11a09bda1079b4b704247 \
     --hash=sha256:fdc842473cd33f45ff6bce46aea678a54e3d21f1b61a7750ce3c498eedfe25d6 \
-    --hash=sha256:fe69978f3f768926cfa37b867e3843918e012cf83f680806599ddce33c2c68b0 \
+    --hash=sha256:fe69978f3f768926cfa37b867e3843918e012cf83f680806599ddce33c2c68b0
     # via vcrpy
 six==1.16.0 \
     --hash=sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926 \
-    --hash=sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254 \
+    --hash=sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254
     # via vcrpy
 toml==0.10.2 \
     --hash=sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b \
-    --hash=sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f \
+    --hash=sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f
     # via pytest
 typing-extensions==3.10.0.0 \
     --hash=sha256:0ac0f89795dd19de6b97debb0c6af1c70987fd80a2d62d1958f7e56fcc31b497 \
     --hash=sha256:50b6f157849174217d0656f99dc82fe932884fb250826c18350e159ec6cdf342 \
-    --hash=sha256:779383f6086d90c99ae41cf0ff39aac8a7937a9283ce0a414e5dd782f4c94a84 \
+    --hash=sha256:779383f6086d90c99ae41cf0ff39aac8a7937a9283ce0a414e5dd782f4c94a84
     # via yarl
 urllib3==1.25.11 \
     --hash=sha256:8d7eaa5a82a1cac232164990f04874c594c9453ec55eef02eab885aa02fc17a2 \
-    --hash=sha256:f5321fbe4bf3fefa0efd0bfe7fb14e90909eb62a48ccda331726b4319897dd5e \
+    --hash=sha256:f5321fbe4bf3fefa0efd0bfe7fb14e90909eb62a48ccda331726b4319897dd5e
     # via dulwich
 vcrpy==4.1.1 \
     --hash=sha256:12c3fcdae7b88ecf11fc0d3e6d77586549d4575a2ceee18e82eee75c1f626162 \
-    --hash=sha256:57095bf22fc0a2d99ee9674cdafebed0f3ba763018582450706f7d3a74fff599 \
+    --hash=sha256:57095bf22fc0a2d99ee9674cdafebed0f3ba763018582450706f7d3a74fff599
     # via pytest-vcr
-windows-curses==2.2.0 \
-    --hash=sha256:1452d771ec6f9b3fef037da2b169196a9a12be4e86a6c27dd579adac70c42028 \
-    --hash=sha256:267544e4f60c09af6505e50a69d7f01d7f8a281cf4bd4fc7efc3b32b9a4ef64e \
-    --hash=sha256:389228a3df556102e72450f599283094168aa82eee189f501ad9f131a0fc92e1 \
-    --hash=sha256:84336fe470fa07288daec5c684dec74c0766fec6b3511ccedb4c494804acfbb7 \
-    --hash=sha256:9aa6ff60be76f5de696dc6dbf7897e3b1e6abcf4c0f741e9a0ee22cd6ef382f8 \
-    --hash=sha256:c4a8ce00e82635f06648cc40d99f470be4e3ffeb84f9f7ae9d6a4f68ec6361e7 \
-    --hash=sha256:c5cd032bc7d0f03224ab55c925059d98e81795098d59bbd10f7d05c7ea9677ce \
-    --hash=sha256:fc0be372fe6da3c39d7093154ce029115a927bf287f34b4c615e2b3f8c23dfaa \
+windows-curses==2.3.0 \
+    --hash=sha256:170c0d941c2e0cdf864e7f0441c1bdf0709232bf4aa7ce7f54d90fc76a4c0504 \
+    --hash=sha256:4d5fb991d1b90a41c2332f02241a1f84c8a1e6bc8f6e0d26f532d0da7a9f7b51 \
+    --hash=sha256:7a35eda4cb120b9e1a5ae795f3bc06c55b92c9d391baba6be1903285a05f3551 \
+    --hash=sha256:935be95cfdb9213f6f5d3d5bcd489960e3a8fbc9b574e7b2e8a3a3cc46efff49 \
+    --hash=sha256:a3a63a0597729e10f923724c2cf972a23ea677b400d2387dee1d668cf7116177 \
+    --hash=sha256:c860f596d28377e47f322b7382be4d3573fd76d1292234996bb7f72e0bc0ed0d \
+    --hash=sha256:cc5fa913780d60f4a40824d374a4f8ca45b4e205546e83a2d85147315a57457e \
+    --hash=sha256:d5cde8ec6d582aa77af791eca54f60858339fb3f391945f9cad11b1ab71062e3 \
+    --hash=sha256:e913dc121446d92b33fe4f5bcca26d3a34e4ad19f2af160370d57c3d1e93b4e1 \
+    --hash=sha256:fbc2131cec57e422c6660e6cdb3420aff5be5169b8e45bb7c471f884b0590a2b
     # via -r contrib/packaging/requirements-windows.txt.in
 wrapt==1.12.1 \
-    --hash=sha256:b62ffa81fb85f4332a4f609cab4ac40709470da05643a082ec1eb88e6d9b97d7 \
+    --hash=sha256:b62ffa81fb85f4332a4f609cab4ac40709470da05643a082ec1eb88e6d9b97d7
     # via vcrpy
 yarl==1.6.3 \
     --hash=sha256:00d7ad91b6583602eb9c1d085a2cf281ada267e9a197e8b7cae487dadbfa293e \
@@ -293,9 +320,9 @@
     --hash=sha256:e6b5460dc5ad42ad2b36cca524491dfcaffbfd9c8df50508bddc354e787b8dc2 \
     --hash=sha256:f040bcc6725c821a4c0665f3aa96a4d0805a7aaf2caf266d256b8ed71b9f041c \
     --hash=sha256:f0b059678fd549c66b89bed03efcabb009075bd131c248ecdf087bdb6faba24a \
-    --hash=sha256:fcbb48a93e8699eae920f8d92f7160c03567b421bc17362a9ffbbd706a816f71 \
+    --hash=sha256:fcbb48a93e8699eae920f8d92f7160c03567b421bc17362a9ffbbd706a816f71
     # via vcrpy
 zipp==3.4.0 \
     --hash=sha256:102c24ef8f171fd729d46599845e95c7ab894a4cf45f5de11a44cc7444fb1108 \
-    --hash=sha256:ed5eee1974372595f9e416cc7bbeeb12335201d8081ca8a0743c954d4446e5cb \
+    --hash=sha256:ed5eee1974372595f9e416cc7bbeeb12335201d8081ca8a0743c954d4446e5cb
     # via importlib-metadata
--- a/contrib/packaging/requirements.txt	Mon Dec 06 10:08:04 2021 +0100
+++ b/contrib/packaging/requirements.txt	Wed Dec 08 10:14:37 2021 +0100
@@ -1,16 +1,16 @@
 #
-# This file is autogenerated by pip-compile
+# This file is autogenerated by pip-compile with python 3.7
 # To update, run:
 #
 #    pip-compile --generate-hashes --output-file=contrib/packaging/requirements.txt contrib/packaging/requirements.txt.in
 #
 docutils==0.16 \
     --hash=sha256:0c5b78adfbf7762415433f5515cd5c9e762339e23369dbe8000d84a4bf4ab3af \
-    --hash=sha256:c2de3a60e9e7d07be26b7f2b00ca0309c207e06c100f9cc2a94931fc75a478fc \
+    --hash=sha256:c2de3a60e9e7d07be26b7f2b00ca0309c207e06c100f9cc2a94931fc75a478fc
     # via -r contrib/packaging/requirements.txt.in
 jinja2==2.11.2 \
     --hash=sha256:89aab215427ef59c34ad58735269eb58b1a5808103067f7bb9d5836c651b3bb0 \
-    --hash=sha256:f0a4641d3cf955324a89c04f3d94663aa4d638abe8f733ecd3582848e1c37035 \
+    --hash=sha256:f0a4641d3cf955324a89c04f3d94663aa4d638abe8f733ecd3582848e1c37035
     # via -r contrib/packaging/requirements.txt.in
 markupsafe==1.1.1 \
     --hash=sha256:00bc623926325b26bb9605ae9eae8a215691f33cae5df11ca5424f06f2d1f473 \
@@ -45,5 +45,5 @@
     --hash=sha256:cd5df75523866410809ca100dc9681e301e3c27567cf498077e8551b6d20e42f \
     --hash=sha256:cdb132fc825c38e1aeec2c8aa9338310d29d337bebbd7baa06889d09a60a1fa2 \
     --hash=sha256:e249096428b3ae81b08327a63a485ad0878de3fb939049038579ac0ef61e17e7 \
-    --hash=sha256:e8313f01ba26fbbe36c7be1966a7b7424942f670f38e666995b88d012765b9be \
+    --hash=sha256:e8313f01ba26fbbe36c7be1966a7b7424942f670f38e666995b88d012765b9be
     # via jinja2
--- a/hgext/commitextras.py	Mon Dec 06 10:08:04 2021 +0100
+++ b/hgext/commitextras.py	Wed Dec 08 10:14:37 2021 +0100
@@ -65,23 +65,23 @@
                         b"unable to parse '%s', should follow "
                         b"KEY=VALUE format"
                     )
-                    raise error.Abort(msg % raw)
+                    raise error.InputError(msg % raw)
                 k, v = raw.split(b'=', 1)
                 if not k:
                     msg = _(b"unable to parse '%s', keys can't be empty")
-                    raise error.Abort(msg % raw)
+                    raise error.InputError(msg % raw)
                 if re.search(br'[^\w-]', k):
                     msg = _(
                         b"keys can only contain ascii letters, digits,"
                         b" '_' and '-'"
                     )
-                    raise error.Abort(msg)
+                    raise error.InputError(msg)
                 if k in usedinternally:
                     msg = _(
                         b"key '%s' is used internally, can't be set "
                         b"manually"
                     )
-                    raise error.Abort(msg % k)
+                    raise error.InputError(msg % k)
                 inneropts['extra'][k] = v
             return super(repoextra, self).commit(*innerpats, **inneropts)
 
--- a/hgext/git/dirstate.py	Mon Dec 06 10:08:04 2021 +0100
+++ b/hgext/git/dirstate.py	Wed Dec 08 10:14:37 2021 +0100
@@ -257,7 +257,7 @@
             if match(p):
                 yield p
 
-    def set_clean(self, f, parentfiledata=None):
+    def set_clean(self, f, parentfiledata):
         """Mark a file normal and clean."""
         # TODO: for now we just let libgit2 re-stat the file. We can
         # clearly do better.
--- a/hgext/histedit.py	Mon Dec 06 10:08:04 2021 +0100
+++ b/hgext/histedit.py	Wed Dec 08 10:14:37 2021 +0100
@@ -1324,6 +1324,10 @@
 d: drop, e: edit, f: fold, m: mess, p: pick, r: roll
 pgup/K: move patch up, pgdn/J: move patch down, c: commit, q: abort
 """
+            if self.later_on_top:
+                help += b"Newer commits are shown above older commits.\n"
+            else:
+                help += b"Older commits are shown above newer commits.\n"
         return help.splitlines()
 
     def render_help(self, win):
--- a/hgext/keyword.py	Mon Dec 06 10:08:04 2021 +0100
+++ b/hgext/keyword.py	Wed Dec 08 10:14:37 2021 +0100
@@ -116,6 +116,7 @@
     dateutil,
     stringutil,
 )
+from mercurial.dirstateutils import timestamp
 
 cmdtable = {}
 command = registrar.command(cmdtable)
@@ -326,6 +327,7 @@
             msg = _(b'overwriting %s expanding keywords\n')
         else:
             msg = _(b'overwriting %s shrinking keywords\n')
+        wctx = self.repo[None]
         for f in candidates:
             if self.restrict:
                 data = self.repo.file(f).read(mf[f])
@@ -356,7 +358,12 @@
                 fp.write(data)
                 fp.close()
                 if kwcmd:
-                    self.repo.dirstate.set_clean(f)
+                    s = wctx[f].lstat()
+                    mode = s.st_mode
+                    size = s.st_size
+                    mtime = timestamp.mtime_of(s)
+                    cache_data = (mode, size, mtime)
+                    self.repo.dirstate.set_clean(f, cache_data)
                 elif self.postcommit:
                     self.repo.dirstate.update_file_p1(f, p1_tracked=True)
 
--- a/hgext/largefiles/lfutil.py	Mon Dec 06 10:08:04 2021 +0100
+++ b/hgext/largefiles/lfutil.py	Wed Dec 08 10:14:37 2021 +0100
@@ -32,6 +32,7 @@
     vfs as vfsmod,
 )
 from mercurial.utils import hashutil
+from mercurial.dirstateutils import timestamp
 
 shortname = b'.hglf'
 shortnameslash = shortname + b'/'
@@ -243,10 +244,11 @@
 def lfdirstatestatus(lfdirstate, repo):
     pctx = repo[b'.']
     match = matchmod.always()
-    unsure, s = lfdirstate.status(
+    unsure, s, mtime_boundary = lfdirstate.status(
         match, subrepos=[], ignored=False, clean=False, unknown=False
     )
     modified, clean = s.modified, s.clean
+    wctx = repo[None]
     for lfile in unsure:
         try:
             fctx = pctx[standin(lfile)]
@@ -256,7 +258,13 @@
             modified.append(lfile)
         else:
             clean.append(lfile)
-            lfdirstate.set_clean(lfile)
+            st = wctx[lfile].lstat()
+            mode = st.st_mode
+            size = st.st_size
+            mtime = timestamp.reliable_mtime_of(st, mtime_boundary)
+            if mtime is not None:
+                cache_data = (mode, size, mtime)
+                lfdirstate.set_clean(lfile, cache_data)
     return s
 
 
@@ -663,7 +671,7 @@
         # large.
         lfdirstate = openlfdirstate(ui, repo)
         dirtymatch = matchmod.always()
-        unsure, s = lfdirstate.status(
+        unsure, s, mtime_boundary = lfdirstate.status(
             dirtymatch, subrepos=[], ignored=False, clean=False, unknown=False
         )
         modifiedfiles = unsure + s.modified + s.added + s.removed
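
The keyword.py and largefiles hunks above all replace bare dirstate.set_clean(f) calls with an explicit (mode, size, mtime) cache tuple. A minimal sketch of that shared pattern, assuming a dirstate-like object whose set_clean() takes such a tuple and using the timestamp helpers imported in the diff (mark_clean itself is only an illustrative name, not something added by this changeset):

from mercurial.dirstateutils import timestamp

def mark_clean(dirstate, wctx, filename, mtime_boundary):
    # stat the file in the working context, as the hunks above do
    st = wctx[filename].lstat()
    # reliable_mtime_of() returns None when the mtime could still be racy
    # with respect to the status run; in that case nothing is cached
    mtime = timestamp.reliable_mtime_of(st, mtime_boundary)
    if mtime is not None:
        cache_data = (st.st_mode, st.st_size, mtime)
        dirstate.set_clean(filename, cache_data)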
--- a/hgext/largefiles/overrides.py	Mon Dec 06 10:08:04 2021 +0100
+++ b/hgext/largefiles/overrides.py	Wed Dec 08 10:14:37 2021 +0100
@@ -666,14 +666,12 @@
 
 # Override filemerge to prompt the user about how they wish to merge
 # largefiles. This will handle identical edits without prompting the user.
-@eh.wrapfunction(filemerge, b'_filemerge')
+@eh.wrapfunction(filemerge, b'filemerge')
 def overridefilemerge(
-    origfn, premerge, repo, wctx, mynode, orig, fcd, fco, fca, labels=None
+    origfn, repo, wctx, mynode, orig, fcd, fco, fca, labels=None
 ):
     if not lfutil.isstandin(orig) or fcd.isabsent() or fco.isabsent():
-        return origfn(
-            premerge, repo, wctx, mynode, orig, fcd, fco, fca, labels=labels
-        )
+        return origfn(repo, wctx, mynode, orig, fcd, fco, fca, labels=labels)
 
     ahash = lfutil.readasstandin(fca).lower()
     dhash = lfutil.readasstandin(fcd).lower()
@@ -1519,7 +1517,7 @@
         return orig(repo, matcher, prefix, uipathfn, opts)
     # Get the list of missing largefiles so we can remove them
     lfdirstate = lfutil.openlfdirstate(repo.ui, repo)
-    unsure, s = lfdirstate.status(
+    unsure, s, mtime_boundary = lfdirstate.status(
         matchmod.always(),
         subrepos=[],
         ignored=False,
@@ -1746,7 +1744,7 @@
         # (*1) deprecated, but used internally (e.g: "rebase --collapse")
 
         lfdirstate = lfutil.openlfdirstate(repo.ui, repo)
-        unsure, s = lfdirstate.status(
+        unsure, s, mtime_boundary = lfdirstate.status(
             matchmod.always(),
             subrepos=[],
             ignored=False,
--- a/hgext/largefiles/reposetup.py	Mon Dec 06 10:08:04 2021 +0100
+++ b/hgext/largefiles/reposetup.py	Wed Dec 08 10:14:37 2021 +0100
@@ -22,6 +22,8 @@
     util,
 )
 
+from mercurial.dirstateutils import timestamp
+
 from . import (
     lfcommands,
     lfutil,
@@ -195,7 +197,7 @@
                     match._files = [f for f in match._files if sfindirstate(f)]
                     # Don't waste time getting the ignored and unknown
                     # files from lfdirstate
-                    unsure, s = lfdirstate.status(
+                    unsure, s, mtime_boundary = lfdirstate.status(
                         match,
                         subrepos=[],
                         ignored=False,
@@ -210,6 +212,7 @@
                         s.clean,
                     )
                     if parentworking:
+                        wctx = repo[None]
                         for lfile in unsure:
                             standin = lfutil.standin(lfile)
                             if standin not in ctx1:
@@ -222,7 +225,15 @@
                             else:
                                 if listclean:
                                     clean.append(lfile)
-                                lfdirstate.set_clean(lfile)
+                                s = wctx[lfile].lstat()
+                                mode = s.st_mode
+                                size = s.st_size
+                                mtime = timestamp.reliable_mtime_of(
+                                    s, mtime_boundary
+                                )
+                                if mtime is not None:
+                                    cache_data = (mode, size, mtime)
+                                    lfdirstate.set_clean(lfile, cache_data)
                     else:
                         tocheck = unsure + modified + added + clean
                         modified, added, clean = [], [], []
--- a/hgext/narrow/narrowdirstate.py	Mon Dec 06 10:08:04 2021 +0100
+++ b/hgext/narrow/narrowdirstate.py	Wed Dec 08 10:14:37 2021 +0100
@@ -38,8 +38,8 @@
             return super(narrowdirstate, self).normal(*args, **kwargs)
 
         @_editfunc
-        def set_tracked(self, *args):
-            return super(narrowdirstate, self).set_tracked(*args)
+        def set_tracked(self, *args, **kwargs):
+            return super(narrowdirstate, self).set_tracked(*args, **kwargs)
 
         @_editfunc
         def set_untracked(self, *args):
--- a/hgext/remotefilelog/__init__.py	Mon Dec 06 10:08:04 2021 +0100
+++ b/hgext/remotefilelog/__init__.py	Wed Dec 08 10:14:37 2021 +0100
@@ -520,7 +520,7 @@
 
 
 # Prefetch files before status attempts to look at their size and contents
-def checklookup(orig, self, files):
+def checklookup(orig, self, files, mtime_boundary):
     repo = self._repo
     if isenabled(repo):
         prefetchfiles = []
@@ -530,7 +530,7 @@
                     prefetchfiles.append((f, hex(parent.filenode(f))))
         # batch fetch the needed files from the server
         repo.fileservice.prefetch(prefetchfiles)
-    return orig(self, files)
+    return orig(self, files, mtime_boundary)
 
 
 # Prefetch the logic that compares added and removed files for renames
--- a/hgext/win32text.py	Mon Dec 06 10:08:04 2021 +0100
+++ b/hgext/win32text.py	Wed Dec 08 10:14:37 2021 +0100
@@ -47,6 +47,8 @@
 from mercurial.i18n import _
 from mercurial.node import short
 from mercurial import (
+    cmdutil,
+    extensions,
     pycompat,
     registrar,
 )
@@ -215,6 +217,23 @@
         repo.adddatafilter(name, fn)
 
 
+def wrap_revert(orig, repo, ctx, names, uipathfn, actions, *args, **kwargs):
+    # reset dirstate cache for the files we touch
+    ds = repo.dirstate
+    with ds.parentchange():
+        for filename in actions[b'revert'][0]:
+            entry = ds.get_entry(filename)
+            if entry is not None:
+                if entry.p1_tracked:
+                    ds.update_file(
+                        filename,
+                        entry.tracked,
+                        p1_tracked=True,
+                        p2_info=entry.p2_info,
+                    )
+    return orig(repo, ctx, names, uipathfn, actions, *args, **kwargs)
+
+
 def extsetup(ui):
     # deprecated config: win32text.warn
     if ui.configbool(b'win32text', b'warn'):
@@ -224,3 +243,4 @@
                 b"https://mercurial-scm.org/wiki/Win32TextExtension\n"
             )
         )
+    extensions.wrapfunction(cmdutil, '_performrevert', wrap_revert)
--- a/mercurial/bundle2.py	Mon Dec 06 10:08:04 2021 +0100
+++ b/mercurial/bundle2.py	Wed Dec 08 10:14:37 2021 +0100
@@ -2419,7 +2419,7 @@
             op.records.add(b'bookmarks', record)
     else:
         raise error.ProgrammingError(
-            b'unkown bookmark mode: %s' % bookmarksmode
+            b'unknown bookmark mode: %s' % bookmarksmode
         )
 
 
--- a/mercurial/cext/parsers.c	Mon Dec 06 10:08:04 2021 +0100
+++ b/mercurial/cext/parsers.c	Wed Dec 08 10:14:37 2021 +0100
@@ -61,11 +61,13 @@
 	int p2_info;
 	int has_meaningful_data;
 	int has_meaningful_mtime;
+	int mtime_second_ambiguous;
 	int mode;
 	int size;
 	int mtime_s;
 	int mtime_ns;
 	PyObject *parentfiledata;
+	PyObject *mtime;
 	PyObject *fallback_exec;
 	PyObject *fallback_symlink;
 	static char *keywords_name[] = {
@@ -78,6 +80,7 @@
 	p2_info = 0;
 	has_meaningful_mtime = 1;
 	has_meaningful_data = 1;
+	mtime_second_ambiguous = 0;
 	parentfiledata = Py_None;
 	fallback_exec = Py_None;
 	fallback_symlink = Py_None;
@@ -118,10 +121,18 @@
 	}
 
 	if (parentfiledata != Py_None) {
-		if (!PyArg_ParseTuple(parentfiledata, "ii(ii)", &mode, &size,
-		                      &mtime_s, &mtime_ns)) {
+		if (!PyArg_ParseTuple(parentfiledata, "iiO", &mode, &size,
+		                      &mtime)) {
 			return NULL;
 		}
+		if (mtime != Py_None) {
+			if (!PyArg_ParseTuple(mtime, "iii", &mtime_s, &mtime_ns,
+			                      &mtime_second_ambiguous)) {
+				return NULL;
+			}
+		} else {
+			has_meaningful_mtime = 0;
+		}
 	} else {
 		has_meaningful_data = 0;
 		has_meaningful_mtime = 0;
@@ -130,6 +141,9 @@
 		t->flags |= dirstate_flag_has_meaningful_data;
 		t->mode = mode;
 		t->size = size;
+		if (mtime_second_ambiguous) {
+			t->flags |= dirstate_flag_mtime_second_ambiguous;
+		}
 	} else {
 		t->mode = 0;
 		t->size = 0;
@@ -255,7 +269,8 @@
 	} else if (!(self->flags & dirstate_flag_has_mtime) ||
 	           !(self->flags & dirstate_flag_p1_tracked) ||
 	           !(self->flags & dirstate_flag_wc_tracked) ||
-	           (self->flags & dirstate_flag_p2_info)) {
+	           (self->flags & dirstate_flag_p2_info) ||
+	           (self->flags & dirstate_flag_mtime_second_ambiguous)) {
 		return ambiguous_time;
 	} else {
 		return self->mtime_s;
@@ -311,33 +326,30 @@
 	return PyInt_FromLong(dirstate_item_c_v1_mtime(self));
 };
 
-static PyObject *dirstate_item_need_delay(dirstateItemObject *self,
-                                          PyObject *now)
-{
-	int now_s;
-	int now_ns;
-	if (!PyArg_ParseTuple(now, "ii", &now_s, &now_ns)) {
-		return NULL;
-	}
-	if (dirstate_item_c_v1_state(self) == 'n' && self->mtime_s == now_s) {
-		Py_RETURN_TRUE;
-	} else {
-		Py_RETURN_FALSE;
-	}
-};
-
 static PyObject *dirstate_item_mtime_likely_equal_to(dirstateItemObject *self,
                                                      PyObject *other)
 {
 	int other_s;
 	int other_ns;
-	if (!PyArg_ParseTuple(other, "ii", &other_s, &other_ns)) {
+	int other_second_ambiguous;
+	if (!PyArg_ParseTuple(other, "iii", &other_s, &other_ns,
+	                      &other_second_ambiguous)) {
 		return NULL;
 	}
-	if ((self->flags & dirstate_flag_has_mtime) &&
-	    self->mtime_s == other_s &&
-	    (self->mtime_ns == other_ns || self->mtime_ns == 0 ||
-	     other_ns == 0)) {
+	if (!(self->flags & dirstate_flag_has_mtime)) {
+		Py_RETURN_FALSE;
+	}
+	if (self->mtime_s != other_s) {
+		Py_RETURN_FALSE;
+	}
+	if (self->mtime_ns == 0 || other_ns == 0) {
+		if (self->flags & dirstate_flag_mtime_second_ambiguous) {
+			Py_RETURN_FALSE;
+		} else {
+			Py_RETURN_TRUE;
+		}
+	}
+	if (self->mtime_ns == other_ns) {
 		Py_RETURN_TRUE;
 	} else {
 		Py_RETURN_FALSE;
@@ -438,14 +450,6 @@
 		              dirstate_flag_has_meaningful_data |
 		              dirstate_flag_has_mtime);
 	}
-	if (t->flags & dirstate_flag_mtime_second_ambiguous) {
-		/* The current code is not able to do the more subtle comparison
-		 * that the MTIME_SECOND_AMBIGUOUS requires. So we ignore the
-		 * mtime */
-		t->flags &= ~(dirstate_flag_mtime_second_ambiguous |
-		              dirstate_flag_has_meaningful_data |
-		              dirstate_flag_has_mtime);
-	}
 	t->mode = 0;
 	if (t->flags & dirstate_flag_has_meaningful_data) {
 		if (t->flags & dirstate_flag_mode_exec_perm) {
@@ -474,14 +478,28 @@
 static PyObject *dirstate_item_set_clean(dirstateItemObject *self,
                                          PyObject *args)
 {
-	int size, mode, mtime_s, mtime_ns;
-	if (!PyArg_ParseTuple(args, "ii(ii)", &mode, &size, &mtime_s,
-	                      &mtime_ns)) {
+	int size, mode, mtime_s, mtime_ns, mtime_second_ambiguous;
+	PyObject *mtime;
+	mtime_s = 0;
+	mtime_ns = 0;
+	mtime_second_ambiguous = 0;
+	if (!PyArg_ParseTuple(args, "iiO", &mode, &size, &mtime)) {
 		return NULL;
 	}
+	if (mtime != Py_None) {
+		if (!PyArg_ParseTuple(mtime, "iii", &mtime_s, &mtime_ns,
+		                      &mtime_second_ambiguous)) {
+			return NULL;
+		}
+	} else {
+		self->flags &= ~dirstate_flag_has_mtime;
+	}
 	self->flags = dirstate_flag_wc_tracked | dirstate_flag_p1_tracked |
 	              dirstate_flag_has_meaningful_data |
 	              dirstate_flag_has_mtime;
+	if (mtime_second_ambiguous) {
+		self->flags |= dirstate_flag_mtime_second_ambiguous;
+	}
 	self->mode = mode;
 	self->size = size;
 	self->mtime_s = mtime_s;
@@ -530,8 +548,6 @@
      "return a \"size\" suitable for v1 serialization"},
     {"v1_mtime", (PyCFunction)dirstate_item_v1_mtime, METH_NOARGS,
      "return a \"mtime\" suitable for v1 serialization"},
-    {"need_delay", (PyCFunction)dirstate_item_need_delay, METH_O,
-     "True if the stored mtime would be ambiguous with the current time"},
     {"mtime_likely_equal_to", (PyCFunction)dirstate_item_mtime_likely_equal_to,
      METH_O, "True if the stored mtime is likely equal to the given mtime"},
     {"from_v1_data", (PyCFunction)dirstate_item_from_v1_meth,
@@ -904,12 +920,9 @@
 	Py_ssize_t nbytes, pos, l;
 	PyObject *k, *v = NULL, *pn;
 	char *p, *s;
-	int now_s;
-	int now_ns;
 
-	if (!PyArg_ParseTuple(args, "O!O!O!(ii):pack_dirstate", &PyDict_Type,
-	                      &map, &PyDict_Type, &copymap, &PyTuple_Type, &pl,
-	                      &now_s, &now_ns)) {
+	if (!PyArg_ParseTuple(args, "O!O!O!:pack_dirstate", &PyDict_Type, &map,
+	                      &PyDict_Type, &copymap, &PyTuple_Type, &pl)) {
 		return NULL;
 	}
 
@@ -978,21 +991,6 @@
 		mode = dirstate_item_c_v1_mode(tuple);
 		size = dirstate_item_c_v1_size(tuple);
 		mtime = dirstate_item_c_v1_mtime(tuple);
-		if (state == 'n' && tuple->mtime_s == now_s) {
-			/* See pure/parsers.py:pack_dirstate for why we do
-			 * this. */
-			mtime = -1;
-			mtime_unset = (PyObject *)dirstate_item_from_v1_data(
-			    state, mode, size, mtime);
-			if (!mtime_unset) {
-				goto bail;
-			}
-			if (PyDict_SetItem(map, k, mtime_unset) == -1) {
-				goto bail;
-			}
-			Py_DECREF(mtime_unset);
-			mtime_unset = NULL;
-		}
 		*p++ = state;
 		putbe32((uint32_t)mode, p);
 		putbe32((uint32_t)size, p + 4);
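
The rewritten mtime comparison in parsers.c above now takes a (seconds, nanoseconds, second_ambiguous) triple and refuses to trust a zero-nanosecond match when the stored second is flagged as ambiguous. A hedged Python rendering of the same decision, assuming an item object exposing has_mtime, mtime_s, mtime_ns and mtime_second_ambiguous attributes that mirror the C flags (the attribute names are only illustrative):

def mtime_likely_equal_to(item, other):
    # other is the new 3-tuple form: (seconds, nanoseconds, second_ambiguous)
    other_s, other_ns, _other_second_ambiguous = other
    if not item.has_mtime:
        return False
    if item.mtime_s != other_s:
        return False
    if item.mtime_ns == 0 or other_ns == 0:
        # one side lacks sub-second precision: only trust the match when the
        # stored second is not flagged as ambiguous
        return not item.mtime_second_ambiguous
    return item.mtime_ns == other_ns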
--- a/mercurial/cmdutil.py	Mon Dec 06 10:08:04 2021 +0100
+++ b/mercurial/cmdutil.py	Wed Dec 08 10:14:37 2021 +0100
@@ -522,8 +522,10 @@
         # 1. filter patch, since we are intending to apply subset of it
         try:
             chunks, newopts = filterfn(ui, original_headers, match)
-        except error.PatchError as err:
+        except error.PatchParseError as err:
             raise error.InputError(_(b'error parsing patch: %s') % err)
+        except error.PatchApplicationError as err:
+            raise error.StateError(_(b'error applying patch: %s') % err)
         opts.update(newopts)
 
         # We need to keep a backup of files that have been newly added and
@@ -608,8 +610,10 @@
                     ui.debug(b'applying patch\n')
                     ui.debug(fp.getvalue())
                     patch.internalpatch(ui, repo, fp, 1, eolmode=None)
-                except error.PatchError as err:
+                except error.PatchParseError as err:
                     raise error.InputError(pycompat.bytestr(err))
+                except error.PatchApplicationError as err:
+                    raise error.StateError(pycompat.bytestr(err))
             del fp
 
             # 4. We prepared working directory according to filtered
@@ -2020,9 +2024,16 @@
                 eolmode=None,
                 similarity=sim / 100.0,
             )
-        except error.PatchError as e:
+        except error.PatchParseError as e:
+            raise error.InputError(
+                pycompat.bytestr(e),
+                hint=_(
+                    b'check that whitespace in the patch has not been mangled'
+                ),
+            )
+        except error.PatchApplicationError as e:
             if not partial:
-                raise error.Abort(pycompat.bytestr(e))
+                raise error.StateError(pycompat.bytestr(e))
             if partial:
                 rejects = True
 
@@ -2079,8 +2090,15 @@
                     files,
                     eolmode=None,
                 )
-            except error.PatchError as e:
-                raise error.Abort(stringutil.forcebytestr(e))
+            except error.PatchParseError as e:
+                raise error.InputError(
+                    stringutil.forcebytestr(e),
+                    hint=_(
+                        b'check that whitespace in the patch has not been mangled'
+                    ),
+                )
+            except error.PatchApplicationError as e:
+                raise error.StateError(stringutil.forcebytestr(e))
             if opts.get(b'exact'):
                 editor = None
             else:
@@ -3628,15 +3646,14 @@
         prntstatusmsg(b'drop', f)
         repo.dirstate.set_untracked(f)
 
-    normal = None
-    if node == parent:
-        # We're reverting to our parent. If possible, we'd like status
-        # to report the file as clean. We have to use normallookup for
-        # merges to avoid losing information about merged/dirty files.
-        if p2 != repo.nullid:
-            normal = repo.dirstate.set_tracked
-        else:
-            normal = repo.dirstate.set_clean
+    # We are reverting to our parent. If possible, we would like `hg status`
+    # to report the file as clean. We have to be less aggressive for
+    # merges to avoid losing information about copies introduced by the merge.
+    # This might come with bugs?
+    reset_copy = p2 == repo.nullid
+
+    def normal(filename):
+        return repo.dirstate.set_tracked(filename, reset_copy=reset_copy)
 
     newlyaddedandmodifiedfiles = set()
     if interactive:
@@ -3674,8 +3691,10 @@
             if operation == b'discard':
                 chunks = patch.reversehunks(chunks)
 
-        except error.PatchError as err:
-            raise error.Abort(_(b'error parsing patch: %s') % err)
+        except error.PatchParseError as err:
+            raise error.InputError(_(b'error parsing patch: %s') % err)
+        except error.PatchApplicationError as err:
+            raise error.StateError(_(b'error applying patch: %s') % err)
 
         # FIXME: when doing an interactive revert of a copy, there's no way of
         # performing a partial revert of the added file, the only option is
@@ -3710,8 +3729,10 @@
         if dopatch:
             try:
                 patch.internalpatch(repo.ui, repo, fp, 1, eolmode=None)
-            except error.PatchError as err:
-                raise error.Abort(pycompat.bytestr(err))
+            except error.PatchParseError as err:
+                raise error.InputError(pycompat.bytestr(err))
+            except error.PatchApplicationError as err:
+                raise error.StateError(pycompat.bytestr(err))
         del fp
     else:
         for f in actions[b'revert'][0]:
@@ -3727,9 +3748,6 @@
             checkout(f)
             repo.dirstate.set_tracked(f)
 
-    normal = repo.dirstate.set_tracked
-    if node == parent and p2 == repo.nullid:
-        normal = repo.dirstate.set_clean
     for f in actions[b'undelete'][0]:
         if interactive:
             choice = repo.ui.promptchoice(
--- a/mercurial/commands.py	Mon Dec 06 10:08:04 2021 +0100
+++ b/mercurial/commands.py	Wed Dec 08 10:14:37 2021 +0100
@@ -6130,7 +6130,6 @@
         ret = 0
         didwork = False
 
-        tocomplete = []
         hasconflictmarkers = []
         if mark:
             markcheck = ui.config(b'commands', b'resolve.mark-check')
@@ -6183,24 +6182,20 @@
                     # preresolve file
                     overrides = {(b'ui', b'forcemerge'): opts.get(b'tool', b'')}
                     with ui.configoverride(overrides, b'resolve'):
-                        complete, r = ms.preresolve(f, wctx)
-                    if not complete:
-                        tocomplete.append(f)
-                    elif r:
+                        r = ms.resolve(f, wctx)
+                    if r:
                         ret = 1
                 finally:
                     ms.commit()
 
-                # replace filemerge's .orig file with our resolve file, but only
-                # for merges that are complete
-                if complete:
-                    try:
-                        util.rename(
-                            a + b".resolve", scmutil.backuppath(ui, repo, f)
-                        )
-                    except OSError as inst:
-                        if inst.errno != errno.ENOENT:
-                            raise
+                # replace filemerge's .orig file with our resolve file
+                try:
+                    util.rename(
+                        a + b".resolve", scmutil.backuppath(ui, repo, f)
+                    )
+                except OSError as inst:
+                    if inst.errno != errno.ENOENT:
+                        raise
 
         if hasconflictmarkers:
             ui.warn(
@@ -6218,25 +6213,6 @@
                     hint=_(b'use --all to mark anyway'),
                 )
 
-        for f in tocomplete:
-            try:
-                # resolve file
-                overrides = {(b'ui', b'forcemerge'): opts.get(b'tool', b'')}
-                with ui.configoverride(overrides, b'resolve'):
-                    r = ms.resolve(f, wctx)
-                if r:
-                    ret = 1
-            finally:
-                ms.commit()
-
-            # replace filemerge's .orig file with our resolve file
-            a = repo.wjoin(f)
-            try:
-                util.rename(a + b".resolve", scmutil.backuppath(ui, repo, f))
-            except OSError as inst:
-                if inst.errno != errno.ENOENT:
-                    raise
-
         ms.commit()
         branchmerge = repo.dirstate.p2() != repo.nullid
         # resolve is not doing a parent change here, however, `record updates`
@@ -7832,9 +7808,9 @@
         raise error.InputError(_(b"you can't specify a revision and a date"))
 
     updatecheck = None
-    if check:
+    if check or merge is not None and not merge:
         updatecheck = b'abort'
-    elif merge:
+    elif merge or check is not None and not check:
         updatecheck = b'none'
 
     with repo.wlock():
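
Since ``and`` binds more tightly than ``or`` in Python, the two new conditions
above are equivalent to the fully parenthesized sketch below; this assumes
`check` and `merge` are tri-state values (None when the flag is not given,
True/False when it is given or explicitly negated)::

  def _updatecheck_mode(check, merge):
      # illustrative only: explicit parentheses for the conditions above
      if check or (merge is not None and not merge):
          # --check was given, or --merge was explicitly negated
          return b'abort'
      elif merge or (check is not None and not check):
          # --merge was given, or --check was explicitly negated
          return b'none'
      return None

  assert _updatecheck_mode(None, False) == b'abort'   # hg update --no-merge
  assert _updatecheck_mode(None, True) == b'none'     # hg update --merge
  assert _updatecheck_mode(None, None) is None        # plain hg update
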
--- a/mercurial/configitems.py	Mon Dec 06 10:08:04 2021 +0100
+++ b/mercurial/configitems.py	Wed Dec 08 10:14:37 2021 +0100
@@ -1281,11 +1281,17 @@
 )
 coreconfigitem(
     b'extensions',
-    b'.*',
+    b'[^:]*',
     default=None,
     generic=True,
 )
 coreconfigitem(
+    b'extensions',
+    b'[^:]*:required',
+    default=False,
+    generic=True,
+)
+coreconfigitem(
     b'extdata',
     b'.*',
     default=None,
--- a/mercurial/context.py	Mon Dec 06 10:08:04 2021 +0100
+++ b/mercurial/context.py	Wed Dec 08 10:14:37 2021 +0100
@@ -46,6 +46,9 @@
     dateutil,
     stringutil,
 )
+from .dirstateutils import (
+    timestamp,
+)
 
 propertycache = util.propertycache
 
@@ -1793,13 +1796,14 @@
             sane.append(f)
         return sane
 
-    def _checklookup(self, files):
+    def _checklookup(self, files, mtime_boundary):
         # check for any possibly clean files
         if not files:
-            return [], [], []
+            return [], [], [], []
 
         modified = []
         deleted = []
+        clean = []
         fixup = []
         pctx = self._parents[0]
         # do a full compare of any files that might have changed
@@ -1813,8 +1817,18 @@
                     or pctx[f].cmp(self[f])
                 ):
                     modified.append(f)
+                elif mtime_boundary is None:
+                    clean.append(f)
                 else:
-                    fixup.append(f)
+                    s = self[f].lstat()
+                    mode = s.st_mode
+                    size = s.st_size
+                    file_mtime = timestamp.reliable_mtime_of(s, mtime_boundary)
+                    if file_mtime is not None:
+                        cache_info = (mode, size, file_mtime)
+                        fixup.append((f, cache_info))
+                    else:
+                        clean.append(f)
             except (IOError, OSError):
                 # A file become inaccessible in between? Mark it as deleted,
                 # matching dirstate behavior (issue5584).
@@ -1824,7 +1838,7 @@
                 # it's in the dirstate.
                 deleted.append(f)
 
-        return modified, deleted, fixup
+        return modified, deleted, clean, fixup
 
     def _poststatusfixup(self, status, fixup):
         """update dirstate for files that are actually clean"""
@@ -1842,13 +1856,13 @@
                     if dirstate.identity() == oldid:
                         if fixup:
                             if dirstate.pendingparentchange():
-                                normal = lambda f: dirstate.update_file(
+                                normal = lambda f, pfd: dirstate.update_file(
                                     f, p1_tracked=True, wc_tracked=True
                                 )
                             else:
                                 normal = dirstate.set_clean
-                            for f in fixup:
-                                normal(f)
+                            for f, pdf in fixup:
+                                normal(f, pdf)
                             # write changes out explicitly, because nesting
                             # wlock at runtime may prevent 'wlock.release()'
                             # after this block from doing so for subsequent
@@ -1878,19 +1892,23 @@
         subrepos = []
         if b'.hgsub' in self:
             subrepos = sorted(self.substate)
-        cmp, s = self._repo.dirstate.status(
+        cmp, s, mtime_boundary = self._repo.dirstate.status(
             match, subrepos, ignored=ignored, clean=clean, unknown=unknown
         )
 
         # check for any possibly clean files
         fixup = []
         if cmp:
-            modified2, deleted2, fixup = self._checklookup(cmp)
+            modified2, deleted2, clean_set, fixup = self._checklookup(
+                cmp, mtime_boundary
+            )
             s.modified.extend(modified2)
             s.deleted.extend(deleted2)
 
+            if clean_set and clean:
+                s.clean.extend(clean_set)
             if fixup and clean:
-                s.clean.extend(fixup)
+                s.clean.extend((f for f, _ in fixup))
 
         self._poststatusfixup(s, fixup)
 
--- a/mercurial/dirstate.py	Mon Dec 06 10:08:04 2021 +0100
+++ b/mercurial/dirstate.py	Wed Dec 08 10:14:37 2021 +0100
@@ -66,16 +66,6 @@
         return obj._join(fname)
 
 
-def _getfsnow(vfs):
-    '''Get "now" timestamp on filesystem'''
-    tmpfd, tmpname = vfs.mkstemp()
-    try:
-        return timestamp.mtime_of(os.fstat(tmpfd))
-    finally:
-        os.close(tmpfd)
-        vfs.unlink(tmpname)
-
-
 def requires_parents_change(func):
     def wrap(self, *args, **kwargs):
         if not self.pendingparentchange():
@@ -126,7 +116,6 @@
         # UNC path pointing to root share (issue4557)
         self._rootdir = pathutil.normasprefix(root)
         self._dirty = False
-        self._lastnormaltime = timestamp.zero()
         self._ui = ui
         self._filecache = {}
         self._parentwriters = 0
@@ -440,7 +429,6 @@
         for a in ("_map", "_branch", "_ignore"):
             if a in self.__dict__:
                 delattr(self, a)
-        self._lastnormaltime = timestamp.zero()
         self._dirty = False
         self._parentwriters = 0
         self._origpl = None
@@ -462,19 +450,24 @@
         return self._map.copymap
 
     @requires_no_parents_change
-    def set_tracked(self, filename):
+    def set_tracked(self, filename, reset_copy=False):
         """a "public" method for generic code to mark a file as tracked
 
         This function is to be called outside of "update/merge" case. For
         example by a command like `hg add X`.
 
+        If reset_copy is set, any existing copy information will be dropped.
+
         return True the file was previously untracked, False otherwise.
         """
         self._dirty = True
         entry = self._map.get(filename)
         if entry is None or not entry.tracked:
             self._check_new_tracked_filename(filename)
-        return self._map.set_tracked(filename)
+        pre_tracked = self._map.set_tracked(filename)
+        if reset_copy:
+            self._map.copymap.pop(filename, None)
+        return pre_tracked
 
     @requires_no_parents_change
     def set_untracked(self, filename):
@@ -491,21 +484,13 @@
         return ret
 
     @requires_no_parents_change
-    def set_clean(self, filename, parentfiledata=None):
+    def set_clean(self, filename, parentfiledata):
         """record that the current state of the file on disk is known to be clean"""
         self._dirty = True
-        if parentfiledata:
-            (mode, size, mtime) = parentfiledata
-        else:
-            (mode, size, mtime) = self._get_filedata(filename)
         if not self._map[filename].tracked:
             self._check_new_tracked_filename(filename)
+        (mode, size, mtime) = parentfiledata
         self._map.set_clean(filename, mode, size, mtime)
-        if mtime > self._lastnormaltime:
-            # Remember the most recent modification timeslot for status(),
-            # to make sure we won't miss future size-preserving file content
-            # modifications that happen within the same timeslot.
-            self._lastnormaltime = mtime
 
     @requires_no_parents_change
     def set_possibly_dirty(self, filename):
@@ -544,10 +529,6 @@
             if entry is not None and entry.added:
                 return  # avoid dropping copy information (maybe?)
 
-        parentfiledata = None
-        if wc_tracked and p1_tracked:
-            parentfiledata = self._get_filedata(filename)
-
         self._map.reset_state(
             filename,
             wc_tracked,
@@ -555,16 +536,7 @@
             # the underlying reference might have changed, we will have to
             # check it.
             has_meaningful_mtime=False,
-            parentfiledata=parentfiledata,
         )
-        if (
-            parentfiledata is not None
-            and parentfiledata[2] > self._lastnormaltime
-        ):
-            # Remember the most recent modification timeslot for status(),
-            # to make sure we won't miss future size-preserving file content
-            # modifications that happen within the same timeslot.
-            self._lastnormaltime = parentfiledata[2]
 
     @requires_parents_change
     def update_file(
@@ -594,13 +566,6 @@
 
         self._dirty = True
 
-        need_parent_file_data = (
-            not possibly_dirty and not p2_info and wc_tracked and p1_tracked
-        )
-
-        if need_parent_file_data and parentfiledata is None:
-            parentfiledata = self._get_filedata(filename)
-
         self._map.reset_state(
             filename,
             wc_tracked,
@@ -609,14 +574,6 @@
             has_meaningful_mtime=not possibly_dirty,
             parentfiledata=parentfiledata,
         )
-        if (
-            parentfiledata is not None
-            and parentfiledata[2] > self._lastnormaltime
-        ):
-            # Remember the most recent modification timeslot for status(),
-            # to make sure we won't miss future size-preserving file content
-            # modifications that happen within the same timeslot.
-            self._lastnormaltime = parentfiledata[2]
 
     def _check_new_tracked_filename(self, filename):
         scmutil.checkfilename(filename)
@@ -634,14 +591,6 @@
                 msg %= (pycompat.bytestr(d), pycompat.bytestr(filename))
                 raise error.Abort(msg)
 
-    def _get_filedata(self, filename):
-        """returns"""
-        s = os.lstat(self._join(filename))
-        mode = s.st_mode
-        size = s.st_size
-        mtime = timestamp.mtime_of(s)
-        return (mode, size, mtime)
-
     def _discoverpath(self, path, normed, ignoremissing, exists, storemap):
         if exists is None:
             exists = os.path.lexists(os.path.join(self._root, path))
@@ -720,7 +669,6 @@
 
     def clear(self):
         self._map.clear()
-        self._lastnormaltime = timestamp.zero()
         self._dirty = True
 
     def rebuild(self, parent, allfiles, changedfiles=None):
@@ -728,9 +676,7 @@
             # Rebuild entire dirstate
             to_lookup = allfiles
             to_drop = []
-            lastnormaltime = self._lastnormaltime
             self.clear()
-            self._lastnormaltime = lastnormaltime
         elif len(changedfiles) < 10:
             # Avoid turning allfiles into a set, which can be expensive if it's
             # large.
@@ -779,20 +725,11 @@
 
         filename = self._filename
         if tr:
-            # 'dirstate.write()' is not only for writing in-memory
-            # changes out, but also for dropping ambiguous timestamp.
-            # delayed writing re-raise "ambiguous timestamp issue".
-            # See also the wiki page below for detail:
-            # https://www.mercurial-scm.org/wiki/DirstateTransactionPlan
-
-            # record when mtime start to be ambiguous
-            now = _getfsnow(self._opener)
-
             # delay writing in-memory changes out
             tr.addfilegenerator(
                 b'dirstate',
                 (self._filename,),
-                lambda f: self._writedirstate(tr, f, now=now),
+                lambda f: self._writedirstate(tr, f),
                 location=b'plain',
             )
             return
@@ -811,7 +748,7 @@
         """
         self._plchangecallbacks[category] = callback
 
-    def _writedirstate(self, tr, st, now=None):
+    def _writedirstate(self, tr, st):
         # notify callbacks about parents change
         if self._origpl is not None and self._origpl != self._pl:
             for c, callback in sorted(
@@ -820,32 +757,7 @@
                 callback(self, self._origpl, self._pl)
             self._origpl = None
 
-        if now is None:
-            # use the modification time of the newly created temporary file as the
-            # filesystem's notion of 'now'
-            now = timestamp.mtime_of(util.fstat(st))
-
-        # enough 'delaywrite' prevents 'pack_dirstate' from dropping
-        # timestamp of each entries in dirstate, because of 'now > mtime'
-        delaywrite = self._ui.configint(b'debug', b'dirstate.delaywrite')
-        if delaywrite > 0:
-            # do we have any files to delay for?
-            for f, e in pycompat.iteritems(self._map):
-                if e.need_delay(now):
-                    import time  # to avoid useless import
-
-                    # rather than sleep n seconds, sleep until the next
-                    # multiple of n seconds
-                    clock = time.time()
-                    start = int(clock) - (int(clock) % delaywrite)
-                    end = start + delaywrite
-                    time.sleep(end - clock)
-                    # trust our estimate that the end is near now
-                    now = timestamp.timestamp((end, 0))
-                    break
-
-        self._map.write(tr, st, now)
-        self._lastnormaltime = timestamp.zero()
+        self._map.write(tr, st)
         self._dirty = False
 
     def _dirignore(self, f):
@@ -1243,7 +1155,6 @@
             self._rootdir,
             self._ignorefiles(),
             self._checkexec,
-            self._lastnormaltime,
             bool(list_clean),
             bool(list_ignored),
             bool(list_unknown),
@@ -1335,11 +1246,20 @@
             # Some matchers have yet to be implemented
             use_rust = False
 
+        # Get the time from the filesystem so we can disambiguate files that
+        # appear modified in the present or future.
+        try:
+            mtime_boundary = timestamp.get_fs_now(self._opener)
+        except OSError:
+            # In largefiles or readonly context
+            mtime_boundary = None
+
         if use_rust:
             try:
-                return self._rust_status(
+                res = self._rust_status(
                     match, listclean, listignored, listunknown
                 )
+                return res + (mtime_boundary,)
             except rustmod.FallbackError:
                 pass
 
@@ -1361,7 +1281,6 @@
         checkexec = self._checkexec
         checklink = self._checklink
         copymap = self._map.copymap
-        lastnormaltime = self._lastnormaltime
 
         # We need to do full walks when either
         # - we're listing all clean files, or
@@ -1417,19 +1336,17 @@
                     else:
                         madd(fn)
                 elif not t.mtime_likely_equal_to(timestamp.mtime_of(st)):
-                    ladd(fn)
-                elif timestamp.mtime_of(st) == lastnormaltime:
-                    # fn may have just been marked as normal and it may have
-                    # changed in the same second without changing its size.
-                    # This can happen if we quickly do multiple commits.
-                    # Force lookup, so we don't miss such a racy file change.
+                    # There might be a change in the future if for example the
+                    # internal clock is off, but this is a case where the issues
+                    # the user would face would be a lot worse and there is
+                    # nothing we can really do.
                     ladd(fn)
                 elif listclean:
                     cadd(fn)
         status = scmutil.status(
             modified, added, removed, deleted, unknown, ignored, clean
         )
-        return (lookup, status)
+        return (lookup, status, mtime_boundary)
 
     def matches(self, match):
         """
--- a/mercurial/dirstatemap.py	Mon Dec 06 10:08:04 2021 +0100
+++ b/mercurial/dirstatemap.py	Wed Dec 08 10:14:37 2021 +0100
@@ -444,13 +444,13 @@
         self.__getitem__ = self._map.__getitem__
         self.get = self._map.get
 
-    def write(self, tr, st, now):
+    def write(self, tr, st):
         if self._use_dirstate_v2:
-            packed, meta = v2.pack_dirstate(self._map, self.copymap, now)
+            packed, meta = v2.pack_dirstate(self._map, self.copymap)
             self.write_v2_no_append(tr, st, meta, packed)
         else:
             packed = parsers.pack_dirstate(
-                self._map, self.copymap, self.parents(), now
+                self._map, self.copymap, self.parents()
             )
             st.write(packed)
             st.close()
@@ -655,10 +655,10 @@
             self._map
             return self.identity
 
-        def write(self, tr, st, now):
+        def write(self, tr, st):
             if not self._use_dirstate_v2:
                 p1, p2 = self.parents()
-                packed = self._map.write_v1(p1, p2, now)
+                packed = self._map.write_v1(p1, p2)
                 st.write(packed)
                 st.close()
                 self._dirtyparents = False
@@ -666,7 +666,7 @@
 
             # We can only append to an existing data file if there is one
             can_append = self.docket.uuid is not None
-            packed, meta, append = self._map.write_v2(now, can_append)
+            packed, meta, append = self._map.write_v2(can_append)
             if append:
                 docket = self.docket
                 data_filename = docket.data_filename()
--- a/mercurial/dirstateutils/timestamp.py	Mon Dec 06 10:08:04 2021 +0100
+++ b/mercurial/dirstateutils/timestamp.py	Wed Dec 08 10:14:37 2021 +0100
@@ -6,8 +6,11 @@
 from __future__ import absolute_import
 
 import functools
+import os
 import stat
 
+from .. import error
+
 
 rangemask = 0x7FFFFFFF
 
@@ -28,30 +31,32 @@
     """
 
     def __new__(cls, value):
-        truncated_seconds, subsec_nanos = value
-        value = (truncated_seconds & rangemask, subsec_nanos)
+        truncated_seconds, subsec_nanos, second_ambiguous = value
+        value = (truncated_seconds & rangemask, subsec_nanos, second_ambiguous)
         return super(timestamp, cls).__new__(cls, value)
 
     def __eq__(self, other):
-        self_secs, self_subsec_nanos = self
-        other_secs, other_subsec_nanos = other
-        return self_secs == other_secs and (
-            self_subsec_nanos == other_subsec_nanos
-            or self_subsec_nanos == 0
-            or other_subsec_nanos == 0
+        raise error.ProgrammingError(
+            'timestamp should never be compared directly'
         )
 
     def __gt__(self, other):
-        self_secs, self_subsec_nanos = self
-        other_secs, other_subsec_nanos = other
-        if self_secs > other_secs:
-            return True
-        if self_secs < other_secs:
-            return False
-        if self_subsec_nanos == 0 or other_subsec_nanos == 0:
-            # they are considered equal, so not "greater than"
-            return False
-        return self_subsec_nanos > other_subsec_nanos
+        raise error.ProgrammingError(
+            'timestamp should never be compared directly'
+        )
+
+
+def get_fs_now(vfs):
+    """return a timestamp for "now" in the current vfs
+
+    This will raise an exception if no temporary files could be created.
+    """
+    tmpfd, tmpname = vfs.mkstemp()
+    try:
+        return mtime_of(os.fstat(tmpfd))
+    finally:
+        os.close(tmpfd)
+        vfs.unlink(tmpname)
 
 
 def zero():
@@ -84,4 +89,36 @@
         secs = nanos // billion
         subsec_nanos = nanos % billion
 
-    return timestamp((secs, subsec_nanos))
+    return timestamp((secs, subsec_nanos, False))
+
+
+def reliable_mtime_of(stat_result, present_mtime):
+    """same as `mtime_of`, but return None if the date might be ambiguous
+
+    A modification time is reliable if it is older than "present_mtime" (or
+    sufficiently in the future).
+
+    Otherwise a concurrent modification might happen with the same mtime.
+    """
+    file_mtime = mtime_of(stat_result)
+    file_second = file_mtime[0]
+    file_ns = file_mtime[1]
+    boundary_second = present_mtime[0]
+    boundary_ns = present_mtime[1]
+    # If the mtime of the ambiguous file is younger than (or equal to) the
+    # starting point of the `status` walk, we cannot guarantee that another
+    # racy write will not happen right after with the same mtime, and we
+    # cannot cache the information.
+    #
+    # However, if the mtime is far away in the future, this is likely some
+    # mismatch between the current clock and a previous file system operation.
+    # So mtimes more than one day in the future are considered fine.
+    if boundary_second == file_second:
+        if file_ns and boundary_ns:
+            if file_ns < boundary_ns:
+                return timestamp((file_second, file_ns, True))
+        return None
+    elif boundary_second < file_second < (3600 * 24 + boundary_second):
+        return None
+    else:
+        return file_mtime
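
To make the boundary rules above concrete, here is a minimal standalone sketch
of the same decision, using plain (seconds, nanoseconds) tuples instead of the
`timestamp` class; the helper name and the literal values are illustrative
only::

  def is_reliable(file_mtime, boundary):
      """Return True when a cached mtime can still be trusted by `status`."""
      file_s, file_ns = file_mtime
      bound_s, bound_ns = boundary
      if file_s == bound_s:
          # Same second as the start of the status walk: only trust the mtime
          # when both sides carry sub-second precision and the file is
          # strictly older than the boundary.
          return bool(file_ns and bound_ns and file_ns < bound_ns)
      if bound_s < file_s < bound_s + 24 * 3600:
          # Slightly in the future: ambiguous, do not cache.
          return False
      # Strictly in the past, or more than a day ahead (clock mismatch).
      return True

  assert is_reliable((99, 0), (100, 0))           # older second: reliable
  assert not is_reliable((100, 0), (100, 0))      # same second, no ns info
  assert is_reliable((100, 5), (100, 9))          # same second, older ns
  assert not is_reliable((150, 0), (100, 0))      # near future: ambiguous
  assert is_reliable((90100, 0), (100, 0))        # far future: clock skew
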
--- a/mercurial/dirstateutils/v2.py	Mon Dec 06 10:08:04 2021 +0100
+++ b/mercurial/dirstateutils/v2.py	Wed Dec 08 10:14:37 2021 +0100
@@ -174,12 +174,10 @@
         )
 
 
-def pack_dirstate(map, copy_map, now):
+def pack_dirstate(map, copy_map):
     """
     Pack `map` and `copy_map` into the dirstate v2 binary format and return
     the bytearray.
-    `now` is a timestamp of the current filesystem time used to detect race
-    conditions in writing the dirstate to disk, see inline comment.
 
     The on-disk format expects a tree-like structure where the leaves are
     written first (and sorted per-directory), going up levels until the root
@@ -284,17 +282,6 @@
     stack.append(current_node)
 
     for index, (path, entry) in enumerate(sorted_map, 1):
-        if entry.need_delay(now):
-            # The file was last modified "simultaneously" with the current
-            # write to dirstate (i.e. within the same second for file-
-            # systems with a granularity of 1 sec). This commonly happens
-            # for at least a couple of files on 'update'.
-            # The user could change the file without changing its size
-            # within the same second. Invalidate the file's mtime in
-            # dirstate, forcing future 'status' calls to compare the
-            # contents of the file if the size is the same. This prevents
-            # mistakenly treating such files as clean.
-            entry.set_possibly_dirty()
         nodes_with_entry_count += 1
         if path in copy_map:
             nodes_with_copy_source_count += 1
--- a/mercurial/error.py	Mon Dec 06 10:08:04 2021 +0100
+++ b/mercurial/error.py	Wed Dec 08 10:14:37 2021 +0100
@@ -388,6 +388,14 @@
     __bytes__ = _tobytes
 
 
+class PatchParseError(PatchError):
+    __bytes__ = _tobytes
+
+
+class PatchApplicationError(PatchError):
+    __bytes__ = _tobytes
+
+
 def getsimilar(symbols, value):
     # type: (Iterable[bytes], bytes) -> List[bytes]
     sim = lambda x: difflib.SequenceMatcher(None, value, x).ratio()
--- a/mercurial/extensions.py	Mon Dec 06 10:08:04 2021 +0100
+++ b/mercurial/extensions.py	Wed Dec 08 10:14:37 2021 +0100
@@ -282,6 +282,7 @@
     result = ui.configitems(b"extensions")
     if whitelist is not None:
         result = [(k, v) for (k, v) in result if k in whitelist]
+    result = [(k, v) for (k, v) in result if b':' not in k]
     newindex = len(_order)
     ui.log(
         b'extension',
@@ -290,6 +291,8 @@
     )
     ui.log(b'extension', b'- processing %d entries\n', len(result))
     with util.timedcm('load all extensions') as stats:
+        default_sub_options = ui.configsuboptions(b"extensions", b"*")[1]
+
         for (name, path) in result:
             if path:
                 if path[0:1] == b'!':
@@ -306,18 +309,32 @@
             except Exception as inst:
                 msg = stringutil.forcebytestr(inst)
                 if path:
-                    ui.warn(
-                        _(b"*** failed to import extension %s from %s: %s\n")
-                        % (name, path, msg)
+                    error_msg = _(
+                        b'failed to import extension "%s" from %s: %s'
                     )
+                    error_msg %= (name, path, msg)
                 else:
-                    ui.warn(
-                        _(b"*** failed to import extension %s: %s\n")
-                        % (name, msg)
-                    )
-                if isinstance(inst, error.Hint) and inst.hint:
-                    ui.warn(_(b"*** (%s)\n") % inst.hint)
-                ui.traceback()
+                    error_msg = _(b'failed to import extension "%s": %s')
+                    error_msg %= (name, msg)
+
+                options = default_sub_options.copy()
+                ext_options = ui.configsuboptions(b"extensions", name)[1]
+                options.update(ext_options)
+                if stringutil.parsebool(options.get(b"required", b'no')):
+                    hint = None
+                    if isinstance(inst, error.Hint) and inst.hint:
+                        hint = inst.hint
+                    if hint is None:
+                        hint = _(
+                            b"loading of this extension was required, "
+                            b"see `hg help config.extensions` for details"
+                        )
+                    raise error.Abort(error_msg, hint=hint)
+                else:
+                    ui.warn((b"*** %s\n") % error_msg)
+                    if isinstance(inst, error.Hint) and inst.hint:
+                        ui.warn(_(b"*** (%s)\n") % inst.hint)
+                    ui.traceback()
 
     ui.log(
         b'extension',
--- a/mercurial/filemerge.py	Mon Dec 06 10:08:04 2021 +0100
+++ b/mercurial/filemerge.py	Wed Dec 08 10:14:37 2021 +0100
@@ -846,7 +846,7 @@
         return True, r, False
 
 
-def _formatconflictmarker(ctx, template, label, pad):
+def _formatlabel(ctx, template, label, pad):
     """Applies the given template to the ctx, prefixed by the label.
 
     Pad is the minimum width of the label prefix, so that multiple markers
@@ -893,11 +893,11 @@
     pad = max(len(l) for l in labels)
 
     newlabels = [
-        _formatconflictmarker(cd, tmpl, labels[0], pad),
-        _formatconflictmarker(co, tmpl, labels[1], pad),
+        _formatlabel(cd, tmpl, labels[0], pad),
+        _formatlabel(co, tmpl, labels[1], pad),
     ]
     if len(labels) > 2:
-        newlabels.append(_formatconflictmarker(ca, tmpl, labels[2], pad))
+        newlabels.append(_formatlabel(ca, tmpl, labels[2], pad))
     return newlabels
 
 
@@ -924,7 +924,7 @@
     fcd.write(back.data(), fcd.flags())
 
 
-def _makebackup(repo, ui, wctx, fcd, premerge):
+def _makebackup(repo, ui, wctx, fcd):
     """Makes and returns a filectx-like object for ``fcd``'s backup file.
 
     In addition to preserving the user's pre-existing modifications to `fcd`
@@ -932,8 +932,8 @@
     merge changed anything, and determine what line endings the new file should
     have.
 
-    Backups only need to be written once (right before the premerge) since their
-    content doesn't change afterwards.
+    Backups only need to be written once since their content doesn't change
+    afterwards.
     """
     if fcd.isabsent():
         return None
@@ -950,20 +950,18 @@
         # merging in-memory, we must redirect the backup to the memory context
         # so we don't disturb the working directory.
         relpath = back[len(repo.wvfs.base) + 1 :]
-        if premerge:
-            wctx[relpath].write(fcd.data(), fcd.flags())
+        wctx[relpath].write(fcd.data(), fcd.flags())
         return wctx[relpath]
     else:
-        if premerge:
-            # Otherwise, write to wherever path the user specified the backups
-            # should go. We still need to switch based on whether the source is
-            # in-memory so we can use the fast path of ``util.copy`` if both are
-            # on disk.
-            if isinstance(fcd, context.overlayworkingfilectx):
-                util.writefile(back, fcd.data())
-            else:
-                a = _workingpath(repo, fcd)
-                util.copyfile(a, back)
+        # Otherwise, write to wherever path the user specified the backups
+        # should go. We still need to switch based on whether the source is
+        # in-memory so we can use the fast path of ``util.copy`` if both are
+        # on disk.
+        if isinstance(fcd, context.overlayworkingfilectx):
+            util.writefile(back, fcd.data())
+        else:
+            a = _workingpath(repo, fcd)
+            util.copyfile(a, back)
         # A arbitraryfilectx is returned, so we can run the same functions on
         # the backup context regardless of where it lives.
         return context.arbitraryfilectx(back, repo=repo)
@@ -995,7 +993,7 @@
 
     def tempfromcontext(prefix, ctx):
         f, name = maketempfrompath(prefix, ctx.path())
-        data = repo.wwritedata(ctx.path(), ctx.data())
+        data = ctx.decodeddata()
         f.write(data)
         f.close()
         return name
@@ -1027,7 +1025,7 @@
                 util.unlink(d)
 
 
-def _filemerge(premerge, repo, wctx, mynode, orig, fcd, fco, fca, labels=None):
+def filemerge(repo, wctx, mynode, orig, fcd, fco, fca, labels=None):
     """perform a 3-way merge in the working directory
 
     premerge = whether this is a premerge
@@ -1102,14 +1100,13 @@
         r, deleted = func(repo, mynode, orig, fcd, fco, fca, toolconf, labels)
         return True, r, deleted
 
-    if premerge:
-        if orig != fco.path():
-            ui.status(
-                _(b"merging %s and %s to %s\n")
-                % (uipathfn(orig), uipathfn(fco.path()), fduipath)
-            )
-        else:
-            ui.status(_(b"merging %s\n") % fduipath)
+    if orig != fco.path():
+        ui.status(
+            _(b"merging %s and %s to %s\n")
+            % (uipathfn(orig), uipathfn(fco.path()), fduipath)
+        )
+    else:
+        ui.status(_(b"merging %s\n") % fduipath)
 
     ui.debug(b"my %s other %s ancestor %s\n" % (fcd, fco, fca))
 
@@ -1122,7 +1119,7 @@
             ui.warn(onfailure % fduipath)
         return True, 1, False
 
-    back = _makebackup(repo, ui, wctx, fcd, premerge)
+    back = _makebackup(repo, ui, wctx, fcd)
     files = (None, None, None, back)
     r = 1
     try:
@@ -1140,7 +1137,7 @@
                 repo, fcd, fco, fca, labels, tool=tool
             )
 
-        if premerge and mergetype == fullmerge:
+        if mergetype == fullmerge:
             # conflict markers generated by premerge will use 'detailed'
             # settings if either ui.mergemarkers or the tool's mergemarkers
             # setting is 'detailed'. This way tools can have basic labels in
@@ -1160,8 +1157,9 @@
             r = _premerge(
                 repo, fcd, fco, fca, toolconf, files, labels=premergelabels
             )
-            # complete if premerge successful (r is 0)
-            return not r, r, False
+            # we're done if premerge was successful (r is 0)
+            if not r:
+                return not r, r, False
 
         needcheck, r, deleted = func(
             repo,
@@ -1277,18 +1275,6 @@
     return repo.wjoin(ctx.path())
 
 
-def premerge(repo, wctx, mynode, orig, fcd, fco, fca, labels=None):
-    return _filemerge(
-        True, repo, wctx, mynode, orig, fcd, fco, fca, labels=labels
-    )
-
-
-def filemerge(repo, wctx, mynode, orig, fcd, fco, fca, labels=None):
-    return _filemerge(
-        False, repo, wctx, mynode, orig, fcd, fco, fca, labels=labels
-    )
-
-
 def loadinternalmerge(ui, extname, registrarobj):
     """Load internal merge tool from specified registrarobj"""
     for name, func in pycompat.iteritems(registrarobj._table):
--- a/mercurial/helptext/config.txt	Mon Dec 06 10:08:04 2021 +0100
+++ b/mercurial/helptext/config.txt	Wed Dec 08 10:14:37 2021 +0100
@@ -513,13 +513,18 @@
 ``update.check``
     Determines what level of checking :hg:`update` will perform before moving
     to a destination revision. Valid values are ``abort``, ``none``,
-    ``linear``, and ``noconflict``. ``abort`` always fails if the working
-    directory has uncommitted changes. ``none`` performs no checking, and may
-    result in a merge with uncommitted changes. ``linear`` allows any update
-    as long as it follows a straight line in the revision history, and may
-    trigger a merge with uncommitted changes. ``noconflict`` will allow any
-    update which would not trigger a merge with uncommitted changes, if any
-    are present.
+    ``linear``, and ``noconflict``.
+
+    - ``abort`` always fails if the working directory has uncommitted changes.
+
+    - ``none`` performs no checking, and may result in a merge with uncommitted changes.
+
+    - ``linear`` allows any update as long as it follows a straight line in the
+      revision history, and may trigger a merge with uncommitted changes.
+
+    - ``noconflict`` will allow any update which would not trigger a merge with
+      uncommitted changes, if any are present.
+
     (default: ``linear``)
 
 ``update.requiredest``
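
For example, to refuse any update that would merge uncommitted changes, the
``update.check`` value described above could be configured like this (an
illustrative snippet; the option lives in the ``commands`` section)::

  [commands]
  update.check = noconflict
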
@@ -850,6 +855,24 @@
   # (this extension will get loaded from the file specified)
   myfeature = ~/.hgext/myfeature.py
 
+If an extension fails to load, a warning will be issued, and Mercurial will
+proceed. To enforce that an extension must be loaded, one can set the `required`
+suboption in the config::
+
+  [extensions]
+  myfeature = ~/.hgext/myfeature.py
+  myfeature:required = yes
+
+To debug extension loading issues, one can add `--traceback` to their
+Mercurial invocation.
+
+A default setting can be set using the special `*` extension key::
+
+  [extensions]
+  *:required = yes
+  myfeature = ~/.hgext/myfeature.py
+  rebase=
+
 
 ``format``
 ----------
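
Since per-extension suboptions override the ``*`` default (see the loader
change in mercurial/extensions.py above), a single extension can be exempted
from such a blanket requirement; an illustrative example::

  [extensions]
  *:required = yes
  myfeature = ~/.hgext/myfeature.py
  myfeature:required = no
  rebase =
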
--- a/mercurial/mdiff.py	Mon Dec 06 10:08:04 2021 +0100
+++ b/mercurial/mdiff.py	Wed Dec 08 10:14:37 2021 +0100
@@ -84,7 +84,7 @@
         try:
             self.context = int(self.context)
         except ValueError:
-            raise error.Abort(
+            raise error.InputError(
                 _(b'diff context lines count must be an integer, not %r')
                 % pycompat.bytestr(self.context)
             )
--- a/mercurial/merge.py	Mon Dec 06 10:08:04 2021 +0100
+++ b/mercurial/merge.py	Wed Dec 08 10:14:37 2021 +0100
@@ -542,7 +542,7 @@
                 hint=_(b'merging in the other direction may work'),
             )
         else:
-            raise error.Abort(
+            raise error.StateError(
                 _(b'conflict in file \'%s\' is outside narrow clone') % f
             )
 
@@ -1404,6 +1404,34 @@
                 atomictemp=atomictemp,
             )
             if wantfiledata:
+                # XXX note that there is a race window between the time we
+                # write the clean data into the file and the time we stat it.
+                # Another writing process meddling with the file content right
+                # after we wrote it could cause bad stat data to be gathered.
+                #
+                # There are two pieces of data we gather here:
+                # - the mode:
+                #       We actually just wrote it, so we should not need to
+                #       read it from disk (except that not all modes might
+                #       survive the disk round-trip, which is another issue:
+                #       we should not depend on this).
+                # - the mtime:
+                #       On systems that support nanosecond precision, the
+                #       mtime could be accurate enough to tell the two writes
+                #       apart. However, gathering it in a racy way makes the
+                #       mtime we gather "unreliable".
+                #
+                # (note: we get the size from the data we write, which is sane)
+                #
+                # So in theory the data returned here is fully racy, but in
+                # practice "it works mostly fine".
+                #
+                # Do not be surprised if you end up reading this while looking
+                # for the causes of some buggy status. Feel free to improve
+                # this in the future, but we cannot simply stop gathering the
+                # information. Otherwise an `hg status` call made after a
+                # large `hg update` would have to redo a similar amount of
+                # work to restore and compare all file contents.
                 s = wfctx.lstat()
                 mode = s.st_mode
                 mtime = timestamp.mtime_of(s)
@@ -1690,10 +1718,8 @@
     )
 
     try:
-        # premerge
-        tocomplete = []
         for f, args, msg in mergeactions:
-            repo.ui.debug(b" %s: %s -> m (premerge)\n" % (f, msg))
+            repo.ui.debug(b" %s: %s -> m\n" % (f, msg))
             ms.addcommitinfo(f, {b'merged': b'yes'})
             progress.increment(item=f)
             if f == b'.hgsubstate':  # subrepo states need updating
@@ -1702,16 +1728,6 @@
                 )
                 continue
             wctx[f].audit()
-            complete, r = ms.preresolve(f, wctx)
-            if not complete:
-                numupdates += 1
-                tocomplete.append((f, args, msg))
-
-        # merge
-        for f, args, msg in tocomplete:
-            repo.ui.debug(b" %s: %s -> m (merge)\n" % (f, msg))
-            ms.addcommitinfo(f, {b'merged': b'yes'})
-            progress.increment(item=f, total=numupdates)
             ms.resolve(f, wctx)
 
     except error.InterventionRequired:
@@ -2144,6 +2160,71 @@
                 mresult.len((mergestatemod.ACTION_GET,)) if wantfiledata else 0
             )
             with repo.dirstate.parentchange():
+                ### Filter Filedata
+                #
+                # We gathered "cache" information for the clean files while
+                # updating them: mtime, size and mode.
+                #
+                # At the time this comment is written, there are various
+                # issues with how we gather the `mode` and `mtime` information
+                # (see the comment in `batchget`).
+                #
+                # We are going to smooth over one of these issues here: mtime
+                # ambiguity.
+                #
+                # i.e. even if the mtime gathered during `batchget` was
+                # correct[1], a change happening right after it could change
+                # the content while keeping the same mtime[2].
+                #
+                # When we reach the current code, the "on disk" part of the
+                # update operation is finished. We still assume that no other
+                # process raced that "on disk" part, but we want to at least
+                # prevent a later file change from altering the content of a
+                # file right after the update operation, so quickly that the
+                # same mtime is recorded for the operation.
+                # To prevent such ambiguity from happening, we will only keep
+                # the "file data" for files whose mtime is strictly in the
+                # past, i.e. whose mtime is strictly lower than the current
+                # time.
+                #
+                # This protects us from race conditions with operations that
+                # could run right after this one, especially other Mercurial
+                # operations that could be waiting for the wlock to touch file
+                # contents and the dirstate.
+                #
+                # In an ideal world, we would only get reliable information in
+                # `getfiledata` (from `batchget`); however, the current
+                # approach has been a successful compromise for many years.
+                #
+                # At the time this comment is written, not using any "cache"
+                # file data at all here would not be viable, as it would
+                # result in a very large amount of work (equivalent to the
+                # previous `hg update`) during the next status after an
+                # update.
+                #
+                # [1] the current code cannot guarantee that the `mtime` and
+                # `mode` are correct, but the result is "okay in practice"
+                # (see the comment in `batchget`).
+                #
+                # [2] using nano-second precision can greatly help here
+                # because it makes the "different write with same mtime" issue
+                # virtually vanish. However, dirstate v1 cannot store such
+                # precision and a number of Python runtimes, operating systems
+                # and filesystems do not provide us with such precision, so we
+                # have to operate as if it wasn't available.
+                if getfiledata:
+                    ambiguous_mtime = {}
+                    now = timestamp.get_fs_now(repo.vfs)
+                    if now is None:
+                        # we can't write to the FS, so we won't actually update
+                        # the dirstate content anyway, no need to put cache
+                        # information.
+                        getfiledata = None
+                    else:
+                        now_sec = now[0]
+                        for f, m in pycompat.iteritems(getfiledata):
+                            if m is not None and m[2][0] >= now_sec:
+                                ambiguous_mtime[f] = (m[0], m[1], None)
+                        for f, m in pycompat.iteritems(ambiguous_mtime):
+                            getfiledata[f] = m
+
                 repo.setparents(fp1, fp2)
                 mergestatemod.recordupdates(
                     repo, mresult.actionsdict, branchmerge, getfiledata
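
A minimal standalone illustration of the ambiguous-mtime filtering added in
the hunk above; the file names and numbers are made up, and `getfiledata` is
assumed to map file names to the (mode, size, mtime) tuples gathered by
`batchget`::

  now_sec = 1000000  # the real code takes this from timestamp.get_fs_now()
  getfiledata = {
      b'old.txt': (0o644, 12, (999990, 0, False)),     # strictly in the past
      b'fresh.txt': (0o644, 34, (1000000, 0, False)),  # same second as "now"
  }
  for f, m in list(getfiledata.items()):
      if m is not None and m[2][0] >= now_sec:
          # drop the mtime so the dirstate will not trust it later
          getfiledata[f] = (m[0], m[1], None)
  assert getfiledata[b'old.txt'][2] is not None
  assert getfiledata[b'fresh.txt'][2] is None
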
@@ -2386,13 +2467,13 @@
 
         if confirm:
             nb_ignored = len(status.ignored)
-            nb_unkown = len(status.unknown)
-            if nb_unkown and nb_ignored:
-                msg = _(b"permanently delete %d unkown and %d ignored files?")
-                msg %= (nb_unkown, nb_ignored)
-            elif nb_unkown:
-                msg = _(b"permanently delete %d unkown files?")
-                msg %= nb_unkown
+            nb_unknown = len(status.unknown)
+            if nb_unknown and nb_ignored:
+                msg = _(b"permanently delete %d unknown and %d ignored files?")
+                msg %= (nb_unknown, nb_ignored)
+            elif nb_unknown:
+                msg = _(b"permanently delete %d unknown files?")
+                msg %= nb_unknown
             elif nb_ignored:
                 msg = _(b"permanently delete %d ignored files?")
                 msg %= nb_ignored
--- a/mercurial/mergestate.py	Mon Dec 06 10:08:04 2021 +0100
+++ b/mercurial/mergestate.py	Wed Dec 08 10:14:37 2021 +0100
@@ -313,16 +313,15 @@
         """return extras stored with the mergestate for the given filename"""
         return self._stateextras[filename]
 
-    def _resolve(self, preresolve, dfile, wctx):
-        """rerun merge process for file path `dfile`.
-        Returns whether the merge was completed and the return value of merge
-        obtained from filemerge._filemerge().
-        """
+    def resolve(self, dfile, wctx):
+        """run merge process for dfile
+
+        Returns the exit code of the merge."""
         if self[dfile] in (
             MERGE_RECORD_RESOLVED,
             LEGACY_RECORD_DRIVER_RESOLVED,
         ):
-            return True, 0
+            return 0
         stateentry = self._state[dfile]
         state, localkey, lfile, afile, anode, ofile, onode, flags = stateentry
         octx = self._repo[self._other]
@@ -341,43 +340,30 @@
         fla = fca.flags()
         if b'x' in flags + flo + fla and b'l' not in flags + flo + fla:
             if fca.rev() == nullrev and flags != flo:
-                if preresolve:
-                    self._repo.ui.warn(
-                        _(
-                            b'warning: cannot merge flags for %s '
-                            b'without common ancestor - keeping local flags\n'
-                        )
-                        % afile
+                self._repo.ui.warn(
+                    _(
+                        b'warning: cannot merge flags for %s '
+                        b'without common ancestor - keeping local flags\n'
                     )
+                    % afile
+                )
             elif flags == fla:
                 flags = flo
-        if preresolve:
-            # restore local
-            if localkey != self._repo.nodeconstants.nullhex:
-                self._restore_backup(wctx[dfile], localkey, flags)
-            else:
-                wctx[dfile].remove(ignoremissing=True)
-            complete, merge_ret, deleted = filemerge.premerge(
-                self._repo,
-                wctx,
-                self._local,
-                lfile,
-                fcd,
-                fco,
-                fca,
-                labels=self._labels,
-            )
+        # restore local
+        if localkey != self._repo.nodeconstants.nullhex:
+            self._restore_backup(wctx[dfile], localkey, flags)
         else:
-            complete, merge_ret, deleted = filemerge.filemerge(
-                self._repo,
-                wctx,
-                self._local,
-                lfile,
-                fcd,
-                fco,
-                fca,
-                labels=self._labels,
-            )
+            wctx[dfile].remove(ignoremissing=True)
+        complete, merge_ret, deleted = filemerge.filemerge(
+            self._repo,
+            wctx,
+            self._local,
+            lfile,
+            fcd,
+            fco,
+            fca,
+            labels=self._labels,
+        )
         if merge_ret is None:
             # If return value of merge is None, then there are no real conflict
             del self._state[dfile]
@@ -406,19 +392,7 @@
                 # else: regular merges (no action necessary)
             self._results[dfile] = merge_ret, action
 
-        return complete, merge_ret
-
-    def preresolve(self, dfile, wctx):
-        """run premerge process for dfile
-
-        Returns whether the merge is complete, and the exit code."""
-        return self._resolve(True, dfile, wctx)
-
-    def resolve(self, dfile, wctx):
-        """run merge process (assuming premerge was run) for dfile
-
-        Returns the exit code of the merge."""
-        return self._resolve(False, dfile, wctx)[1]
+        return merge_ret
 
     def counts(self):
         """return counts for updated, merged and removed files in this
--- a/mercurial/narrowspec.py	Mon Dec 06 10:08:04 2021 +0100
+++ b/mercurial/narrowspec.py	Wed Dec 08 10:14:37 2021 +0100
@@ -323,7 +323,7 @@
     removedmatch = matchmod.differencematcher(oldmatch, newmatch)
 
     ds = repo.dirstate
-    lookup, status = ds.status(
+    lookup, status, _mtime_boundary = ds.status(
         removedmatch, subrepos=[], ignored=True, clean=True, unknown=True
     )
     trackeddirty = status.modified + status.added
--- a/mercurial/obsutil.py	Mon Dec 06 10:08:04 2021 +0100
+++ b/mercurial/obsutil.py	Wed Dec 08 10:14:37 2021 +0100
@@ -218,7 +218,7 @@
 
         or
 
-        # (A0 rewritten as AX; AX rewritten as A1; AX is unkown locally)
+        # (A0 rewritten as AX; AX rewritten as A1; AX is unknown locally)
         #
         # <-1- A0 <-2- AX <-3- A1 # Marker "2,3" are exclusive to A1
 
--- a/mercurial/patch.py	Mon Dec 06 10:08:04 2021 +0100
+++ b/mercurial/patch.py	Wed Dec 08 10:14:37 2021 +0100
@@ -55,6 +55,8 @@
 )
 
 PatchError = error.PatchError
+PatchParseError = error.PatchParseError
+PatchApplicationError = error.PatchApplicationError
 
 # public functions
 
@@ -553,7 +555,9 @@
         if not self.repo.dirstate.get_entry(fname).any_tracked and self.exists(
             fname
         ):
-            raise PatchError(_(b'cannot patch %s: file is not tracked') % fname)
+            raise PatchApplicationError(
+                _(b'cannot patch %s: file is not tracked') % fname
+            )
 
     def setfile(self, fname, data, mode, copysource):
         self._checkknown(fname)
@@ -637,7 +641,9 @@
 
     def _checkknown(self, fname):
         if fname not in self.ctx:
-            raise PatchError(_(b'cannot patch %s: file is not tracked') % fname)
+            raise PatchApplicationError(
+                _(b'cannot patch %s: file is not tracked') % fname
+            )
 
     def getfile(self, fname):
         try:
@@ -793,7 +799,7 @@
 
     def apply(self, h):
         if not h.complete():
-            raise PatchError(
+            raise PatchParseError(
                 _(b"bad hunk #%d %s (%d %d %d %d)")
                 % (h.number, h.desc, len(h.a), h.lena, len(h.b), h.lenb)
             )
@@ -1388,7 +1394,7 @@
     def read_unified_hunk(self, lr):
         m = unidesc.match(self.desc)
         if not m:
-            raise PatchError(_(b"bad hunk #%d") % self.number)
+            raise PatchParseError(_(b"bad hunk #%d") % self.number)
         self.starta, self.lena, self.startb, self.lenb = m.groups()
         if self.lena is None:
             self.lena = 1
@@ -1405,7 +1411,7 @@
                 lr, self.hunk, self.lena, self.lenb, self.a, self.b
             )
         except error.ParseError as e:
-            raise PatchError(_(b"bad hunk #%d: %s") % (self.number, e))
+            raise PatchParseError(_(b"bad hunk #%d: %s") % (self.number, e))
         # if we hit eof before finishing out the hunk, the last line will
         # be zero length.  Lets try to fix it up.
         while len(self.hunk[-1]) == 0:
@@ -1420,7 +1426,7 @@
         self.desc = lr.readline()
         m = contextdesc.match(self.desc)
         if not m:
-            raise PatchError(_(b"bad hunk #%d") % self.number)
+            raise PatchParseError(_(b"bad hunk #%d") % self.number)
         self.starta, aend = m.groups()
         self.starta = int(self.starta)
         if aend is None:
@@ -1440,7 +1446,7 @@
             elif l.startswith(b'  '):
                 u = b' ' + s
             else:
-                raise PatchError(
+                raise PatchParseError(
                     _(b"bad hunk #%d old text line %d") % (self.number, x)
                 )
             self.a.append(u)
@@ -1454,7 +1460,7 @@
             l = lr.readline()
         m = contextdesc.match(l)
         if not m:
-            raise PatchError(_(b"bad hunk #%d") % self.number)
+            raise PatchParseError(_(b"bad hunk #%d") % self.number)
         self.startb, bend = m.groups()
         self.startb = int(self.startb)
         if bend is None:
@@ -1487,7 +1493,7 @@
                 lr.push(l)
                 break
             else:
-                raise PatchError(
+                raise PatchParseError(
                     _(b"bad hunk #%d old text line %d") % (self.number, x)
                 )
             self.b.append(s)
@@ -1601,7 +1607,7 @@
         while True:
             line = getline(lr, self.hunk)
             if not line:
-                raise PatchError(
+                raise PatchParseError(
                     _(b'could not extract "%s" binary data') % self._fname
                 )
             if line.startswith(b'literal '):
@@ -1622,14 +1628,14 @@
             try:
                 dec.append(util.b85decode(line[1:])[:l])
             except ValueError as e:
-                raise PatchError(
+                raise PatchParseError(
                     _(b'could not decode "%s" binary patch: %s')
                     % (self._fname, stringutil.forcebytestr(e))
                 )
             line = getline(lr, self.hunk)
         text = zlib.decompress(b''.join(dec))
         if len(text) != size:
-            raise PatchError(
+            raise PatchParseError(
                 _(b'"%s" length is %d bytes, should be %d')
                 % (self._fname, len(text), size)
             )
@@ -1847,7 +1853,7 @@
         try:
             p.transitions[state][newstate](p, data)
         except KeyError:
-            raise PatchError(
+            raise PatchParseError(
                 b'unhandled transition: %s -> %s' % (state, newstate)
             )
         state = newstate
@@ -1874,7 +1880,7 @@
     ('a//b/', 'd/e/c')
     >>> pathtransform(b'a/b/c', 3, b'')
     Traceback (most recent call last):
-    PatchError: unable to strip away 1 of 3 dirs from a/b/c
+    PatchApplicationError: unable to strip away 1 of 3 dirs from a/b/c
     """
     pathlen = len(path)
     i = 0
@@ -1884,7 +1890,7 @@
     while count > 0:
         i = path.find(b'/', i)
         if i == -1:
-            raise PatchError(
+            raise PatchApplicationError(
                 _(b"unable to strip away %d of %d dirs from %s")
                 % (count, strip, path)
             )
@@ -1947,7 +1953,7 @@
         elif not nulla:
             fname = afile
         else:
-            raise PatchError(_(b"undefined source and destination files"))
+            raise PatchParseError(_(b"undefined source and destination files"))
 
     gp = patchmeta(fname)
     if create:
@@ -2097,7 +2103,7 @@
                     gp.copy(),
                 )
             if not gitpatches:
-                raise PatchError(
+                raise PatchParseError(
                     _(b'failed to synchronize metadata for "%s"') % afile[2:]
                 )
             newfile = True
@@ -2193,7 +2199,7 @@
             out += binchunk[i:offset_end]
             i += cmd
         else:
-            raise PatchError(_(b'unexpected delta opcode 0'))
+            raise PatchApplicationError(_(b'unexpected delta opcode 0'))
     return out
 
 
@@ -2270,7 +2276,7 @@
                     data, mode = store.getfile(gp.oldpath)[:2]
                     if data is None:
                         # This means that the old path does not exist
-                        raise PatchError(
+                        raise PatchApplicationError(
                             _(b"source file '%s' does not exist") % gp.oldpath
                         )
                 if gp.mode:
@@ -2283,7 +2289,7 @@
                     if gp.op in (b'ADD', b'RENAME', b'COPY') and backend.exists(
                         gp.path
                     ):
-                        raise PatchError(
+                        raise PatchApplicationError(
                             _(
                                 b"cannot create %s: destination "
                                 b"already exists"
@@ -2365,7 +2371,7 @@
             scmutil.marktouched(repo, files, similarity)
     code = fp.close()
     if code:
-        raise PatchError(
+        raise PatchApplicationError(
             _(b"patch command failed: %s") % procutil.explainexit(code)
         )
     return fuzz
@@ -2397,7 +2403,7 @@
         files.update(backend.close())
         store.close()
     if ret < 0:
-        raise PatchError(_(b'patch failed to apply'))
+        raise PatchApplicationError(_(b'patch failed to apply'))
     return ret > 0
 
 
--- a/mercurial/pathutil.py	Mon Dec 06 10:08:04 2021 +0100
+++ b/mercurial/pathutil.py	Wed Dec 08 10:14:37 2021 +0100
@@ -79,20 +79,24 @@
             return
         # AIX ignores "/" at end of path, others raise EISDIR.
         if util.endswithsep(path):
-            raise error.Abort(_(b"path ends in directory separator: %s") % path)
+            raise error.InputError(
+                _(b"path ends in directory separator: %s") % path
+            )
         parts = util.splitpath(path)
         if (
             os.path.splitdrive(path)[0]
             or _lowerclean(parts[0]) in (b'.hg', b'.hg.', b'')
             or pycompat.ospardir in parts
         ):
-            raise error.Abort(_(b"path contains illegal component: %s") % path)
+            raise error.InputError(
+                _(b"path contains illegal component: %s") % path
+            )
         # Windows shortname aliases
         for p in parts:
             if b"~" in p:
                 first, last = p.split(b"~", 1)
                 if last.isdigit() and first.upper() in [b"HG", b"HG8B6C"]:
-                    raise error.Abort(
+                    raise error.InputError(
                         _(b"path contains illegal component: %s") % path
                     )
         if b'.hg' in _lowerclean(path):
@@ -101,7 +105,7 @@
                 if p in lparts[1:]:
                     pos = lparts.index(p)
                     base = os.path.join(*parts[:pos])
-                    raise error.Abort(
+                    raise error.InputError(
                         _(b"path '%s' is inside nested repo %r")
                         % (path, pycompat.bytestr(base))
                     )
--- a/mercurial/pure/parsers.py	Mon Dec 06 10:08:04 2021 +0100
+++ b/mercurial/pure/parsers.py	Wed Dec 08 10:14:37 2021 +0100
@@ -104,6 +104,7 @@
     _mtime_ns = attr.ib()
     _fallback_exec = attr.ib()
     _fallback_symlink = attr.ib()
+    _mtime_second_ambiguous = attr.ib()
 
     def __init__(
         self,
@@ -127,24 +128,27 @@
         self._size = None
         self._mtime_s = None
         self._mtime_ns = None
+        self._mtime_second_ambiguous = False
         if parentfiledata is None:
             has_meaningful_mtime = False
             has_meaningful_data = False
+        elif parentfiledata[2] is None:
+            has_meaningful_mtime = False
         if has_meaningful_data:
             self._mode = parentfiledata[0]
             self._size = parentfiledata[1]
         if has_meaningful_mtime:
-            self._mtime_s, self._mtime_ns = parentfiledata[2]
+            (
+                self._mtime_s,
+                self._mtime_ns,
+                self._mtime_second_ambiguous,
+            ) = parentfiledata[2]
 
     @classmethod
     def from_v2_data(cls, flags, size, mtime_s, mtime_ns):
         """Build a new DirstateItem object from V2 data"""
         has_mode_size = bool(flags & DIRSTATE_V2_HAS_MODE_AND_SIZE)
         has_meaningful_mtime = bool(flags & DIRSTATE_V2_HAS_MTIME)
-        if flags & DIRSTATE_V2_MTIME_SECOND_AMBIGUOUS:
-            # The current code is not able to do the more subtle comparison that the
-            # MTIME_SECOND_AMBIGUOUS requires. So we ignore the mtime
-            has_meaningful_mtime = False
         mode = None
 
         if flags & DIRSTATE_V2_EXPECTED_STATE_IS_MODIFIED:
@@ -171,13 +175,15 @@
                 mode |= stat.S_IFLNK
             else:
                 mode |= stat.S_IFREG
+
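+        # Keep the MTIME_SECOND_AMBIGUOUS information alongside the mtime
+        # (instead of dropping the mtime) so comparisons can account for it.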
+        second_ambiguous = flags & DIRSTATE_V2_MTIME_SECOND_AMBIGUOUS
         return cls(
             wc_tracked=bool(flags & DIRSTATE_V2_WDIR_TRACKED),
             p1_tracked=bool(flags & DIRSTATE_V2_P1_TRACKED),
             p2_info=bool(flags & DIRSTATE_V2_P2_INFO),
             has_meaningful_data=has_mode_size,
             has_meaningful_mtime=has_meaningful_mtime,
-            parentfiledata=(mode, size, (mtime_s, mtime_ns)),
+            parentfiledata=(mode, size, (mtime_s, mtime_ns, second_ambiguous)),
             fallback_exec=fallback_exec,
             fallback_symlink=fallback_symlink,
         )
@@ -214,13 +220,13 @@
                     wc_tracked=True,
                     p1_tracked=True,
                     has_meaningful_mtime=False,
-                    parentfiledata=(mode, size, (42, 0)),
+                    parentfiledata=(mode, size, (42, 0, False)),
                 )
             else:
                 return cls(
                     wc_tracked=True,
                     p1_tracked=True,
-                    parentfiledata=(mode, size, (mtime, 0)),
+                    parentfiledata=(mode, size, (mtime, 0, False)),
                 )
         else:
             raise RuntimeError(b'unknown state: %s' % state)
@@ -246,7 +252,7 @@
         self._p1_tracked = True
         self._mode = mode
         self._size = size
-        self._mtime_s, self._mtime_ns = mtime
+        self._mtime_s, self._mtime_ns, self._mtime_second_ambiguous = mtime
 
     def set_tracked(self):
         """mark a file as tracked in the working copy
@@ -301,10 +307,22 @@
         if self_sec is None:
             return False
         self_ns = self._mtime_ns
-        other_sec, other_ns = other_mtime
-        return self_sec == other_sec and (
-            self_ns == other_ns or self_ns == 0 or other_ns == 0
-        )
+        other_sec, other_ns, second_ambiguous = other_mtime
+        if self_sec != other_sec:
+            # seconds are different, these mtimes are definitely not equal
+            return False
+        elif other_ns == 0 or self_ns == 0:
+            # at least one side has no nanosecond information
+
+            if self._mtime_second_ambiguous:
+                # We cannot trust the mtime in this case
+                return False
+            else:
+                # the "seconds" value was reliable on its own. We are good to go.
+                return True
+        else:
+            # We have nanosecond information, let us use it!
+            return self_ns == other_ns
 
     @property
     def state(self):
@@ -463,6 +481,8 @@
                 flags |= DIRSTATE_V2_MODE_IS_SYMLINK
         if self._mtime_s is not None:
             flags |= DIRSTATE_V2_HAS_MTIME
+        if self._mtime_second_ambiguous:
+            flags |= DIRSTATE_V2_MTIME_SECOND_AMBIGUOUS
 
         if self._fallback_exec is not None:
             flags |= DIRSTATE_V2_HAS_FALLBACK_EXEC
@@ -531,13 +551,11 @@
             return AMBIGUOUS_TIME
         elif not self._p1_tracked:
             return AMBIGUOUS_TIME
+        elif self._mtime_second_ambiguous:
+            return AMBIGUOUS_TIME
         else:
             return self._mtime_s
 
-    def need_delay(self, now):
-        """True if the stored mtime would be ambiguous with the current time"""
-        return self.v1_state() == b'n' and self._mtime_s == now[0]
-
 
 def gettype(q):
     return int(q & 0xFFFF)
@@ -903,23 +921,11 @@
     return parents
 
 
-def pack_dirstate(dmap, copymap, pl, now):
+def pack_dirstate(dmap, copymap, pl):
     cs = stringio()
     write = cs.write
     write(b"".join(pl))
     for f, e in pycompat.iteritems(dmap):
-        if e.need_delay(now):
-            # The file was last modified "simultaneously" with the current
-            # write to dirstate (i.e. within the same second for file-
-            # systems with a granularity of 1 sec). This commonly happens
-            # for at least a couple of files on 'update'.
-            # The user could change the file without changing its size
-            # within the same second. Invalidate the file's mtime in
-            # dirstate, forcing future 'status' calls to compare the
-            # contents of the file if the size is the same. This prevents
-            # mistakenly treating such files as clean.
-            e.set_possibly_dirty()
-
         if f in copymap:
             f = b"%s\0%s" % (f, copymap[f])
         e = _pack(
--- a/mercurial/scmutil.py	Mon Dec 06 10:08:04 2021 +0100
+++ b/mercurial/scmutil.py	Wed Dec 08 10:14:37 2021 +0100
@@ -180,6 +180,8 @@
             )
         )
     except error.RepoError as inst:
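+        # A failed revision lookup is a user input error (detailed exit code 10).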
+        if isinstance(inst, error.RepoLookupError):
+            detailed_exit_code = 10
         ui.error(_(b"abort: %s\n") % inst)
         if inst.hint:
             ui.error(_(b"(%s)\n") % inst.hint)
@@ -341,7 +343,7 @@
         if fl in self._loweredfiles and f not in self._dirstate:
             msg = _(b'possible case-folding collision for %s') % f
             if self._abort:
-                raise error.Abort(msg)
+                raise error.StateError(msg)
             self._ui.warn(_(b"warning: %s\n") % msg)
         self._loweredfiles.add(fl)
         self._newfiles.add(f)
--- a/mercurial/simplemerge.py	Mon Dec 06 10:08:04 2021 +0100
+++ b/mercurial/simplemerge.py	Wed Dec 08 10:14:37 2021 +0100
@@ -544,7 +544,7 @@
                 name_a=name_a, name_b=name_b, **pycompat.strkwargs(extrakwargs)
             )
         )
-        conflicts = m3.conflicts
+        conflicts = m3.conflicts and not mode == b'union'
 
     # merge flags if necessary
     flags = localctx.flags()
@@ -562,5 +562,5 @@
     else:
         localctx.write(mergedtext, flags)
 
-    if conflicts and not mode == b'union':
+    if conflicts:
         return 1
--- a/relnotes/next	Mon Dec 06 10:08:04 2021 +0100
+++ b/relnotes/next	Wed Dec 08 10:14:37 2021 +0100
@@ -11,6 +11,7 @@
 
 == Bug Fixes ==
 
+The `--no-check` and `--no-merge` flags now properly override the behavior from `commands.update.check`.
 
 == Backwards Compatibility Changes ==
 
--- a/rust/hg-core/src/config/config.rs	Mon Dec 06 10:08:04 2021 +0100
+++ b/rust/hg-core/src/config/config.rs	Wed Dec 08 10:14:37 2021 +0100
@@ -114,6 +114,7 @@
             b"rhg",
             b"fallback-executable",
         );
+        config.add_for_environment_variable("RHG_STATUS", b"rhg", b"status");
 
         // HGRCPATH replaces user config
         if opt_rc_path.is_none() {
@@ -361,6 +362,15 @@
         Ok(self.get_option(section, item)?.unwrap_or(false))
     }
 
+    /// Returns `true` if the extension is enabled, `false` otherwise
+    pub fn is_extension_enabled(&self, extension: &[u8]) -> bool {
+        let value = self.get(b"extensions", extension);
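+        // A value that starts with "!" explicitly disables the extension.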
+        match value {
+            Some(c) => !c.starts_with(b"!"),
+            None => false,
+        }
+    }
+
     /// If there is an `item` value in `section`, parse and return a list of
     /// byte strings.
     pub fn get_list(
@@ -402,6 +412,13 @@
             .collect()
     }
 
+    /// Returns whether any key is defined in the given section
+    pub fn has_non_empty_section(&self, section: &[u8]) -> bool {
+        self.layers
+            .iter()
+            .any(|layer| layer.has_non_empty_section(section))
+    }
+
     /// Get raw values bytes from all layers (even untrusted ones) in order
     /// of precedence.
     #[cfg(test)]
--- a/rust/hg-core/src/config/layer.rs	Mon Dec 06 10:08:04 2021 +0100
+++ b/rust/hg-core/src/config/layer.rs	Wed Dec 08 10:14:37 2021 +0100
@@ -127,6 +127,13 @@
             .flat_map(|section| section.keys().map(|vec| &**vec))
     }
 
+    /// Returns whether any key is defined in the given section
+    pub fn has_non_empty_section(&self, section: &[u8]) -> bool {
+        self.sections
+            .get(section)
+            .map_or(false, |section| !section.is_empty())
+    }
+
     pub fn is_empty(&self) -> bool {
         self.sections.is_empty()
     }
--- a/rust/hg-core/src/dirstate/entry.rs	Mon Dec 06 10:08:04 2021 +0100
+++ b/rust/hg-core/src/dirstate/entry.rs	Wed Dec 08 10:14:37 2021 +0100
@@ -43,6 +43,10 @@
     truncated_seconds: u32,
     /// Always in the `0 .. 1_000_000_000` range.
     nanoseconds: u32,
+    /// TODO this should be in DirstateEntry, but the current code needs
+    /// refactoring to use DirstateEntry instead of TruncatedTimestamp for
+    /// comparison.
+    pub second_ambiguous: bool,
 }
 
 impl TruncatedTimestamp {
@@ -50,11 +54,16 @@
     /// and truncate the seconds component to its lower 31 bits.
     ///
     /// Panics if the nanoseconds component is not in the expected range.
-    pub fn new_truncate(seconds: i64, nanoseconds: u32) -> Self {
+    pub fn new_truncate(
+        seconds: i64,
+        nanoseconds: u32,
+        second_ambiguous: bool,
+    ) -> Self {
         assert!(nanoseconds < NSEC_PER_SEC);
         Self {
             truncated_seconds: seconds as u32 & RANGE_MASK_31BIT,
             nanoseconds,
+            second_ambiguous,
         }
     }
 
@@ -63,6 +72,7 @@
     pub fn from_already_truncated(
         truncated_seconds: u32,
         nanoseconds: u32,
+        second_ambiguous: bool,
     ) -> Result<Self, DirstateV2ParseError> {
         if truncated_seconds & !RANGE_MASK_31BIT == 0
             && nanoseconds < NSEC_PER_SEC
@@ -70,6 +80,7 @@
             Ok(Self {
                 truncated_seconds,
                 nanoseconds,
+                second_ambiguous,
             })
         } else {
             Err(DirstateV2ParseError)
@@ -83,7 +94,7 @@
             let seconds = metadata.mtime();
             // i64 -> u32 with value always in the `0 .. NSEC_PER_SEC` range
             let nanoseconds = metadata.mtime_nsec().try_into().unwrap();
-            Ok(Self::new_truncate(seconds, nanoseconds))
+            Ok(Self::new_truncate(seconds, nanoseconds, false))
         }
         #[cfg(not(unix))]
         {
@@ -91,6 +102,35 @@
         }
     }
 
+    /// Returns whether this timestamp is reliable as the "mtime" of a file.
+    ///
+    /// A modification time is reliable if it is older than `boundary` (or
+    /// sufficiently in the future).
+    ///
+    /// Otherwise a concurrent modification might happen with the same mtime.
+    pub fn is_reliable_mtime(&self, boundary: &Self) -> bool {
+        // If the mtime of the ambiguous file is younger than (or equal to)
+        // the starting point of the `status` walk, we cannot guarantee that
+        // another, racy, write will not happen right after with the same
+        // mtime and we cannot cache the information.
+        //
+        // However, if the mtime is far away in the future, this is likely a
+        // mismatch between the current clock and a previous file system
+        // operation. So mtimes more than one day in the future are
+        // considered fine.
+        if self.truncated_seconds == boundary.truncated_seconds {
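+            // Within the same second we only trust the mtime when both
+            // timestamps carry nanosecond precision and ours is strictly
+            // older than the boundary.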
+            self.nanoseconds != 0
+                && boundary.nanoseconds != 0
+                && self.nanoseconds < boundary.nanoseconds
+        } else {
+            // `truncated_seconds` is less than 2**31,
+            // so this does not overflow `u32`:
+            let one_day_later = boundary.truncated_seconds + 24 * 3600;
+            self.truncated_seconds < boundary.truncated_seconds
+                || self.truncated_seconds > one_day_later
+        }
+    }
+
     /// The lower 31 bits of the number of seconds since the epoch.
     pub fn truncated_seconds(&self) -> u32 {
         self.truncated_seconds
@@ -122,10 +162,17 @@
     /// in that way, doing a simple comparison would cause many false
     /// negatives.
     pub fn likely_equal(self, other: Self) -> bool {
-        self.truncated_seconds == other.truncated_seconds
-            && (self.nanoseconds == other.nanoseconds
-                || self.nanoseconds == 0
-                || other.nanoseconds == 0)
+        if self.truncated_seconds != other.truncated_seconds {
+            false
+        } else if self.nanoseconds == 0 || other.nanoseconds == 0 {
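+            // A seconds-only match cannot be trusted if the stored mtime
+            // was flagged as ambiguous within its second.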
+            if self.second_ambiguous {
+                false
+            } else {
+                true
+            }
+        } else {
+            self.nanoseconds == other.nanoseconds
+        }
     }
 
     pub fn likely_equal_to_mtime_of(
@@ -168,12 +215,12 @@
                 }
             }
         };
-        Self::new_truncate(seconds, nanoseconds)
+        Self::new_truncate(seconds, nanoseconds, false)
     }
 }
 
 const NSEC_PER_SEC: u32 = 1_000_000_000;
-const RANGE_MASK_31BIT: u32 = 0x7FFF_FFFF;
+pub const RANGE_MASK_31BIT: u32 = 0x7FFF_FFFF;
 
 pub const MTIME_UNSET: i32 = -1;
 
@@ -258,9 +305,10 @@
                     let mode = u32::try_from(mode).unwrap();
                     let size = u32::try_from(size).unwrap();
                     let mtime = u32::try_from(mtime).unwrap();
-                    let mtime =
-                        TruncatedTimestamp::from_already_truncated(mtime, 0)
-                            .unwrap();
+                    let mtime = TruncatedTimestamp::from_already_truncated(
+                        mtime, 0, false,
+                    )
+                    .unwrap();
                     Self {
                         flags: Flags::WDIR_TRACKED | Flags::P1_TRACKED,
                         mode_size: Some((mode, size)),
@@ -438,7 +486,11 @@
         } else if !self.flags.contains(Flags::P1_TRACKED) {
             MTIME_UNSET
         } else if let Some(mtime) = self.mtime {
-            i32::try_from(mtime.truncated_seconds()).unwrap()
+            if mtime.second_ambiguous {
+                MTIME_UNSET
+            } else {
+                i32::try_from(mtime.truncated_seconds()).unwrap()
+            }
         } else {
             MTIME_UNSET
         }
@@ -580,10 +632,8 @@
         &self,
         filesystem_metadata: &std::fs::Metadata,
     ) -> bool {
-        use std::os::unix::fs::MetadataExt;
-        const EXEC_BIT_MASK: u32 = 0o100;
-        let dirstate_exec_bit = (self.mode() as u32) & EXEC_BIT_MASK;
-        let fs_exec_bit = filesystem_metadata.mode() & EXEC_BIT_MASK;
+        let dirstate_exec_bit = (self.mode() as u32 & EXEC_BIT_MASK) != 0;
+        let fs_exec_bit = has_exec_bit(filesystem_metadata);
         dirstate_exec_bit != fs_exec_bit
     }
 
@@ -592,16 +642,6 @@
     pub fn debug_tuple(&self) -> (u8, i32, i32, i32) {
         (self.state().into(), self.mode(), self.size(), self.mtime())
     }
-
-    /// True if the stored mtime would be ambiguous with the current time
-    pub fn need_delay(&self, now: TruncatedTimestamp) -> bool {
-        if let Some(mtime) = self.mtime {
-            self.state() == EntryState::Normal
-                && mtime.truncated_seconds() == now.truncated_seconds()
-        } else {
-            false
-        }
-    }
 }
 
 impl EntryState {
@@ -641,3 +681,11 @@
         }
     }
 }
+
+const EXEC_BIT_MASK: u32 = 0o100;
+
+pub fn has_exec_bit(metadata: &std::fs::Metadata) -> bool {
+    // TODO: How to handle executable permissions on Windows?
+    use std::os::unix::fs::MetadataExt;
+    (metadata.mode() & EXEC_BIT_MASK) != 0
+}
--- a/rust/hg-core/src/dirstate/status.rs	Mon Dec 06 10:08:04 2021 +0100
+++ b/rust/hg-core/src/dirstate/status.rs	Wed Dec 08 10:14:37 2021 +0100
@@ -12,7 +12,6 @@
 use crate::dirstate_tree::on_disk::DirstateV2ParseError;
 
 use crate::{
-    dirstate::TruncatedTimestamp,
     utils::hg_path::{HgPath, HgPathError},
     PatternError,
 };
@@ -62,10 +61,6 @@
 
 #[derive(Debug, Copy, Clone)]
 pub struct StatusOptions {
-    /// Remember the most recent modification timeslot for status, to make
-    /// sure we won't miss future size-preserving file content modifications
-    /// that happen within the same timeslot.
-    pub last_normal_time: TruncatedTimestamp,
     /// Whether we are on a filesystem with UNIX-like exec flags
     pub check_exec: bool,
     pub list_clean: bool,
@@ -78,6 +73,10 @@
 
 #[derive(Debug, Default)]
 pub struct DirstateStatus<'a> {
+    /// The current time at the start of the `status()` algorithm, as measured
+    /// and possibly truncated by the filesystem.
+    pub filesystem_time_at_status_start: Option<std::time::SystemTime>,
+
     /// Tracked files whose contents have changed since the parent revision
     pub modified: Vec<HgPathCow<'a>>,
 
--- a/rust/hg-core/src/dirstate_tree/dirstate_map.rs	Mon Dec 06 10:08:04 2021 +0100
+++ b/rust/hg-core/src/dirstate_tree/dirstate_map.rs	Wed Dec 08 10:14:37 2021 +0100
@@ -677,25 +677,6 @@
         })
     }
 
-    fn clear_known_ambiguous_mtimes(
-        &mut self,
-        paths: &[impl AsRef<HgPath>],
-    ) -> Result<(), DirstateV2ParseError> {
-        for path in paths {
-            if let Some(node) = Self::get_node_mut(
-                self.on_disk,
-                &mut self.unreachable_bytes,
-                &mut self.root,
-                path.as_ref(),
-            )? {
-                if let NodeData::Entry(entry) = &mut node.data {
-                    entry.set_possibly_dirty();
-                }
-            }
-        }
-        Ok(())
-    }
-
     fn count_dropped_path(unreachable_bytes: &mut u32, path: &Cow<HgPath>) {
         if let Cow::Borrowed(path) = path {
             *unreachable_bytes += path.len() as u32
@@ -928,31 +909,22 @@
 
     #[timed]
     pub fn pack_v1(
-        &mut self,
+        &self,
         parents: DirstateParents,
-        now: TruncatedTimestamp,
     ) -> Result<Vec<u8>, DirstateError> {
-        let map = self.get_map_mut();
-        let mut ambiguous_mtimes = Vec::new();
+        let map = self.get_map();
         // Optimization (to be measured?): pre-compute size to avoid `Vec`
         // reallocations
         let mut size = parents.as_bytes().len();
         for node in map.iter_nodes() {
             let node = node?;
-            if let Some(entry) = node.entry()? {
+            if node.entry()?.is_some() {
                 size += packed_entry_size(
                     node.full_path(map.on_disk)?,
                     node.copy_source(map.on_disk)?,
                 );
-                if entry.need_delay(now) {
-                    ambiguous_mtimes.push(
-                        node.full_path_borrowed(map.on_disk)?
-                            .detach_from_tree(),
-                    )
-                }
             }
         }
-        map.clear_known_ambiguous_mtimes(&ambiguous_mtimes)?;
 
         let mut packed = Vec::with_capacity(size);
         packed.extend(parents.as_bytes());
@@ -977,27 +949,10 @@
     /// (false).
     #[timed]
     pub fn pack_v2(
-        &mut self,
-        now: TruncatedTimestamp,
+        &self,
         can_append: bool,
-    ) -> Result<(Vec<u8>, Vec<u8>, bool), DirstateError> {
-        let map = self.get_map_mut();
-        let mut paths = Vec::new();
-        for node in map.iter_nodes() {
-            let node = node?;
-            if let Some(entry) = node.entry()? {
-                if entry.need_delay(now) {
-                    paths.push(
-                        node.full_path_borrowed(map.on_disk)?
-                            .detach_from_tree(),
-                    )
-                }
-            }
-        }
-        // Borrow of `self` ends here since we collect cloned paths
-
-        map.clear_known_ambiguous_mtimes(&paths)?;
-
+    ) -> Result<(Vec<u8>, on_disk::TreeMetadata, bool), DirstateError> {
+        let map = self.get_map();
         on_disk::write(map, can_append)
     }
 
--- a/rust/hg-core/src/dirstate_tree/on_disk.rs	Mon Dec 06 10:08:04 2021 +0100
+++ b/rust/hg-core/src/dirstate_tree/on_disk.rs	Wed Dec 08 10:14:37 2021 +0100
@@ -14,8 +14,10 @@
 use bytes_cast::unaligned::{U16Be, U32Be};
 use bytes_cast::BytesCast;
 use format_bytes::format_bytes;
+use rand::Rng;
 use std::borrow::Cow;
 use std::convert::{TryFrom, TryInto};
+use std::fmt::Write;
 
 /// Added at the start of `.hg/dirstate` when the "v2" format is used.
 /// This a redundant sanity check more than an actual "magic number" since
@@ -61,14 +63,14 @@
 
 pub struct Docket<'on_disk> {
     header: &'on_disk DocketHeader,
-    uuid: &'on_disk [u8],
+    pub uuid: &'on_disk [u8],
 }
 
 /// Fields are documented in the *Tree metadata in the docket file*
 /// section of `mercurial/helptext/internals/dirstate-v2.txt`
 #[derive(BytesCast)]
 #[repr(C)]
-struct TreeMetadata {
+pub struct TreeMetadata {
     root_nodes: ChildNodes,
     nodes_with_entry_count: Size,
     nodes_with_copy_source_count: Size,
@@ -186,7 +188,51 @@
     }
 }
 
+impl TreeMetadata {
+    pub fn as_bytes(&self) -> &[u8] {
+        BytesCast::as_bytes(self)
+    }
+}
+
 impl<'on_disk> Docket<'on_disk> {
+    /// Generate the identifier for a new data file
+    ///
+    /// TODO: support the `HGTEST_UUIDFILE` environment variable.
+    /// See `mercurial/revlogutils/docket.py`
+    pub fn new_uid() -> String {
+        const ID_LENGTH: usize = 8;
+        let mut id = String::with_capacity(ID_LENGTH);
+        let mut rng = rand::thread_rng();
+        for _ in 0..ID_LENGTH {
+            // One random hexadecimal digit.
+            // `unwrap` never panics because `impl Write for String`
+            // never returns an error.
+            write!(&mut id, "{:x}", rng.gen_range(0, 16)).unwrap();
+        }
+        id
+    }
+
+    pub fn serialize(
+        parents: DirstateParents,
+        tree_metadata: TreeMetadata,
+        data_size: u64,
+        uuid: &[u8],
+    ) -> Result<Vec<u8>, std::num::TryFromIntError> {
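+        // The on-disk docket is the fixed-size header immediately followed
+        // by the variable-length data file uuid.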
+        let header = DocketHeader {
+            marker: *V2_FORMAT_MARKER,
+            parent_1: parents.p1.pad_to_256_bits(),
+            parent_2: parents.p2.pad_to_256_bits(),
+            metadata: tree_metadata,
+            data_size: u32::try_from(data_size)?.into(),
+            uuid_size: uuid.len().try_into()?,
+        };
+        let header = header.as_bytes();
+        let mut docket = Vec::with_capacity(header.len() + uuid.len());
+        docket.extend_from_slice(header);
+        docket.extend_from_slice(uuid);
+        Ok(docket)
+    }
+
     pub fn parents(&self) -> DirstateParents {
         use crate::Node;
         let p1 = Node::try_from(&self.header.parent_1[..USED_NODE_ID_BYTES])
@@ -371,11 +417,12 @@
         let mtime = if self.flags().contains(Flags::HAS_MTIME)
             && !self.flags().contains(Flags::DIRECTORY)
             && !self.flags().contains(Flags::EXPECTED_STATE_IS_MODIFIED)
-            // The current code is not able to do the more subtle comparison that the
-            // MTIME_SECOND_AMBIGUOUS requires. So we ignore the mtime
-            && !self.flags().contains(Flags::MTIME_SECOND_AMBIGUOUS)
         {
-            Some(self.mtime.try_into()?)
+            let mut m: TruncatedTimestamp = self.mtime.try_into()?;
+            if self.flags().contains(Flags::MTIME_SECOND_AMBIGUOUS) {
+                m.second_ambiguous = true;
+            }
+            Some(m)
         } else {
             None
         };
@@ -465,6 +512,9 @@
         };
         let mtime = if let Some(m) = mtime_opt {
             flags.insert(Flags::HAS_MTIME);
+            if m.second_ambiguous {
+                flags.insert(Flags::MTIME_SECOND_AMBIGUOUS);
+            };
             m.into()
         } else {
             PackedTruncatedTimestamp::null()
@@ -549,9 +599,9 @@
 /// `dirstate_map.on_disk` (true), instead of written to a new data file
 /// (false).
 pub(super) fn write(
-    dirstate_map: &mut DirstateMap,
+    dirstate_map: &DirstateMap,
     can_append: bool,
-) -> Result<(Vec<u8>, Vec<u8>, bool), DirstateError> {
+) -> Result<(Vec<u8>, TreeMetadata, bool), DirstateError> {
     let append = can_append && dirstate_map.write_should_append();
 
     // This ignores the space for paths, and for nodes without an entry.
@@ -577,7 +627,7 @@
         unused: [0; 4],
         ignore_patterns_hash: dirstate_map.ignore_patterns_hash,
     };
-    Ok((writer.out, meta.as_bytes().to_vec(), append))
+    Ok((writer.out, meta, append))
 }
 
 struct Writer<'dmap, 'on_disk> {
@@ -773,6 +823,7 @@
         Self::from_already_truncated(
             timestamp.truncated_seconds.get(),
             timestamp.nanoseconds.get(),
+            false,
         )
     }
 }
--- a/rust/hg-core/src/dirstate_tree/status.rs	Mon Dec 06 10:08:04 2021 +0100
+++ b/rust/hg-core/src/dirstate_tree/status.rs	Wed Dec 08 10:14:37 2021 +0100
@@ -61,16 +61,21 @@
             (Box::new(|&_| true), vec![], None)
         };
 
+    let filesystem_time_at_status_start = filesystem_now(&root_dir).ok();
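+    // Also record this start time in the outcome so callers can use it as
+    // the boundary when deciding which cached mtimes are reliable.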
+    let outcome = DirstateStatus {
+        filesystem_time_at_status_start,
+        ..Default::default()
+    };
     let common = StatusCommon {
         dmap,
         options,
         matcher,
         ignore_fn,
-        outcome: Default::default(),
+        outcome: Mutex::new(outcome),
         ignore_patterns_have_changed: patterns_changed,
         new_cachable_directories: Default::default(),
         outated_cached_directories: Default::default(),
-        filesystem_time_at_status_start: filesystem_now(&root_dir).ok(),
+        filesystem_time_at_status_start,
     };
     let is_at_repo_root = true;
     let hg_path = &BorrowedPath::OnDisk(HgPath::new(""));
@@ -532,8 +537,12 @@
             if let Some(dirstate_mtime) = entry.truncated_mtime() {
                 let fs_mtime = TruncatedTimestamp::for_mtime_of(fs_metadata)
                     .expect("OS/libc does not support mtime?");
+                // There might be a change in the future if for example the
+                // internal clock becomes off while the process runs, but
+                // this is a case where the issues the user would face
+                // would be a lot worse and there is nothing we
+                // can really do about it.
                 mtime_looks_clean = fs_mtime.likely_equal(dirstate_mtime)
-                    && !fs_mtime.likely_equal(self.options.last_normal_time)
             } else {
                 // No mtime in the dirstate entry
                 mtime_looks_clean = false
--- a/rust/hg-core/src/errors.rs	Mon Dec 06 10:08:04 2021 +0100
+++ b/rust/hg-core/src/errors.rs	Wed Dec 08 10:14:37 2021 +0100
@@ -151,6 +151,8 @@
     /// Converts a `Result` with `std::io::Error` into one with `HgError`.
     fn when_reading_file(self, path: &std::path::Path) -> Result<T, HgError>;
 
+    fn when_writing_file(self, path: &std::path::Path) -> Result<T, HgError>;
+
     fn with_context(
         self,
         context: impl FnOnce() -> IoErrorContext,
@@ -162,6 +164,10 @@
         self.with_context(|| IoErrorContext::ReadingFile(path.to_owned()))
     }
 
+    fn when_writing_file(self, path: &std::path::Path) -> Result<T, HgError> {
+        self.with_context(|| IoErrorContext::WritingFile(path.to_owned()))
+    }
+
     fn with_context(
         self,
         context: impl FnOnce() -> IoErrorContext,
--- a/rust/hg-core/src/lib.rs	Mon Dec 06 10:08:04 2021 +0100
+++ b/rust/hg-core/src/lib.rs	Wed Dec 08 10:14:37 2021 +0100
@@ -29,6 +29,7 @@
 pub mod revlog;
 pub use revlog::*;
 pub mod config;
+pub mod lock;
 pub mod logging;
 pub mod operations;
 pub mod revset;
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/rust/hg-core/src/lock.rs	Wed Dec 08 10:14:37 2021 +0100
@@ -0,0 +1,187 @@
+//! Filesystem-based locks for local repositories
+
+use crate::errors::HgError;
+use crate::errors::HgResultExt;
+use crate::utils::StrExt;
+use crate::vfs::Vfs;
+use std::io;
+use std::io::ErrorKind;
+
+#[derive(derive_more::From)]
+pub enum LockError {
+    AlreadyHeld,
+    #[from]
+    Other(HgError),
+}
+
+/// Try to call `f` with the lock acquired, without waiting.
+///
+/// If the lock is already held, `f` is not called and
+/// `LockError::AlreadyHeld` is returned. `LockError::Other` is returned for
+/// any unexpected I/O error accessing the lock file, including for removing
+/// it after `f` was called. The return value of `f` is dropped in that case.
+/// If all is successful, the return value of `f` is forwarded.
+pub fn try_with_lock_no_wait<R>(
+    hg_vfs: Vfs,
+    lock_filename: &str,
+    f: impl FnOnce() -> R,
+) -> Result<R, LockError> {
+    let our_lock_data = &*OUR_LOCK_DATA;
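+    // Bound the retries: if we keep losing the race for the lock file,
+    // give up and report the lock as already held.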
+    for _retry in 0..5 {
+        match make_lock(hg_vfs, lock_filename, our_lock_data) {
+            Ok(()) => {
+                let result = f();
+                unlock(hg_vfs, lock_filename)?;
+                return Ok(result);
+            }
+            Err(HgError::IoError { error, .. })
+                if error.kind() == ErrorKind::AlreadyExists =>
+            {
+                let lock_data = read_lock(hg_vfs, lock_filename)?;
+                if lock_data.is_none() {
+                    // Lock was apparently just released, retry acquiring it
+                    continue;
+                }
+                if !lock_should_be_broken(&lock_data) {
+                    return Err(LockError::AlreadyHeld);
+                }
+                // The lock file is left over from a process not running
+                // anymore. Break it, but with another lock to
+                // avoid a race.
+                break_lock(hg_vfs, lock_filename)?;
+
+                // Retry acquiring
+            }
+            Err(error) => Err(error)?,
+        }
+    }
+    Err(LockError::AlreadyHeld)
+}
+
+fn break_lock(hg_vfs: Vfs, lock_filename: &str) -> Result<(), LockError> {
+    try_with_lock_no_wait(hg_vfs, &format!("{}.break", lock_filename), || {
+        // Check again in case some other process broke and
+        // acquired the lock in the meantime
+        let lock_data = read_lock(hg_vfs, lock_filename)?;
+        if !lock_should_be_broken(&lock_data) {
+            return Err(LockError::AlreadyHeld);
+        }
+        Ok(hg_vfs.remove_file(lock_filename)?)
+    })?
+}
+
+#[cfg(unix)]
+fn make_lock(
+    hg_vfs: Vfs,
+    lock_filename: &str,
+    data: &str,
+) -> Result<(), HgError> {
+    // Use a symbolic link because creating it is atomic.
+    // The link’s "target" contains data not representing any path.
+    let fake_symlink_target = data;
+    hg_vfs.create_symlink(lock_filename, fake_symlink_target)
+}
+
+fn read_lock(
+    hg_vfs: Vfs,
+    lock_filename: &str,
+) -> Result<Option<String>, HgError> {
+    let link_target =
+        hg_vfs.read_link(lock_filename).io_not_found_as_none()?;
+    if let Some(target) = link_target {
+        let data = target
+            .into_os_string()
+            .into_string()
+            .map_err(|_| HgError::corrupted("non-UTF-8 lock data"))?;
+        Ok(Some(data))
+    } else {
+        Ok(None)
+    }
+}
+
+fn unlock(hg_vfs: Vfs, lock_filename: &str) -> Result<(), HgError> {
+    hg_vfs.remove_file(lock_filename)
+}
+
+/// Return whether the process that is/was holding the lock is known not to be
+/// running anymore.
+fn lock_should_be_broken(data: &Option<String>) -> bool {
+    (|| -> Option<bool> {
+        let (prefix, pid) = data.as_ref()?.split_2(':')?;
+        if prefix != &*LOCK_PREFIX {
+            return Some(false);
+        }
+        let process_is_running;
+
+        #[cfg(unix)]
+        {
+            let pid: libc::pid_t = pid.parse().ok()?;
+            unsafe {
+                let signal = 0; // Test if we could send a signal, without sending
+                let result = libc::kill(pid, signal);
+                if result == 0 {
+                    process_is_running = true
+                } else {
+                    let errno =
+                        io::Error::last_os_error().raw_os_error().unwrap();
+                    process_is_running = errno != libc::ESRCH
+                }
+            }
+        }
+
+        Some(!process_is_running)
+    })()
+    .unwrap_or(false)
+}
+
+lazy_static::lazy_static! {
+    /// A string which is used to differentiate pid namespaces
+    ///
+    /// It's useful to detect "dead" processes and remove stale locks with
+    /// confidence. Typically it's just the hostname. On modern Linux, we
+    /// include an extra Linux-specific PID namespace identifier.
+    static ref LOCK_PREFIX: String = {
+        // Note: this must match the behavior of `_getlockprefix` in `mercurial/lock.py`
+
+        /// Same as https://github.com/python/cpython/blob/v3.10.0/Modules/socketmodule.c#L5414
+        const BUFFER_SIZE: usize = 1024;
+        let mut buffer = [0_i8; BUFFER_SIZE];
+        let hostname_bytes = unsafe {
+            let result = libc::gethostname(buffer.as_mut_ptr(), BUFFER_SIZE);
+            if result != 0 {
+                panic!("gethostname: {}", io::Error::last_os_error())
+            }
+            std::ffi::CStr::from_ptr(buffer.as_mut_ptr()).to_bytes()
+        };
+        let hostname =
+            std::str::from_utf8(hostname_bytes).expect("non-UTF-8 hostname");
+
+        #[cfg(target_os = "linux")]
+        {
+            use std::os::linux::fs::MetadataExt;
+            match std::fs::metadata("/proc/self/ns/pid") {
+                Ok(meta) => {
+                    return format!("{}/{:x}", hostname, meta.st_ino())
+                }
+                Err(error) => {
+                    // TODO: match on `error.kind()` when `NotADirectory`
+                    // is available on all supported Rust versions:
+                    // https://github.com/rust-lang/rust/issues/86442
+                    use libc::{
+                        ENOENT, // ErrorKind::NotFound
+                        ENOTDIR, // ErrorKind::NotADirectory
+                        EACCES, // ErrorKind::PermissionDenied
+                    };
+                    match error.raw_os_error() {
+                        Some(ENOENT) | Some(ENOTDIR) | Some(EACCES) => {}
+                        _ => panic!("stat /proc/self/ns/pid: {}", error),
+                    }
+                }
+            }
+        }
+
+        hostname.to_owned()
+    };
+
+    static ref OUR_LOCK_DATA: String = format!("{}:{}", &*LOCK_PREFIX, std::process::id());
+}
--- a/rust/hg-core/src/matchers.rs	Mon Dec 06 10:08:04 2021 +0100
+++ b/rust/hg-core/src/matchers.rs	Wed Dec 08 10:14:37 2021 +0100
@@ -22,6 +22,7 @@
     PatternSyntax,
 };
 
+use crate::dirstate::status::IgnoreFnType;
 use crate::filepatterns::normalize_path_bytes;
 use std::borrow::ToOwned;
 use std::collections::HashSet;
@@ -246,7 +247,7 @@
 /// ```
 pub struct IncludeMatcher<'a> {
     patterns: Vec<u8>,
-    match_fn: Box<dyn for<'r> Fn(&'r HgPath) -> bool + 'a + Sync>,
+    match_fn: IgnoreFnType<'a>,
     /// Whether all the patterns match a prefix (i.e. recursively)
     prefix: bool,
     roots: HashSet<HgPathBuf>,
@@ -341,9 +342,9 @@
 
 /// Returns the regex pattern and a function that matches an `HgPath` against
 /// said regex formed by the given ignore patterns.
-fn build_regex_match(
-    ignore_patterns: &[IgnorePattern],
-) -> PatternResult<(Vec<u8>, Box<dyn Fn(&HgPath) -> bool + Sync>)> {
+fn build_regex_match<'a, 'b>(
+    ignore_patterns: &'a [IgnorePattern],
+) -> PatternResult<(Vec<u8>, IgnoreFnType<'b>)> {
     let mut regexps = vec![];
     let mut exact_set = HashSet::new();
 
@@ -365,10 +366,10 @@
         let func = move |filename: &HgPath| {
             exact_set.contains(filename) || matcher(filename)
         };
-        Box::new(func) as Box<dyn Fn(&HgPath) -> bool + Sync>
+        Box::new(func) as IgnoreFnType
     } else {
         let func = move |filename: &HgPath| exact_set.contains(filename);
-        Box::new(func) as Box<dyn Fn(&HgPath) -> bool + Sync>
+        Box::new(func) as IgnoreFnType
     };
 
     Ok((full_regex, func))
@@ -476,8 +477,8 @@
 /// should be matched.
 fn build_match<'a, 'b>(
     ignore_patterns: Vec<IgnorePattern>,
-) -> PatternResult<(Vec<u8>, Box<dyn Fn(&HgPath) -> bool + 'b + Sync>)> {
-    let mut match_funcs: Vec<Box<dyn Fn(&HgPath) -> bool + Sync>> = vec![];
+) -> PatternResult<(Vec<u8>, IgnoreFnType<'b>)> {
+    let mut match_funcs: Vec<IgnoreFnType<'b>> = vec![];
     // For debugging and printing
     let mut patterns = vec![];
 
@@ -560,14 +561,11 @@
 /// Parses all "ignore" files with their recursive includes and returns a
 /// function that checks whether a given file (in the general sense) should be
 /// ignored.
-pub fn get_ignore_function<'a>(
+pub fn get_ignore_matcher<'a>(
     mut all_pattern_files: Vec<PathBuf>,
     root_dir: &Path,
     inspect_pattern_bytes: &mut impl FnMut(&[u8]),
-) -> PatternResult<(
-    Box<dyn for<'r> Fn(&'r HgPath) -> bool + Sync + 'a>,
-    Vec<PatternFileWarning>,
-)> {
+) -> PatternResult<(IncludeMatcher<'a>, Vec<PatternFileWarning>)> {
     let mut all_patterns = vec![];
     let mut all_warnings = vec![];
 
@@ -590,10 +588,25 @@
         all_warnings.extend(warnings);
     }
     let matcher = IncludeMatcher::new(all_patterns)?;
-    Ok((
-        Box::new(move |path: &HgPath| matcher.matches(path)),
-        all_warnings,
-    ))
+    Ok((matcher, all_warnings))
+}
+
+/// Parses all "ignore" files with their recursive includes and returns a
+/// function that checks whether a given file (in the general sense) should be
+/// ignored.
+pub fn get_ignore_function<'a>(
+    all_pattern_files: Vec<PathBuf>,
+    root_dir: &Path,
+    inspect_pattern_bytes: &mut impl FnMut(&[u8]),
+) -> PatternResult<(IgnoreFnType<'a>, Vec<PatternFileWarning>)> {
+    let res =
+        get_ignore_matcher(all_pattern_files, root_dir, inspect_pattern_bytes);
+    res.map(|(matcher, all_warnings)| {
+        let res: IgnoreFnType<'a> =
+            Box::new(move |path: &HgPath| matcher.matches(path));
+
+        (res, all_warnings)
+    })
 }
 
 impl<'a> IncludeMatcher<'a> {
@@ -628,6 +641,10 @@
             .chain(self.parents.iter());
         DirsChildrenMultiset::new(thing, Some(&self.parents))
     }
+
+    pub fn debug_get_patterns(&self) -> &[u8] {
+        self.patterns.as_ref()
+    }
 }
 
 impl<'a> Display for IncludeMatcher<'a> {
--- a/rust/hg-core/src/operations/cat.rs	Mon Dec 06 10:08:04 2021 +0100
+++ b/rust/hg-core/src/operations/cat.rs	Wed Dec 08 10:14:37 2021 +0100
@@ -11,6 +11,9 @@
 
 use crate::utils::hg_path::HgPath;
 
+use crate::errors::HgError;
+use crate::manifest::Manifest;
+use crate::manifest::ManifestEntry;
 use itertools::put_back;
 use itertools::PutBack;
 use std::cmp::Ordering;
@@ -28,46 +31,43 @@
 }
 
 // Find an item in an iterator over a sorted collection.
-fn find_item<'a, 'b, 'c, D, I: Iterator<Item = (&'a HgPath, D)>>(
-    i: &mut PutBack<I>,
-    needle: &'b HgPath,
-) -> Option<D> {
+fn find_item<'a>(
+    i: &mut PutBack<impl Iterator<Item = Result<ManifestEntry<'a>, HgError>>>,
+    needle: &HgPath,
+) -> Result<Option<Node>, HgError> {
     loop {
         match i.next() {
-            None => return None,
-            Some(val) => match needle.as_bytes().cmp(val.0.as_bytes()) {
-                Ordering::Less => {
-                    i.put_back(val);
-                    return None;
+            None => return Ok(None),
+            Some(result) => {
+                let entry = result?;
+                match needle.as_bytes().cmp(entry.path.as_bytes()) {
+                    Ordering::Less => {
+                        i.put_back(Ok(entry));
+                        return Ok(None);
+                    }
+                    Ordering::Greater => continue,
+                    Ordering::Equal => return Ok(Some(entry.node_id()?)),
                 }
-                Ordering::Greater => continue,
-                Ordering::Equal => return Some(val.1),
-            },
+            }
         }
     }
 }
 
-fn find_files_in_manifest<
-    'manifest,
-    'query,
-    Data,
-    Manifest: Iterator<Item = (&'manifest HgPath, Data)>,
-    Query: Iterator<Item = &'query HgPath>,
->(
-    manifest: Manifest,
-    query: Query,
-) -> (Vec<(&'query HgPath, Data)>, Vec<&'query HgPath>) {
-    let mut manifest = put_back(manifest);
+fn find_files_in_manifest<'query>(
+    manifest: &Manifest,
+    query: impl Iterator<Item = &'query HgPath>,
+) -> Result<(Vec<(&'query HgPath, Node)>, Vec<&'query HgPath>), HgError> {
+    let mut manifest = put_back(manifest.iter());
     let mut res = vec![];
     let mut missing = vec![];
 
     for file in query {
-        match find_item(&mut manifest, file) {
+        match find_item(&mut manifest, file)? {
             None => missing.push(file),
             Some(item) => res.push((file, item)),
         }
     }
-    return (res, missing);
+    return Ok((res, missing));
 }
 
 /// Output the given revision of files
@@ -92,14 +92,13 @@
     files.sort_unstable();
 
     let (found, missing) = find_files_in_manifest(
-        manifest.files_with_nodes(),
+        &manifest,
         files.into_iter().map(|f| f.as_ref()),
-    );
+    )?;
 
-    for (file_path, node_bytes) in found {
+    for (file_path, file_node) in found {
         found_any = true;
         let file_log = repo.filelog(file_path)?;
-        let file_node = Node::from_hex_for_repo(node_bytes)?;
         results.push((
             file_path,
             file_log.data_for_node(file_node)?.into_data()?,
--- a/rust/hg-core/src/operations/list_tracked_files.rs	Mon Dec 06 10:08:04 2021 +0100
+++ b/rust/hg-core/src/operations/list_tracked_files.rs	Wed Dec 08 10:14:37 2021 +0100
@@ -76,7 +76,7 @@
 pub struct FilesForRev(Manifest);
 
 impl FilesForRev {
-    pub fn iter(&self) -> impl Iterator<Item = &HgPath> {
-        self.0.files()
+    pub fn iter(&self) -> impl Iterator<Item = Result<&HgPath, HgError>> {
+        self.0.iter().map(|entry| Ok(entry?.path))
     }
 }
--- a/rust/hg-core/src/repo.rs	Mon Dec 06 10:08:04 2021 +0100
+++ b/rust/hg-core/src/repo.rs	Wed Dec 08 10:14:37 2021 +0100
@@ -2,10 +2,12 @@
 use crate::config::{Config, ConfigError, ConfigParseError};
 use crate::dirstate::DirstateParents;
 use crate::dirstate_tree::dirstate_map::DirstateMap;
+use crate::dirstate_tree::on_disk::Docket as DirstateDocket;
 use crate::dirstate_tree::owning::OwningDirstateMap;
-use crate::errors::HgError;
 use crate::errors::HgResultExt;
+use crate::errors::{HgError, IoResultExt};
 use crate::exit_codes;
+use crate::lock::{try_with_lock_no_wait, LockError};
 use crate::manifest::{Manifest, Manifestlog};
 use crate::revlog::filelog::Filelog;
 use crate::revlog::revlog::RevlogError;
@@ -15,8 +17,11 @@
 use crate::vfs::{is_dir, is_file, Vfs};
 use crate::{requirements, NodePrefix};
 use crate::{DirstateError, Revision};
-use std::cell::{Cell, Ref, RefCell, RefMut};
+use std::cell::{Ref, RefCell, RefMut};
 use std::collections::HashSet;
+use std::io::Seek;
+use std::io::SeekFrom;
+use std::io::Write as IoWrite;
 use std::path::{Path, PathBuf};
 
 /// A repository on disk
@@ -26,8 +31,8 @@
     store: PathBuf,
     requirements: HashSet<String>,
     config: Config,
-    // None means not known/initialized yet
-    dirstate_parents: Cell<Option<DirstateParents>>,
+    dirstate_parents: LazyCell<DirstateParents, HgError>,
+    dirstate_data_file_uuid: LazyCell<Option<Vec<u8>>, HgError>,
     dirstate_map: LazyCell<OwningDirstateMap, DirstateError>,
     changelog: LazyCell<Changelog, HgError>,
     manifestlog: LazyCell<Manifestlog, HgError>,
@@ -202,7 +207,10 @@
             store: store_path,
             dot_hg,
             config: repo_config,
-            dirstate_parents: Cell::new(None),
+            dirstate_parents: LazyCell::new(Self::read_dirstate_parents),
+            dirstate_data_file_uuid: LazyCell::new(
+                Self::read_dirstate_data_file_uuid,
+            ),
             dirstate_map: LazyCell::new(Self::new_dirstate_map),
             changelog: LazyCell::new(Changelog::open),
             manifestlog: LazyCell::new(Manifestlog::open),
@@ -243,11 +251,26 @@
         }
     }
 
+    pub fn try_with_wlock_no_wait<R>(
+        &self,
+        f: impl FnOnce() -> R,
+    ) -> Result<R, LockError> {
+        try_with_lock_no_wait(self.hg_vfs(), "wlock", f)
+    }
+
     pub fn has_dirstate_v2(&self) -> bool {
         self.requirements
             .contains(requirements::DIRSTATE_V2_REQUIREMENT)
     }
 
+    pub fn has_sparse(&self) -> bool {
+        self.requirements.contains(requirements::SPARSE_REQUIREMENT)
+    }
+
+    pub fn has_narrow(&self) -> bool {
+        self.requirements.contains(requirements::NARROW_REQUIREMENT)
+    }
+
     fn dirstate_file_contents(&self) -> Result<Vec<u8>, HgError> {
         Ok(self
             .hg_vfs()
@@ -257,32 +280,64 @@
     }
 
     pub fn dirstate_parents(&self) -> Result<DirstateParents, HgError> {
-        if let Some(parents) = self.dirstate_parents.get() {
-            return Ok(parents);
-        }
+        Ok(*self.dirstate_parents.get_or_init(self)?)
+    }
+
+    fn read_dirstate_parents(&self) -> Result<DirstateParents, HgError> {
         let dirstate = self.dirstate_file_contents()?;
         let parents = if dirstate.is_empty() {
+            if self.has_dirstate_v2() {
+                self.dirstate_data_file_uuid.set(None);
+            }
             DirstateParents::NULL
         } else if self.has_dirstate_v2() {
-            crate::dirstate_tree::on_disk::read_docket(&dirstate)?.parents()
+            let docket =
+                crate::dirstate_tree::on_disk::read_docket(&dirstate)?;
+            self.dirstate_data_file_uuid
+                .set(Some(docket.uuid.to_owned()));
+            docket.parents()
         } else {
             crate::dirstate::parsers::parse_dirstate_parents(&dirstate)?
                 .clone()
         };
-        self.dirstate_parents.set(Some(parents));
+        self.dirstate_parents.set(parents);
         Ok(parents)
     }
 
+    fn read_dirstate_data_file_uuid(
+        &self,
+    ) -> Result<Option<Vec<u8>>, HgError> {
+        assert!(
+            self.has_dirstate_v2(),
+            "accessing dirstate data file ID without dirstate-v2"
+        );
+        let dirstate = self.dirstate_file_contents()?;
+        if dirstate.is_empty() {
+            self.dirstate_parents.set(DirstateParents::NULL);
+            Ok(None)
+        } else {
+            let docket =
+                crate::dirstate_tree::on_disk::read_docket(&dirstate)?;
+            self.dirstate_parents.set(docket.parents());
+            Ok(Some(docket.uuid.to_owned()))
+        }
+    }
+
     fn new_dirstate_map(&self) -> Result<OwningDirstateMap, DirstateError> {
         let dirstate_file_contents = self.dirstate_file_contents()?;
         if dirstate_file_contents.is_empty() {
-            self.dirstate_parents.set(Some(DirstateParents::NULL));
+            self.dirstate_parents.set(DirstateParents::NULL);
+            if self.has_dirstate_v2() {
+                self.dirstate_data_file_uuid.set(None);
+            }
             Ok(OwningDirstateMap::new_empty(Vec::new()))
         } else if self.has_dirstate_v2() {
             let docket = crate::dirstate_tree::on_disk::read_docket(
                 &dirstate_file_contents,
             )?;
-            self.dirstate_parents.set(Some(docket.parents()));
+            self.dirstate_parents.set(docket.parents());
+            self.dirstate_data_file_uuid
+                .set(Some(docket.uuid.to_owned()));
             let data_size = docket.data_size();
             let metadata = docket.tree_metadata();
             let mut map = if let Some(data_mmap) = self
@@ -302,7 +357,7 @@
             let (on_disk, placeholder) = map.get_pair_mut();
             let (inner, parents) = DirstateMap::new_v1(on_disk)?;
             self.dirstate_parents
-                .set(Some(parents.unwrap_or(DirstateParents::NULL)));
+                .set(parents.unwrap_or(DirstateParents::NULL));
             *placeholder = inner;
             Ok(map)
         }
@@ -365,6 +420,70 @@
     pub fn filelog(&self, path: &HgPath) -> Result<Filelog, HgError> {
         Filelog::open(self, path)
     }
+
+    /// Write to disk any updates that were made through `dirstate_map_mut`.
+    ///
+    /// The "wlock" must be held while calling this.
+    /// See for example `try_with_wlock_no_wait`.
+    ///
+    /// TODO: have a `WritableRepo` type only accessible while holding the
+    /// lock?
+    pub fn write_dirstate(&self) -> Result<(), DirstateError> {
+        let map = self.dirstate_map()?;
+        // TODO: Maintain a `DirstateMap::dirty` flag, and return early here if
+        // it’s unset
+        let parents = self.dirstate_parents()?;
+        let packed_dirstate = if self.has_dirstate_v2() {
+            let uuid = self.dirstate_data_file_uuid.get_or_init(self)?;
+            let mut uuid = uuid.as_ref();
+            let can_append = uuid.is_some();
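+            // Appending to an existing data file is only possible when the
+            // docket already references one (i.e. we know its uuid).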
+            let (data, tree_metadata, append) = map.pack_v2(can_append)?;
+            if !append {
+                uuid = None
+            }
+            let uuid = if let Some(uuid) = uuid {
+                std::str::from_utf8(uuid)
+                    .map_err(|_| {
+                        HgError::corrupted("non-UTF-8 dirstate data file ID")
+                    })?
+                    .to_owned()
+            } else {
+                DirstateDocket::new_uid()
+            };
+            let data_filename = format!("dirstate.{}", uuid);
+            let data_filename = self.hg_vfs().join(data_filename);
+            let mut options = std::fs::OpenOptions::new();
+            if append {
+                options.append(true);
+            } else {
+                options.write(true).create_new(true);
+            }
+            let data_size = (|| {
+                // TODO: loop and try another random ID if !append and this
+                // returns `ErrorKind::AlreadyExists`? Collision chance of two
+                // random IDs is one in 2**32
+                let mut file = options.open(&data_filename)?;
+                file.write_all(&data)?;
+                file.flush()?;
+                // TODO: use https://doc.rust-lang.org/std/io/trait.Seek.html#method.stream_position when we require Rust 1.51+
+                file.seek(SeekFrom::Current(0))
+            })()
+            .when_writing_file(&data_filename)?;
+            DirstateDocket::serialize(
+                parents,
+                tree_metadata,
+                data_size,
+                uuid.as_bytes(),
+            )
+            .map_err(|_: std::num::TryFromIntError| {
+                HgError::corrupted("overflow in dirstate docket serialization")
+            })?
+        } else {
+            map.pack_v1(parents)?
+        };
+        self.hg_vfs().atomic_write("dirstate", &packed_dirstate)?;
+        Ok(())
+    }
 }
 
 /// Lazily-initialized component of `Repo` with interior mutability
@@ -386,6 +505,10 @@
         }
     }
 
+    fn set(&self, value: T) {
+        *self.value.borrow_mut() = Some(value)
+    }
+
     fn get_or_init(&self, repo: &Repo) -> Result<Ref<T>, E> {
         let mut borrowed = self.value.borrow();
         if borrowed.is_none() {
@@ -399,7 +522,7 @@
         Ok(Ref::map(borrowed, |option| option.as_ref().unwrap()))
     }
 
-    pub fn get_mut_or_init(&self, repo: &Repo) -> Result<RefMut<T>, E> {
+    fn get_mut_or_init(&self, repo: &Repo) -> Result<RefMut<T>, E> {
         let mut borrowed = self.value.borrow_mut();
         if borrowed.is_none() {
             *borrowed = Some((self.init)(repo)?);
--- a/rust/hg-core/src/requirements.rs	Mon Dec 06 10:08:04 2021 +0100
+++ b/rust/hg-core/src/requirements.rs	Wed Dec 08 10:14:37 2021 +0100
@@ -88,6 +88,10 @@
     // When it starts writing to the repository, it’ll need to either keep the
     // persistent nodemap up to date or remove this entry:
     NODEMAP_REQUIREMENT,
+    // Not all commands support `sparse` and `narrow`. The commands that do
+    // not should opt out by checking `has_sparse` and `has_narrow`.
+    SPARSE_REQUIREMENT,
+    NARROW_REQUIREMENT,
 ];
 
 // Copied from mercurial/requirements.py:
--- a/rust/hg-core/src/revlog/manifest.rs	Mon Dec 06 10:08:04 2021 +0100
+++ b/rust/hg-core/src/revlog/manifest.rs	Wed Dec 08 10:14:37 2021 +0100
@@ -4,6 +4,7 @@
 use crate::revlog::Revision;
 use crate::revlog::{Node, NodePrefix};
 use crate::utils::hg_path::HgPath;
+use crate::utils::SliceExt;
 
 /// A specialized `Revlog` to work with `manifest` data format.
 pub struct Manifestlog {
@@ -55,47 +56,64 @@
 }
 
 impl Manifest {
-    /// Return an iterator over the lines of the entry.
-    pub fn lines(&self) -> impl Iterator<Item = &[u8]> {
+    pub fn iter(
+        &self,
+    ) -> impl Iterator<Item = Result<ManifestEntry, HgError>> {
         self.bytes
             .split(|b| b == &b'\n')
             .filter(|line| !line.is_empty())
-    }
-
-    /// Return an iterator over the files of the entry.
-    pub fn files(&self) -> impl Iterator<Item = &HgPath> {
-        self.lines().filter(|line| !line.is_empty()).map(|line| {
-            let pos = line
-                .iter()
-                .position(|x| x == &b'\0')
-                .expect("manifest line should contain \\0");
-            HgPath::new(&line[..pos])
-        })
-    }
-
-    /// Return an iterator over the files of the entry.
-    pub fn files_with_nodes(&self) -> impl Iterator<Item = (&HgPath, &[u8])> {
-        self.lines().filter(|line| !line.is_empty()).map(|line| {
-            let pos = line
-                .iter()
-                .position(|x| x == &b'\0')
-                .expect("manifest line should contain \\0");
-            let hash_start = pos + 1;
-            let hash_end = hash_start + 40;
-            (HgPath::new(&line[..pos]), &line[hash_start..hash_end])
-        })
+            .map(|line| {
+                let (path, rest) = line.split_2(b'\0').ok_or_else(|| {
+                    HgError::corrupted("manifest line should contain \\0")
+                })?;
+                let path = HgPath::new(path);
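+                // A line may end with a single flag byte after the hex node
+                // id: b'x' (executable), b'l' (symlink) or b't' (tree).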
+                let (hex_node_id, flags) = match rest.split_last() {
+                    Some((&b'x', rest)) => (rest, Some(b'x')),
+                    Some((&b'l', rest)) => (rest, Some(b'l')),
+                    Some((&b't', rest)) => (rest, Some(b't')),
+                    _ => (rest, None),
+                };
+                Ok(ManifestEntry {
+                    path,
+                    hex_node_id,
+                    flags,
+                })
+            })
     }
 
     /// If the given path is in this manifest, return its filelog node ID
-    pub fn find_file(&self, path: &HgPath) -> Result<Option<Node>, HgError> {
+    pub fn find_file(
+        &self,
+        path: &HgPath,
+    ) -> Result<Option<ManifestEntry>, HgError> {
         // TODO: use binary search instead of linear scan. This may involve
         // building (and caching) an index of the byte indices of each manifest
         // line.
-        for (manifest_path, node) in self.files_with_nodes() {
-            if manifest_path == path {
-                return Ok(Some(Node::from_hex_for_repo(node)?));
+
+        // TODO: use try_find when available (if still using linear scan)
+        // https://github.com/rust-lang/rust/issues/63178
+        for entry in self.iter() {
+            let entry = entry?;
+            if entry.path == path {
+                return Ok(Some(entry));
             }
         }
         Ok(None)
     }
 }
+
+/// `Manifestlog` entry which knows how to interpret the `manifest` data bytes.
+#[derive(Debug)]
+pub struct ManifestEntry<'manifest> {
+    pub path: &'manifest HgPath,
+    pub hex_node_id: &'manifest [u8],
+
+    /// `Some` values are b'x', b'l', or b't'
+    pub flags: Option<u8>,
+}
+
+impl ManifestEntry<'_> {
+    pub fn node_id(&self) -> Result<Node, HgError> {
+        Node::from_hex_for_repo(self.hex_node_id)
+    }
+}
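
The new `Manifest::iter` parses one `ManifestEntry` per manifest line and surfaces corruption as `HgError` instead of panicking, while `find_file` now returns the whole entry (path, hex node ID, optional flag byte) rather than just the node. A minimal consumer sketch, assuming the hg-core crate from this patch is available as `hg` with the re-exports used later in rhg (`hg::manifest::Manifest`):

  // Sketch only: collect the paths of executable files recorded in a manifest.
  use hg::errors::HgError;
  use hg::manifest::Manifest;
  use hg::utils::hg_path::HgPath;

  fn executable_files(manifest: &Manifest) -> Result<Vec<&HgPath>, HgError> {
      let mut result = Vec::new();
      for entry in manifest.iter() {
          // A manifest line without a `\0` separator becomes
          // `HgError::corrupted` instead of a panic.
          let entry = entry?;
          if entry.flags == Some(b'x') {
              result.push(entry.path);
          }
      }
      Ok(result)
  }
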
--- a/rust/hg-core/src/revlog/node.rs	Mon Dec 06 10:08:04 2021 +0100
+++ b/rust/hg-core/src/revlog/node.rs	Wed Dec 08 10:14:37 2021 +0100
@@ -174,6 +174,12 @@
             data: self.data,
         }
     }
+
+    pub fn pad_to_256_bits(&self) -> [u8; 32] {
+        let mut bits = [0; 32];
+        bits[..NODE_BYTES_LENGTH].copy_from_slice(&self.data);
+        bits
+    }
 }
 
 /// The beginning of a binary revision SHA.
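
`pad_to_256_bits` zero-fills a 20-byte SHA-1 node into a fixed 32-byte array. A hedged usage sketch; the exact re-export path of `Node` and the consumer (presumably a fixed-width node field such as the one in dirstate-v2) are assumptions, since the patch only shows the helper itself:

  // Sketch only: the tail beyond the 20 SHA-1 bytes stays zeroed.
  fn as_fixed_width_field(node: &hg::revlog::Node) -> [u8; 32] {
      let padded = node.pad_to_256_bits();
      debug_assert!(padded[20..].iter().all(|b| *b == 0));
      padded
  }
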
--- a/rust/hg-core/src/utils.rs	Mon Dec 06 10:08:04 2021 +0100
+++ b/rust/hg-core/src/utils.rs	Wed Dec 08 10:14:37 2021 +0100
@@ -145,6 +145,21 @@
     }
 }
 
+pub trait StrExt {
+    // TODO: Use https://doc.rust-lang.org/nightly/std/primitive.str.html#method.split_once
+    // once we require Rust 1.52+
+    fn split_2(&self, separator: char) -> Option<(&str, &str)>;
+}
+
+impl StrExt for str {
+    fn split_2(&self, separator: char) -> Option<(&str, &str)> {
+        let mut iter = self.splitn(2, separator);
+        let a = iter.next()?;
+        let b = iter.next()?;
+        Some((a, b))
+    }
+}
+
 pub trait Escaped {
     /// Return bytes escaped for display to the user
     fn escaped_bytes(&self) -> Vec<u8>;
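
`split_2` is a stand-in for `str::split_once`, which this codebase cannot use yet because it requires Rust 1.52. A standalone copy of the helper with illustrative usage (the example strings are not taken from the patch); manifest.rs uses the byte-slice counterpart via `SliceExt`, which is outside this hunk:

  // Standalone illustration: split on the first occurrence of the separator
  // only, returning None when the separator is absent.
  pub trait StrExt {
      fn split_2(&self, separator: char) -> Option<(&str, &str)>;
  }

  impl StrExt for str {
      fn split_2(&self, separator: char) -> Option<(&str, &str)> {
          let mut iter = self.splitn(2, separator);
          let a = iter.next()?;
          let b = iter.next()?;
          Some((a, b))
      }
  }

  fn main() {
      assert_eq!("ui.color=always".split_2('='), Some(("ui.color", "always")));
      // Only the first separator splits; the rest stays in the right half.
      assert_eq!("a=b=c".split_2('='), Some(("a", "b=c")));
      // No separator at all yields None.
      assert_eq!("plain".split_2('='), None);
  }
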
--- a/rust/hg-core/src/vfs.rs	Mon Dec 06 10:08:04 2021 +0100
+++ b/rust/hg-core/src/vfs.rs	Wed Dec 08 10:14:37 2021 +0100
@@ -1,6 +1,6 @@
 use crate::errors::{HgError, IoErrorContext, IoResultExt};
 use memmap2::{Mmap, MmapOptions};
-use std::io::ErrorKind;
+use std::io::{ErrorKind, Write};
 use std::path::{Path, PathBuf};
 
 /// Filesystem access abstraction for the contents of a given "base" directory
@@ -16,6 +16,22 @@
         self.base.join(relative_path)
     }
 
+    pub fn symlink_metadata(
+        &self,
+        relative_path: impl AsRef<Path>,
+    ) -> Result<std::fs::Metadata, HgError> {
+        let path = self.join(relative_path);
+        std::fs::symlink_metadata(&path).when_reading_file(&path)
+    }
+
+    pub fn read_link(
+        &self,
+        relative_path: impl AsRef<Path>,
+    ) -> Result<PathBuf, HgError> {
+        let path = self.join(relative_path);
+        std::fs::read_link(&path).when_reading_file(&path)
+    }
+
     pub fn read(
         &self,
         relative_path: impl AsRef<Path>,
@@ -71,6 +87,47 @@
         std::fs::rename(&from, &to)
             .with_context(|| IoErrorContext::RenamingFile { from, to })
     }
+
+    pub fn remove_file(
+        &self,
+        relative_path: impl AsRef<Path>,
+    ) -> Result<(), HgError> {
+        let path = self.join(relative_path);
+        std::fs::remove_file(&path)
+            .with_context(|| IoErrorContext::RemovingFile(path))
+    }
+
+    #[cfg(unix)]
+    pub fn create_symlink(
+        &self,
+        relative_link_path: impl AsRef<Path>,
+        target_path: impl AsRef<Path>,
+    ) -> Result<(), HgError> {
+        let link_path = self.join(relative_link_path);
+        std::os::unix::fs::symlink(target_path, &link_path)
+            .when_writing_file(&link_path)
+    }
+
+    /// Write `contents` into a temporary file, then rename to `relative_path`.
+    /// This makes writing to a file "atomic": a reader opening that path will
+    /// see either the previous contents of the file or the complete new
+    /// content, never a partial write.
+    pub fn atomic_write(
+        &self,
+        relative_path: impl AsRef<Path>,
+        contents: &[u8],
+    ) -> Result<(), HgError> {
+        let mut tmp = tempfile::NamedTempFile::new_in(self.base)
+            .when_writing_file(self.base)?;
+        tmp.write_all(contents)
+            .and_then(|()| tmp.flush())
+            .when_writing_file(tmp.path())?;
+        let path = self.join(relative_path);
+        tmp.persist(&path)
+            .map_err(|e| e.error)
+            .when_writing_file(&path)?;
+        Ok(())
+    }
 }
 
 fn fs_metadata(
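
A caller-side sketch of the new `atomic_write`, assuming the `Repo` API used elsewhere in this patch (`working_directory_vfs()`); the file name is made up for illustration:

  // Sketch only: readers of "some-state-file" observe either the old or the
  // new contents, never a partial write, because the data goes through a
  // temporary file under the same base directory followed by a rename.
  use hg::errors::HgError;
  use hg::repo::Repo;

  fn save_state(repo: &Repo, contents: &[u8]) -> Result<(), HgError> {
      repo.working_directory_vfs()
          .atomic_write("some-state-file", contents)
  }
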
--- a/rust/hg-cpython/src/dirstate.rs	Mon Dec 06 10:08:04 2021 +0100
+++ b/rust/hg-cpython/src/dirstate.rs	Wed Dec 08 10:14:37 2021 +0100
@@ -54,7 +54,6 @@
                 matcher: PyObject,
                 ignorefiles: PyList,
                 check_exec: bool,
-                last_normal_time: (u32, u32),
                 list_clean: bool,
                 list_ignored: bool,
                 list_unknown: bool,
--- a/rust/hg-cpython/src/dirstate/dirstate_map.rs	Mon Dec 06 10:08:04 2021 +0100
+++ b/rust/hg-cpython/src/dirstate/dirstate_map.rs	Wed Dec 08 10:14:37 2021 +0100
@@ -18,7 +18,7 @@
 
 use crate::{
     dirstate::copymap::{CopyMap, CopyMapItemsIterator, CopyMapKeysIterator},
-    dirstate::item::{timestamp, DirstateItem},
+    dirstate::item::DirstateItem,
     pybytes_deref::PyBytesDeref,
 };
 use hg::{
@@ -194,16 +194,13 @@
         &self,
         p1: PyObject,
         p2: PyObject,
-        now: (u32, u32)
     ) -> PyResult<PyBytes> {
-        let now = timestamp(py, now)?;
-
-        let mut inner = self.inner(py).borrow_mut();
+        let inner = self.inner(py).borrow();
         let parents = DirstateParents {
             p1: extract_node_id(py, &p1)?,
             p2: extract_node_id(py, &p2)?,
         };
-        let result = inner.pack_v1(parents, now);
+        let result = inner.pack_v1(parents);
         match result {
             Ok(packed) => Ok(PyBytes::new(py, &packed)),
             Err(_) => Err(PyErr::new::<exc::OSError, _>(
@@ -218,17 +215,14 @@
     /// instead of written to a new data file (False).
     def write_v2(
         &self,
-        now: (u32, u32),
         can_append: bool,
     ) -> PyResult<PyObject> {
-        let now = timestamp(py, now)?;
-
-        let mut inner = self.inner(py).borrow_mut();
-        let result = inner.pack_v2(now, can_append);
+        let inner = self.inner(py).borrow();
+        let result = inner.pack_v2(can_append);
         match result {
             Ok((packed, tree_metadata, append)) => {
                 let packed = PyBytes::new(py, &packed);
-                let tree_metadata = PyBytes::new(py, &tree_metadata);
+                let tree_metadata = PyBytes::new(py, tree_metadata.as_bytes());
                 let tuple = (packed, tree_metadata, append);
                 Ok(tuple.to_py_object(py).into_object())
             },
--- a/rust/hg-cpython/src/dirstate/item.rs	Mon Dec 06 10:08:04 2021 +0100
+++ b/rust/hg-cpython/src/dirstate/item.rs	Wed Dec 08 10:14:37 2021 +0100
@@ -23,7 +23,7 @@
         p2_info: bool = false,
         has_meaningful_data: bool = true,
         has_meaningful_mtime: bool = true,
-        parentfiledata: Option<(u32, u32, (u32, u32))> = None,
+        parentfiledata: Option<(u32, u32, Option<(u32, u32, bool)>)> = None,
         fallback_exec: Option<bool> = None,
         fallback_symlink: Option<bool> = None,
 
@@ -35,7 +35,9 @@
                 mode_size_opt = Some((mode, size))
             }
             if has_meaningful_mtime {
-                mtime_opt = Some(timestamp(py, mtime)?)
+                if let Some(m) = mtime {
+                    mtime_opt = Some(timestamp(py, m)?);
+                }
             }
         }
         let entry = DirstateEntry::from_v2_data(
@@ -192,12 +194,8 @@
         Ok(mtime)
     }
 
-    def need_delay(&self, now: (u32, u32)) -> PyResult<bool> {
-        let now = timestamp(py, now)?;
-        Ok(self.entry(py).get().need_delay(now))
-    }
-
-    def mtime_likely_equal_to(&self, other: (u32, u32)) -> PyResult<bool> {
+    def mtime_likely_equal_to(&self, other: (u32, u32, bool))
+        -> PyResult<bool> {
         if let Some(mtime) = self.entry(py).get().truncated_mtime() {
             Ok(mtime.likely_equal(timestamp(py, other)?))
         } else {
@@ -230,7 +228,7 @@
         &self,
         mode: u32,
         size: u32,
-        mtime: (u32, u32),
+        mtime: (u32, u32, bool),
     ) -> PyResult<PyNone> {
         let mtime = timestamp(py, mtime)?;
         self.update(py, |entry| entry.set_clean(mode, size, mtime));
@@ -275,12 +273,13 @@
 
 pub(crate) fn timestamp(
     py: Python<'_>,
-    (s, ns): (u32, u32),
+    (s, ns, second_ambiguous): (u32, u32, bool),
 ) -> PyResult<TruncatedTimestamp> {
-    TruncatedTimestamp::from_already_truncated(s, ns).map_err(|_| {
-        PyErr::new::<exc::ValueError, _>(
-            py,
-            "expected mtime truncated to 31 bits",
-        )
-    })
+    TruncatedTimestamp::from_already_truncated(s, ns, second_ambiguous)
+        .map_err(|_| {
+            PyErr::new::<exc::ValueError, _>(
+                py,
+                "expected mtime truncated to 31 bits",
+            )
+        })
 }
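
Truncated timestamps are now passed from Python as a three-tuple `(seconds, nanoseconds, second_ambiguous)`, where the new boolean marks an mtime too close to the dirstate write time to be trusted. A hedged sketch of the corresponding Rust constructor (the values are illustrative):

  // Sketch only: mirrors what `timestamp()` above does for a well-formed
  // tuple; the seconds value must already fit in 31 bits.
  use hg::dirstate::TruncatedTimestamp;

  fn example() -> TruncatedTimestamp {
      let (secs, nanos, second_ambiguous) = (1_638_950_400, 0, false);
      TruncatedTimestamp::from_already_truncated(secs, nanos, second_ambiguous)
          .expect("seconds already truncated to 31 bits")
  }
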
--- a/rust/hg-cpython/src/dirstate/status.rs	Mon Dec 06 10:08:04 2021 +0100
+++ b/rust/hg-cpython/src/dirstate/status.rs	Wed Dec 08 10:14:37 2021 +0100
@@ -9,7 +9,6 @@
 //! `hg-core` crate. From Python, this will be seen as
 //! `rustext.dirstate.status`.
 
-use crate::dirstate::item::timestamp;
 use crate::{dirstate::DirstateMap, exceptions::FallbackError};
 use cpython::exc::OSError;
 use cpython::{
@@ -103,13 +102,11 @@
     root_dir: PyObject,
     ignore_files: PyList,
     check_exec: bool,
-    last_normal_time: (u32, u32),
     list_clean: bool,
     list_ignored: bool,
     list_unknown: bool,
     collect_traversed_dirs: bool,
 ) -> PyResult<PyTuple> {
-    let last_normal_time = timestamp(py, last_normal_time)?;
     let bytes = root_dir.extract::<PyBytes>(py)?;
     let root_dir = get_path_from_bytes(bytes.data(py));
 
@@ -135,7 +132,6 @@
                     ignore_files,
                     StatusOptions {
                         check_exec,
-                        last_normal_time,
                         list_clean,
                         list_ignored,
                         list_unknown,
@@ -172,7 +168,6 @@
                     ignore_files,
                     StatusOptions {
                         check_exec,
-                        last_normal_time,
                         list_clean,
                         list_ignored,
                         list_unknown,
@@ -224,7 +219,6 @@
                     ignore_files,
                     StatusOptions {
                         check_exec,
-                        last_normal_time,
                         list_clean,
                         list_ignored,
                         list_unknown,
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/rust/rhg/src/commands/debugignorerhg.rs	Wed Dec 08 10:14:37 2021 +0100
@@ -0,0 +1,40 @@
+use crate::error::CommandError;
+use clap::SubCommand;
+use hg;
+use hg::matchers::get_ignore_matcher;
+use hg::StatusError;
+use log::warn;
+
+pub const HELP_TEXT: &str = "
+Show effective hgignore patterns used by rhg.
+
+This is a pure Rust version of `hg debugignore`.
+
+Some options might be missing, check the list below.
+";
+
+pub fn args() -> clap::App<'static, 'static> {
+    SubCommand::with_name("debugignorerhg").about(HELP_TEXT)
+}
+
+pub fn run(invocation: &crate::CliInvocation) -> Result<(), CommandError> {
+    let repo = invocation.repo?;
+
+    let ignore_file = repo.working_directory_vfs().join(".hgignore"); // TODO hardcoded
+
+    let (ignore_matcher, warnings) = get_ignore_matcher(
+        vec![ignore_file],
+        &repo.working_directory_path().to_owned(),
+        &mut |_pattern_bytes| (),
+    )
+    .map_err(|e| StatusError::from(e))?;
+
+    if !warnings.is_empty() {
+        warn!("Pattern warnings: {:?}", &warnings);
+    }
+
+    let patterns = ignore_matcher.debug_get_patterns();
+    invocation.ui.write_stdout(patterns)?;
+    invocation.ui.write_stdout(b"\n")?;
+    Ok(())
+}
--- a/rust/rhg/src/commands/files.rs	Mon Dec 06 10:08:04 2021 +0100
+++ b/rust/rhg/src/commands/files.rs	Wed Dec 08 10:14:37 2021 +0100
@@ -3,6 +3,7 @@
 use crate::ui::UiError;
 use crate::utils::path_utils::relativize_paths;
 use clap::Arg;
+use hg::errors::HgError;
 use hg::operations::list_rev_tracked_files;
 use hg::operations::Dirstate;
 use hg::repo::Repo;
@@ -39,20 +40,48 @@
     let rev = invocation.subcommand_args.value_of("rev");
 
     let repo = invocation.repo?;
+
+    // It would arguably be better to remove this check: that would amount
+    // to automatically enabling the extension whenever the repo requires it.
+    // However, this check has to stay in sync with vanilla hg so that the
+    // hg tests pass.
+    if repo.has_sparse()
+        && invocation.config.get(b"extensions", b"sparse").is_none()
+    {
+        return Err(CommandError::unsupported(
+            "repo is using sparse, but sparse extension is not enabled",
+        ));
+    }
+
     if let Some(rev) = rev {
+        if repo.has_narrow() {
+            return Err(CommandError::unsupported(
+                "rhg files -r <rev> is not supported in narrow clones",
+            ));
+        }
         let files = list_rev_tracked_files(repo, rev).map_err(|e| (e, rev))?;
         display_files(invocation.ui, repo, files.iter())
     } else {
+        // The dirstate always reflects the sparse narrowspec, so if we
+        // only have sparse (without narrow) everything is fine.
+        // If we have narrow, then `hg files` needs to check whether the
+        // store narrowspec is in sync with the dirstate's, and we can't
+        // support that without explicit code.
+        if repo.has_narrow() {
+            return Err(CommandError::unsupported(
+                "rhg files is not supported in narrow clones",
+            ));
+        }
         let distate = Dirstate::new(repo)?;
         let files = distate.tracked_files()?;
-        display_files(invocation.ui, repo, files)
+        display_files(invocation.ui, repo, files.into_iter().map(Ok))
     }
 }
 
 fn display_files<'a>(
     ui: &Ui,
     repo: &Repo,
-    files: impl IntoIterator<Item = &'a HgPath>,
+    files: impl IntoIterator<Item = Result<&'a HgPath, HgError>>,
 ) -> Result<(), CommandError> {
     let mut stdout = ui.stdout_buffer();
     let mut any = false;
--- a/rust/rhg/src/commands/status.rs	Mon Dec 06 10:08:04 2021 +0100
+++ b/rust/rhg/src/commands/status.rs	Wed Dec 08 10:14:37 2021 +0100
@@ -6,20 +6,25 @@
 // GNU General Public License version 2 or any later version.
 
 use crate::error::CommandError;
-use crate::ui::{Ui, UiError};
+use crate::ui::Ui;
 use crate::utils::path_utils::relativize_paths;
 use clap::{Arg, SubCommand};
+use format_bytes::format_bytes;
 use hg;
 use hg::config::Config;
+use hg::dirstate::has_exec_bit;
 use hg::dirstate::TruncatedTimestamp;
-use hg::errors::HgError;
+use hg::dirstate::RANGE_MASK_31BIT;
+use hg::errors::{HgError, IoResultExt};
+use hg::lock::LockError;
 use hg::manifest::Manifest;
 use hg::matchers::AlwaysMatcher;
 use hg::repo::Repo;
-use hg::utils::hg_path::{hg_path_to_os_string, HgPath};
+use hg::utils::files::get_bytes_from_os_string;
+use hg::utils::hg_path::{hg_path_to_path_buf, HgPath};
 use hg::{HgPathCow, StatusOptions};
 use log::{info, warn};
-use std::borrow::Cow;
+use std::io;
 
 pub const HELP_TEXT: &str = "
 Show changed files in the working directory
@@ -81,6 +86,12 @@
                 .short("-i")
                 .long("--ignored"),
         )
+        .arg(
+            Arg::with_name("no-status")
+                .help("hide status prefix")
+                .short("-n")
+                .long("--no-status"),
+        )
 }
 
 /// Pure data type allowing the caller to specify file states to display
@@ -138,16 +149,25 @@
     }
 
     // TODO: lift these limitations
-    if invocation.config.get_bool(b"ui", b"tweakdefaults").ok() == Some(true) {
+    if invocation.config.get_bool(b"ui", b"tweakdefaults")? {
         return Err(CommandError::unsupported(
             "ui.tweakdefaults is not yet supported with rhg status",
         ));
     }
-    if invocation.config.get_bool(b"ui", b"statuscopies").ok() == Some(true) {
+    if invocation.config.get_bool(b"ui", b"statuscopies")? {
         return Err(CommandError::unsupported(
             "ui.statuscopies is not yet supported with rhg status",
         ));
     }
+    if invocation
+        .config
+        .get(b"commands", b"status.terse")
+        .is_some()
+    {
+        return Err(CommandError::unsupported(
+            "status.terse is not yet supported with rhg status",
+        ));
+    }
 
     let ui = invocation.ui;
     let config = invocation.config;
@@ -172,16 +192,19 @@
             requested
         }
     };
+    let no_status = args.is_present("no-status");
 
     let repo = invocation.repo?;
+
+    if repo.has_sparse() || repo.has_narrow() {
+        return Err(CommandError::unsupported(
+            "rhg status is not supported for sparse checkouts or narrow clones yet"
+        ));
+    }
+
     let mut dmap = repo.dirstate_map_mut()?;
 
     let options = StatusOptions {
-        // TODO should be provided by the dirstate parsing and
-        // hence be stored on dmap. Using a value that assumes we aren't
-        // below the time resolution granularity of the FS and the
-        // dirstate.
-        last_normal_time: TruncatedTimestamp::new_truncate(0, 0),
         // we're currently supporting file systems with exec flags only
         // anyway
         check_exec: true,
@@ -210,6 +233,7 @@
             &ds_status.unsure
         );
     }
+    let mut fixup = Vec::new();
     if !ds_status.unsure.is_empty()
         && (display_states.modified || display_states.clean)
     {
@@ -218,37 +242,152 @@
             CommandError::from((e, &*format!("{:x}", p1.short())))
         })?;
         for to_check in ds_status.unsure {
-            if cat_file_is_modified(repo, &manifest, &to_check)? {
+            if unsure_is_modified(repo, &manifest, &to_check)? {
                 if display_states.modified {
                     ds_status.modified.push(to_check);
                 }
             } else {
                 if display_states.clean {
-                    ds_status.clean.push(to_check);
+                    ds_status.clean.push(to_check.clone());
                 }
+                fixup.push(to_check.into_owned())
             }
         }
     }
     if display_states.modified {
-        display_status_paths(ui, repo, config, &mut ds_status.modified, b"M")?;
+        display_status_paths(
+            ui,
+            repo,
+            config,
+            no_status,
+            &mut ds_status.modified,
+            b"M",
+        )?;
     }
     if display_states.added {
-        display_status_paths(ui, repo, config, &mut ds_status.added, b"A")?;
+        display_status_paths(
+            ui,
+            repo,
+            config,
+            no_status,
+            &mut ds_status.added,
+            b"A",
+        )?;
     }
     if display_states.removed {
-        display_status_paths(ui, repo, config, &mut ds_status.removed, b"R")?;
+        display_status_paths(
+            ui,
+            repo,
+            config,
+            no_status,
+            &mut ds_status.removed,
+            b"R",
+        )?;
     }
     if display_states.deleted {
-        display_status_paths(ui, repo, config, &mut ds_status.deleted, b"!")?;
+        display_status_paths(
+            ui,
+            repo,
+            config,
+            no_status,
+            &mut ds_status.deleted,
+            b"!",
+        )?;
     }
     if display_states.unknown {
-        display_status_paths(ui, repo, config, &mut ds_status.unknown, b"?")?;
+        display_status_paths(
+            ui,
+            repo,
+            config,
+            no_status,
+            &mut ds_status.unknown,
+            b"?",
+        )?;
     }
     if display_states.ignored {
-        display_status_paths(ui, repo, config, &mut ds_status.ignored, b"I")?;
+        display_status_paths(
+            ui,
+            repo,
+            config,
+            no_status,
+            &mut ds_status.ignored,
+            b"I",
+        )?;
     }
     if display_states.clean {
-        display_status_paths(ui, repo, config, &mut ds_status.clean, b"C")?;
+        display_status_paths(
+            ui,
+            repo,
+            config,
+            no_status,
+            &mut ds_status.clean,
+            b"C",
+        )?;
+    }
+
+    let mut dirstate_write_needed = ds_status.dirty;
+    let filesystem_time_at_status_start = ds_status
+        .filesystem_time_at_status_start
+        .map(TruncatedTimestamp::from);
+
+    if (fixup.is_empty() || filesystem_time_at_status_start.is_none())
+        && !dirstate_write_needed
+    {
+        // Nothing to update
+        return Ok(());
+    }
+
+    // Update the dirstate on disk if we can
+    let with_lock_result =
+        repo.try_with_wlock_no_wait(|| -> Result<(), CommandError> {
+            if let Some(mtime_boundary) = filesystem_time_at_status_start {
+                for hg_path in fixup {
+                    use std::os::unix::fs::MetadataExt;
+                    let fs_path = hg_path_to_path_buf(&hg_path)
+                        .expect("HgPath conversion");
+                    // Specifically do not reuse `fs_metadata` from
+                    // `unsure_is_modified`, which was taken before reading
+                    // the file contents. Here we access the metadata again
+                    // after reading the content, in case it changed in the
+                    // meantime.
+                    let fs_metadata = repo
+                        .working_directory_vfs()
+                        .symlink_metadata(&fs_path)?;
+                    let mtime = TruncatedTimestamp::for_mtime_of(&fs_metadata)
+                        .when_reading_file(&fs_path)?;
+                    if mtime.is_reliable_mtime(&mtime_boundary) {
+                        let mode = fs_metadata.mode();
+                        let size = fs_metadata.len() as u32 & RANGE_MASK_31BIT;
+                        let mut entry = dmap
+                            .get(&hg_path)?
+                            .expect("ambiguous file not in dirstate");
+                        entry.set_clean(mode, size, mtime);
+                        dmap.add_file(&hg_path, entry)?;
+                        dirstate_write_needed = true
+                    }
+                }
+            }
+            drop(dmap); // Avoid "already mutably borrowed" RefCell panics
+            if dirstate_write_needed {
+                repo.write_dirstate()?
+            }
+            Ok(())
+        });
+    match with_lock_result {
+        Ok(closure_result) => closure_result?,
+        Err(LockError::AlreadyHeld) => {
+            // Not updating the dirstate is not ideal but not critical:
+            // don’t keep our caller waiting until some other Mercurial
+            // process releases the lock.
+        }
+        Err(LockError::Other(HgError::IoError { error, .. }))
+            if error.kind() == io::ErrorKind::PermissionDenied =>
+        {
+            // `hg status` on a read-only repository is fine
+        }
+        Err(LockError::Other(error)) => {
+            // Report other I/O errors
+            Err(error)?
+        }
     }
     Ok(())
 }
@@ -259,32 +398,32 @@
     ui: &Ui,
     repo: &Repo,
     config: &Config,
+    no_status: bool,
     paths: &mut [HgPathCow],
     status_prefix: &[u8],
 ) -> Result<(), CommandError> {
     paths.sort_unstable();
-    let mut relative: bool =
-        config.get_bool(b"ui", b"relative-paths").unwrap_or(false);
+    let mut relative: bool = config.get_bool(b"ui", b"relative-paths")?;
     relative = config
-        .get_bool(b"commands", b"status.relative")
+        .get_option(b"commands", b"status.relative")?
         .unwrap_or(relative);
+    let print_path = |path: &[u8]| {
+        // TODO: optimize; there are probably lots of unneeded copies here,
+        // especially if the output stream is buffered
+        if no_status {
+            ui.write_stdout(&format_bytes!(b"{}\n", path))
+        } else {
+            ui.write_stdout(&format_bytes!(b"{} {}\n", status_prefix, path))
+        }
+    };
+
     if relative && !ui.plain() {
-        relativize_paths(
-            repo,
-            paths,
-            |path: Cow<[u8]>| -> Result<(), UiError> {
-                ui.write_stdout(
-                    &[status_prefix, b" ", path.as_ref(), b"\n"].concat(),
-                )
-            },
-        )?;
+        relativize_paths(repo, paths.iter().map(Ok), |path| {
+            print_path(&path)
+        })?;
     } else {
         for path in paths {
-            // Same TODO as in commands::root
-            let bytes: &[u8] = path.as_bytes();
-            // TODO optim, probably lots of unneeded copies here, especially
-            // if out stream is buffered
-            ui.write_stdout(&[status_prefix, b" ", bytes, b"\n"].concat())?;
+            print_path(path.as_bytes())?
         }
     }
     Ok(())
@@ -294,23 +433,42 @@
 ///
 /// This is meant to be used for files that the dirstate cannot resolve,
 /// due to time resolution limits.
-///
-/// TODO: detect permission bits and similar metadata modifications
-fn cat_file_is_modified(
+fn unsure_is_modified(
     repo: &Repo,
     manifest: &Manifest,
     hg_path: &HgPath,
 ) -> Result<bool, HgError> {
-    let file_node = manifest
+    let vfs = repo.working_directory_vfs();
+    let fs_path = hg_path_to_path_buf(hg_path).expect("HgPath conversion");
+    let fs_metadata = vfs.symlink_metadata(&fs_path)?;
+    let is_symlink = fs_metadata.file_type().is_symlink();
+    // TODO: Also account for `FALLBACK_SYMLINK` and `FALLBACK_EXEC` from the
+    // dirstate
+    let fs_flags = if is_symlink {
+        Some(b'l')
+    } else if has_exec_bit(&fs_metadata) {
+        Some(b'x')
+    } else {
+        None
+    };
+
+    let entry = manifest
         .find_file(hg_path)?
         .expect("ambgious file not in p1");
+    if entry.flags != fs_flags {
+        return Ok(true);
+    }
     let filelog = repo.filelog(hg_path)?;
-    let filelog_entry = filelog.data_for_node(file_node).map_err(|_| {
-        HgError::corrupted("filelog missing node from manifest")
-    })?;
+    let filelog_entry =
+        filelog.data_for_node(entry.node_id()?).map_err(|_| {
+            HgError::corrupted("filelog missing node from manifest")
+        })?;
     let contents_in_p1 = filelog_entry.data()?;
 
-    let fs_path = hg_path_to_os_string(hg_path).expect("HgPath conversion");
-    let fs_contents = repo.working_directory_vfs().read(fs_path)?;
-    return Ok(contents_in_p1 != &*fs_contents);
+    let fs_contents = if is_symlink {
+        get_bytes_from_os_string(vfs.read_link(fs_path)?.into_os_string())
+    } else {
+        vfs.read(fs_path)?
+    };
+    Ok(contents_in_p1 != &*fs_contents)
 }
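
The write-back loop above only caches an mtime when it is unambiguous relative to the filesystem time observed at the start of the status run; otherwise a later modification within the same second could go unnoticed. The guard in isolation, as a hedged sketch of the hg-core API used above:

  // Sketch only: `boundary` is the timestamp captured when status started
  // scanning (filesystem_time_at_status_start above).
  use hg::dirstate::TruncatedTimestamp;

  fn can_cache_mtime(
      mtime: &TruncatedTimestamp,
      boundary: &TruncatedTimestamp,
  ) -> bool {
      // An mtime is only considered reliable when it cannot be confused
      // with a write happening around the boundary.
      mtime.is_reliable_mtime(boundary)
  }
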
--- a/rust/rhg/src/main.rs	Mon Dec 06 10:08:04 2021 +0100
+++ b/rust/rhg/src/main.rs	Wed Dec 08 10:14:37 2021 +0100
@@ -1,4 +1,5 @@
 extern crate log;
+use crate::error::CommandError;
 use crate::ui::Ui;
 use clap::App;
 use clap::AppSettings;
@@ -20,7 +21,6 @@
 pub mod utils {
     pub mod path_utils;
 }
-use error::CommandError;
 
 fn main_with_result(
     process_start_time: &blackbox::ProcessStartTime,
@@ -28,7 +28,7 @@
     repo: Result<&Repo, &NoRepoInCwdError>,
     config: &Config,
 ) -> Result<(), CommandError> {
-    check_extensions(config)?;
+    check_unsupported(config, ui)?;
 
     let app = App::new("rhg")
         .global_setting(AppSettings::AllowInvalidUtf8)
@@ -110,18 +110,23 @@
         }
     }
 
-    let blackbox = blackbox::Blackbox::new(&invocation, process_start_time)?;
-    blackbox.log_command_start();
-    let result = run(&invocation);
-    blackbox.log_command_end(exit_code(
-        &result,
-        // TODO: show a warning or combine with original error if `get_bool`
-        // returns an error
-        config
-            .get_bool(b"ui", b"detailed-exit-code")
-            .unwrap_or(false),
-    ));
-    result
+    if config.is_extension_enabled(b"blackbox") {
+        let blackbox =
+            blackbox::Blackbox::new(&invocation, process_start_time)?;
+        blackbox.log_command_start();
+        let result = run(&invocation);
+        blackbox.log_command_end(exit_code(
+            &result,
+            // TODO: show a warning or combine with original error if
+            // `get_bool` returns an error
+            config
+                .get_bool(b"ui", b"detailed-exit-code")
+                .unwrap_or(false),
+        ));
+        result
+    } else {
+        run(&invocation)
+    }
 }
 
 fn main() {
@@ -179,7 +184,7 @@
             exit(
                 &initial_current_dir,
                 &ui,
-                OnUnsupported::from_config(&ui, &non_repo_config),
+                OnUnsupported::from_config(&non_repo_config),
                 Err(error.into()),
                 non_repo_config
                     .get_bool(b"ui", b"detailed-exit-code")
@@ -197,7 +202,7 @@
             exit(
                 &initial_current_dir,
                 &ui,
-                OnUnsupported::from_config(&ui, &non_repo_config),
+                OnUnsupported::from_config(&non_repo_config),
                 Err(CommandError::UnsupportedFeature {
                     message: format_bytes!(
                         b"URL-like --repository {}",
@@ -287,7 +292,7 @@
         Err(error) => exit(
             &initial_current_dir,
             &ui,
-            OnUnsupported::from_config(&ui, &non_repo_config),
+            OnUnsupported::from_config(&non_repo_config),
             Err(error.into()),
             // TODO: show a warning or combine with original error if
             // `get_bool` returns an error
@@ -302,7 +307,7 @@
     } else {
         &non_repo_config
     };
-    let on_unsupported = OnUnsupported::from_config(&ui, config);
+    let on_unsupported = OnUnsupported::from_config(config);
 
     let result = main_with_result(
         &process_start_time,
@@ -362,6 +367,20 @@
     ) = (&on_unsupported, &result)
     {
         let mut args = std::env::args_os();
+        let executable = match executable {
+            None => {
+                exit_no_fallback(
+                    ui,
+                    OnUnsupported::Abort,
+                    Err(CommandError::abort(
+                        "abort: 'rhg.on-unsupported=fallback' without \
+                                'rhg.fallback-executable' set.",
+                    )),
+                    false,
+                );
+            }
+            Some(executable) => executable,
+        };
         let executable_path = get_path_from_bytes(&executable);
         let this_executable = args.next().expect("expected argv[0] to exist");
         if executable_path == &PathBuf::from(this_executable) {
@@ -374,7 +393,8 @@
             ));
             on_unsupported = OnUnsupported::Abort
         } else {
-            // `args` is now `argv[1..]` since we’ve already consumed `argv[0]`
+            // `args` is now `argv[1..]` since we’ve already consumed
+            // `argv[0]`
             let mut command = Command::new(executable_path);
             command.args(args);
             if let Some(initial) = initial_current_dir {
@@ -465,6 +485,7 @@
     cat
     debugdata
     debugrequirements
+    debugignorerhg
     files
     root
     config
@@ -549,13 +570,13 @@
     /// Silently exit with code 252.
     AbortSilent,
     /// Try running a Python implementation
-    Fallback { executable: Vec<u8> },
+    Fallback { executable: Option<Vec<u8>> },
 }
 
 impl OnUnsupported {
     const DEFAULT: Self = OnUnsupported::Abort;
 
-    fn from_config(ui: &Ui, config: &Config) -> Self {
+    fn from_config(config: &Config) -> Self {
         match config
             .get(b"rhg", b"on-unsupported")
             .map(|value| value.to_ascii_lowercase())
@@ -566,18 +587,7 @@
             Some(b"fallback") => OnUnsupported::Fallback {
                 executable: config
                     .get(b"rhg", b"fallback-executable")
-                    .unwrap_or_else(|| {
-                        exit_no_fallback(
-                            ui,
-                            Self::Abort,
-                            Err(CommandError::abort(
-                                "abort: 'rhg.on-unsupported=fallback' without \
-                                'rhg.fallback-executable' set."
-                            )),
-                            false,
-                        )
-                    })
-                    .to_owned(),
+                    .map(|x| x.to_owned()),
             },
             None => Self::DEFAULT,
             Some(_) => {
@@ -588,7 +598,8 @@
     }
 }
 
-const SUPPORTED_EXTENSIONS: &[&[u8]] = &[b"blackbox", b"share"];
+const SUPPORTED_EXTENSIONS: &[&[u8]] =
+    &[b"blackbox", b"share", b"sparse", b"narrow"];
 
 fn check_extensions(config: &Config) -> Result<(), CommandError> {
     let enabled = config.get_section_keys(b"extensions");
@@ -616,3 +627,32 @@
         })
     }
 }
+
+fn check_unsupported(
+    config: &Config,
+    ui: &ui::Ui,
+) -> Result<(), CommandError> {
+    check_extensions(config)?;
+
+    if std::env::var_os("HG_PENDING").is_some() {
+        // TODO: only if the value is `== repo.working_directory`?
+        // What about relative vs. absolute paths?
+        Err(CommandError::unsupported("$HG_PENDING"))?
+    }
+
+    if config.has_non_empty_section(b"encode") {
+        Err(CommandError::unsupported("[encode] config"))?
+    }
+
+    if config.has_non_empty_section(b"decode") {
+        Err(CommandError::unsupported("[decode] config"))?
+    }
+
+    if let Some(color) = config.get(b"ui", b"color") {
+        if (color == b"always" || color == b"debug") && !ui.plain() {
+            Err(CommandError::unsupported("colored output"))?
+        }
+    }
+
+    Ok(())
+}
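
With this change `rhg.fallback-executable` may be left unset; rhg only aborts when a fallback is actually attempted without it. A hedged configuration example matching the keys read by `OnUnsupported::from_config` (the path is illustrative):

  [rhg]
  # one of: abort (default), abort-silent, fallback
  on-unsupported = fallback
  # only consulted when a fallback is attempted; rhg aborts at that point if unset
  fallback-executable = /usr/bin/hg
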
--- a/rust/rhg/src/utils/path_utils.rs	Mon Dec 06 10:08:04 2021 +0100
+++ b/rust/rhg/src/utils/path_utils.rs	Wed Dec 08 10:14:37 2021 +0100
@@ -5,6 +5,7 @@
 
 use crate::error::CommandError;
 use crate::ui::UiError;
+use hg::errors::HgError;
 use hg::repo::Repo;
 use hg::utils::current_dir;
 use hg::utils::files::{get_bytes_from_path, relativize_path};
@@ -14,7 +15,7 @@
 
 pub fn relativize_paths(
     repo: &Repo,
-    paths: impl IntoIterator<Item = impl AsRef<HgPath>>,
+    paths: impl IntoIterator<Item = Result<impl AsRef<HgPath>, HgError>>,
     mut callback: impl FnMut(Cow<[u8]>) -> Result<(), UiError>,
 ) -> Result<(), CommandError> {
     let cwd = current_dir()?;
@@ -38,10 +39,10 @@
 
     for file in paths {
         if outside_repo {
-            let file = repo_root_hgpath.join(file.as_ref());
+            let file = repo_root_hgpath.join(file?.as_ref());
             callback(relativize_path(&file, &cwd_hgpath))?;
         } else {
-            callback(relativize_path(file.as_ref(), &cwd_hgpath))?;
+            callback(relativize_path(file?.as_ref(), &cwd_hgpath))?;
         }
     }
     Ok(())
--- a/setup.py	Mon Dec 06 10:08:04 2021 +0100
+++ b/setup.py	Wed Dec 08 10:14:37 2021 +0100
@@ -535,7 +535,7 @@
             # (see mercurial/__modulepolicy__.py)
             if hgrustext != 'cpython' and hgrustext is not None:
                 if hgrustext:
-                    msg = 'unkown HGWITHRUSTEXT value: %s' % hgrustext
+                    msg = 'unknown HGWITHRUSTEXT value: %s' % hgrustext
                     printf(msg, file=sys.stderr)
                 hgrustext = None
             self.rust = hgrustext is not None
--- a/tests/failfilemerge.py	Mon Dec 06 10:08:04 2021 +0100
+++ b/tests/failfilemerge.py	Wed Dec 08 10:14:37 2021 +0100
@@ -9,12 +9,9 @@
 )
 
 
-def failfilemerge(
-    filemergefn, premerge, repo, wctx, mynode, orig, fcd, fco, fca, labels=None
-):
+def failfilemerge(*args, **kwargs):
     raise error.Abort(b"^C")
-    return filemergefn(premerge, repo, mynode, orig, fcd, fco, fca, labels)
 
 
 def extsetup(ui):
-    extensions.wrapfunction(filemerge, '_filemerge', failfilemerge)
+    extensions.wrapfunction(filemerge, 'filemerge', failfilemerge)
--- a/tests/fakedirstatewritetime.py	Mon Dec 06 10:08:04 2021 +0100
+++ b/tests/fakedirstatewritetime.py	Wed Dec 08 10:14:37 2021 +0100
@@ -9,7 +9,6 @@
 
 from mercurial import (
     context,
-    dirstate,
     dirstatemap as dirstatemapmod,
     extensions,
     policy,
@@ -38,14 +37,8 @@
 has_rust_dirstate = policy.importrust('dirstate') is not None
 
 
-def pack_dirstate(fakenow, orig, dmap, copymap, pl, now):
-    # execute what original parsers.pack_dirstate should do actually
-    # for consistency
-    for f, e in dmap.items():
-        if e.need_delay(now):
-            e.set_possibly_dirty()
-
-    return orig(dmap, copymap, pl, fakenow)
+def pack_dirstate(orig, dmap, copymap, pl):
+    return orig(dmap, copymap, pl)
 
 
 def fakewrite(ui, func):
@@ -62,30 +55,30 @@
     # parsing 'fakenow' in YYYYmmddHHMM format makes comparison between
     # 'fakenow' value and 'touch -t YYYYmmddHHMM' argument easy
     fakenow = dateutil.parsedate(fakenow, [b'%Y%m%d%H%M'])[0]
-    fakenow = timestamp.timestamp((fakenow, 0))
+    fakenow = timestamp.timestamp((fakenow, 0, False))
 
     if has_rust_dirstate:
         # The Rust implementation does not use public parse/pack dirstate
         # to prevent conversion round-trips
         orig_dirstatemap_write = dirstatemapmod.dirstatemap.write
-        wrapper = lambda self, tr, st, now: orig_dirstatemap_write(
-            self, tr, st, fakenow
-        )
+        wrapper = lambda self, tr, st: orig_dirstatemap_write(self, tr, st)
         dirstatemapmod.dirstatemap.write = wrapper
 
-    orig_dirstate_getfsnow = dirstate._getfsnow
-    wrapper = lambda *args: pack_dirstate(fakenow, orig_pack_dirstate, *args)
+    orig_get_fs_now = timestamp.get_fs_now
+    wrapper = lambda *args: pack_dirstate(orig_pack_dirstate, *args)
 
     orig_module = parsers
     orig_pack_dirstate = parsers.pack_dirstate
 
     orig_module.pack_dirstate = wrapper
-    dirstate._getfsnow = lambda *args: fakenow
+    timestamp.get_fs_now = (
+        lambda *args: fakenow
+    )  # XXX useless for this purpose now
     try:
         return func()
     finally:
         orig_module.pack_dirstate = orig_pack_dirstate
-        dirstate._getfsnow = orig_dirstate_getfsnow
+        timestamp.get_fs_now = orig_get_fs_now
         if has_rust_dirstate:
             dirstatemapmod.dirstatemap.write = orig_dirstatemap_write
 
--- a/tests/run-tests.py	Mon Dec 06 10:08:04 2021 +0100
+++ b/tests/run-tests.py	Wed Dec 08 10:14:37 2021 +0100
@@ -3228,6 +3228,7 @@
             # output.
             osenvironb[b'RHG_ON_UNSUPPORTED'] = b'fallback'
             osenvironb[b'RHG_FALLBACK_EXECUTABLE'] = real_hg
+            osenvironb[b'RHG_STATUS'] = b'1'
         else:
             # drop flag for hghave
             osenvironb.pop(b'RHG_INSTALLED_AS_HG', None)
--- a/tests/test-audit-path.t	Mon Dec 06 10:08:04 2021 +0100
+++ b/tests/test-audit-path.t	Wed Dec 08 10:14:37 2021 +0100
@@ -8,7 +8,7 @@
 
   $ hg add .hg/00changelog.i
   abort: path contains illegal component: .hg/00changelog.i
-  [255]
+  [10]
 
 #if symlink
 
@@ -91,7 +91,7 @@
   .hg/test
   $ hg update -Cr0
   abort: path contains illegal component: .hg/test
-  [255]
+  [10]
 
 attack foo/.hg/test
 
@@ -99,7 +99,7 @@
   foo/.hg/test
   $ hg update -Cr1
   abort: path 'foo/.hg/test' is inside nested repo 'foo'
-  [255]
+  [10]
 
 attack back/test where back symlinks to ..
 
@@ -125,7 +125,7 @@
   $ echo data > ../test/file
   $ hg update -Cr3
   abort: path contains illegal component: ../test
-  [255]
+  [10]
   $ cat ../test/file
   data
 
@@ -135,7 +135,7 @@
   /tmp/test
   $ hg update -Cr4
   abort: path contains illegal component: /tmp/test
-  [255]
+  [10]
 
   $ cd ..
 
--- a/tests/test-audit-subrepo.t	Mon Dec 06 10:08:04 2021 +0100
+++ b/tests/test-audit-subrepo.t	Wed Dec 08 10:14:37 2021 +0100
@@ -10,7 +10,7 @@
   $ echo 'sub/.hg = sub/.hg' >> .hgsub
   $ hg ci -qAm 'add subrepo "sub/.hg"'
   abort: path 'sub/.hg' is inside nested repo 'sub'
-  [255]
+  [10]
 
 prepare tampered repo (including the commit above):
 
@@ -34,7 +34,7 @@
 
   $ hg clone -q hgname hgname2
   abort: path 'sub/.hg' is inside nested repo 'sub'
-  [255]
+  [10]
 
 Test absolute path
 ------------------
@@ -47,7 +47,7 @@
   $ echo '/sub = sub' >> .hgsub
   $ hg ci -qAm 'add subrepo "/sub"'
   abort: path contains illegal component: /sub
-  [255]
+  [10]
 
 prepare tampered repo (including the commit above):
 
@@ -71,7 +71,7 @@
 
   $ hg clone -q absolutepath absolutepath2
   abort: path contains illegal component: /sub
-  [255]
+  [10]
 
 Test root path
 --------------
@@ -84,7 +84,7 @@
   $ echo '/ = sub' >> .hgsub
   $ hg ci -qAm 'add subrepo "/"'
   abort: path ends in directory separator: /
-  [255]
+  [10]
 
 prepare tampered repo (including the commit above):
 
@@ -108,7 +108,7 @@
 
   $ hg clone -q rootpath rootpath2
   abort: path ends in directory separator: /
-  [255]
+  [10]
 
 Test empty path
 ---------------
@@ -197,7 +197,7 @@
   $ echo '../sub = ../sub' >> .hgsub
   $ hg ci -qAm 'add subrepo "../sub"'
   abort: path contains illegal component: ../sub
-  [255]
+  [10]
 
 prepare tampered repo (including the commit above):
 
@@ -221,7 +221,7 @@
 
   $ hg clone -q main main2
   abort: path contains illegal component: ../sub
-  [255]
+  [10]
   $ cd ..
 
 Test variable expansion
@@ -718,7 +718,7 @@
 
   $ hg clone -q driveletter driveletter2
   abort: path contains illegal component: X:
-  [255]
+  [10]
 
 #else
 
--- a/tests/test-bad-extension.t	Mon Dec 06 10:08:04 2021 +0100
+++ b/tests/test-bad-extension.t	Wed Dec 08 10:14:37 2021 +0100
@@ -52,16 +52,18 @@
   > EOF
 
   $ hg -q help help 2>&1 |grep extension
-  *** failed to import extension badext from $TESTTMP/badext.py: bit bucket overflow
-  *** failed to import extension badext2: No module named *badext2* (glob)
+  *** failed to import extension "badext" from $TESTTMP/badext.py: bit bucket overflow
+  *** failed to import extension "badext2": No module named 'badext2' (py3 !)
+  *** failed to import extension "badext2": No module named badext2 (no-py3 !)
 
 show traceback
 
   $ hg -q help help --traceback 2>&1 | egrep ' extension|^Exception|Traceback|ImportError|ModuleNotFound'
-  *** failed to import extension badext from $TESTTMP/badext.py: bit bucket overflow
+  *** failed to import extension "badext" from $TESTTMP/badext.py: bit bucket overflow
   Traceback (most recent call last):
   Exception: bit bucket overflow
-  *** failed to import extension badext2: No module named *badext2* (glob)
+  *** failed to import extension "badext2": No module named 'badext2' (py3 !)
+  *** failed to import extension "badext2": No module named badext2 (no-py3 !)
   Traceback (most recent call last):
   ImportError: No module named badext2 (no-py3 !)
   ImportError: No module named 'hgext.badext2' (py3 no-py36 !)
@@ -101,7 +103,7 @@
   YYYY/MM/DD HH:MM:SS (PID)>     - invoking registered callbacks: gpg
   YYYY/MM/DD HH:MM:SS (PID)>     > callbacks completed in * (glob)
   YYYY/MM/DD HH:MM:SS (PID)>   - loading extension: badext
-  *** failed to import extension badext from $TESTTMP/badext.py: bit bucket overflow
+  *** failed to import extension "badext" from $TESTTMP/badext.py: bit bucket overflow
   Traceback (most recent call last):
   Exception: bit bucket overflow
   YYYY/MM/DD HH:MM:SS (PID)>   - loading extension: baddocext
@@ -123,7 +125,8 @@
   Traceback (most recent call last): (py3 !)
   ImportError: No module named 'hgext3rd.badext2' (py3 no-py36 !)
   ModuleNotFoundError: No module named 'hgext3rd.badext2' (py36 !)
-  *** failed to import extension badext2: No module named *badext2* (glob)
+  *** failed to import extension "badext2": No module named 'badext2' (py3 !)
+  *** failed to import extension "badext2": No module named badext2 (no-py3 !)
   Traceback (most recent call last):
   ImportError: No module named 'hgext.badext2' (py3 no-py36 !)
   ModuleNotFoundError: No module named 'hgext.badext2' (py36 !)
@@ -160,8 +163,9 @@
 confirm that there's no crash when an extension's documentation is bad
 
   $ hg help --keyword baddocext
-  *** failed to import extension badext from $TESTTMP/badext.py: bit bucket overflow
-  *** failed to import extension badext2: No module named *badext2* (glob)
+  *** failed to import extension "badext" from $TESTTMP/badext.py: bit bucket overflow
+  *** failed to import extension "badext2": No module named 'badext2' (py3 !)
+  *** failed to import extension "badext2": No module named badext2 (no-py3 !)
   Topics:
   
    extensions Using Additional Features
--- a/tests/test-basic.t	Mon Dec 06 10:08:04 2021 +0100
+++ b/tests/test-basic.t	Wed Dec 08 10:14:37 2021 +0100
@@ -40,7 +40,7 @@
   A a
 
   $ hg status >/dev/full
-  abort: No space left on device
+  abort: No space left on device* (glob)
   [255]
 #endif
 
--- a/tests/test-bookmarks-current.t	Mon Dec 06 10:08:04 2021 +0100
+++ b/tests/test-bookmarks-current.t	Wed Dec 08 10:14:37 2021 +0100
@@ -245,4 +245,4 @@
   $ hg bookmarks --inactive
   $ hg bookmarks -ql .
   abort: no active bookmark
-  [255]
+  [10]
--- a/tests/test-bookmarks-pushpull.t	Mon Dec 06 10:08:04 2021 +0100
+++ b/tests/test-bookmarks-pushpull.t	Wed Dec 08 10:14:37 2021 +0100
@@ -357,7 +357,7 @@
   (leaving bookmark V)
   $ hg push -B . ../a
   abort: no active bookmark
-  [255]
+  [10]
   $ hg update -r V
   0 files updated, 0 files merged, 1 files removed, 0 files unresolved
   (activating bookmark V)
--- a/tests/test-bookmarks.t	Mon Dec 06 10:08:04 2021 +0100
+++ b/tests/test-bookmarks.t	Wed Dec 08 10:14:37 2021 +0100
@@ -278,7 +278,7 @@
   $ hg book -i rename-me
   $ hg book -m . renamed
   abort: no active bookmark
-  [255]
+  [10]
   $ hg up -q Y
   $ hg book -d rename-me
 
@@ -298,7 +298,7 @@
   $ hg book -i delete-me
   $ hg book -d .
   abort: no active bookmark
-  [255]
+  [10]
   $ hg up -q Y
   $ hg book -d delete-me
 
--- a/tests/test-branch-option.t	Mon Dec 06 10:08:04 2021 +0100
+++ b/tests/test-branch-option.t	Wed Dec 08 10:14:37 2021 +0100
@@ -58,12 +58,12 @@
 
   $ hg in -qbz
   abort: unknown branch 'z'
-  [255]
+  [10]
   $ hg in -q ../branch#z
   2:f25d57ab0566
   $ hg out -qbz
   abort: unknown branch 'z'
-  [255]
+  [10]
 
 in rev c branch a
 
--- a/tests/test-bundle.t	Mon Dec 06 10:08:04 2021 +0100
+++ b/tests/test-bundle.t	Wed Dec 08 10:14:37 2021 +0100
@@ -716,7 +716,7 @@
   $ hg incoming '../test#bundle.hg'
   comparing with ../test
   abort: unknown revision 'bundle.hg'
-  [255]
+  [10]
 
 note that percent encoding is not handled:
 
--- a/tests/test-casecollision.t	Mon Dec 06 10:08:04 2021 +0100
+++ b/tests/test-casecollision.t	Wed Dec 08 10:14:37 2021 +0100
@@ -12,7 +12,7 @@
   ? A
   $ hg add --config ui.portablefilenames=abort A
   abort: possible case-folding collision for A
-  [255]
+  [20]
   $ hg st
   A a
   ? A
--- a/tests/test-check-module-imports.t	Mon Dec 06 10:08:04 2021 +0100
+++ b/tests/test-check-module-imports.t	Wed Dec 08 10:14:37 2021 +0100
@@ -41,4 +41,5 @@
   > -X tests/test-demandimport.py \
   > -X tests/test-imports-checker.t \
   > -X tests/test-verify-repo-operations.py \
+  > -X tests/test-extension.t \
   > | sed 's-\\-/-g' | "$PYTHON" "$import_checker" -
--- a/tests/test-commandserver.t	Mon Dec 06 10:08:04 2021 +0100
+++ b/tests/test-commandserver.t	Wed Dec 08 10:14:37 2021 +0100
@@ -159,7 +159,7 @@
   ...                         b'default'])
   *** runcommand log -b --config=alias.log=!echo pwned default
   abort: unknown revision '--config=alias.log=!echo pwned'
-   [255]
+   [10]
 
 check that "histedit --commands=-" can read rules from the input channel:
 
--- a/tests/test-commit-interactive.t	Mon Dec 06 10:08:04 2021 +0100
+++ b/tests/test-commit-interactive.t	Wed Dec 08 10:14:37 2021 +0100
@@ -1494,7 +1494,7 @@
   Hunk #1 FAILED at 0
   1 out of 1 hunks FAILED -- saving rejects to file editedfile.rej
   abort: patch failed to apply
-  [10]
+  [20]
   $ cat editedfile
   This change will not be committed
   This is the second line
--- a/tests/test-commit.t	Mon Dec 06 10:08:04 2021 +0100
+++ b/tests/test-commit.t	Wed Dec 08 10:14:37 2021 +0100
@@ -134,13 +134,13 @@
   $ hg add quux
   $ hg commit -m "adding internal used extras" --extra amend_source=hash
   abort: key 'amend_source' is used internally, can't be set manually
-  [255]
+  [10]
   $ hg commit -m "special chars in extra" --extra id@phab=214
   abort: keys can only contain ascii letters, digits, '_' and '-'
-  [255]
+  [10]
   $ hg commit -m "empty key" --extra =value
   abort: unable to parse '=value', keys can't be empty
-  [255]
+  [10]
   $ hg commit -m "adding extras" --extra sourcehash=foo --extra oldhash=bar
   $ hg log -r . -T '{extras % "{extra}\n"}'
   branch=default
@@ -661,11 +661,11 @@
 #if windows
   $ hg co --clean tip
   abort: path contains illegal component: .h\xe2\x80\x8cg\\hgrc (esc)
-  [255]
+  [10]
 #else
   $ hg co --clean tip
   abort: path contains illegal component: .h\xe2\x80\x8cg/hgrc (esc)
-  [255]
+  [10]
 #endif
 
   $ hg rollback -f
@@ -686,7 +686,7 @@
   $ "$PYTHON" evil-commit.py
   $ hg co --clean tip
   abort: path contains illegal component: HG~1/hgrc
-  [255]
+  [10]
 
   $ hg rollback -f
   repository tip rolled back to revision 2 (undo commit)
@@ -706,7 +706,7 @@
   $ "$PYTHON" evil-commit.py
   $ hg co --clean tip
   abort: path contains illegal component: HG8B6C~2/hgrc
-  [255]
+  [10]
 
   $ cd ..
 
--- a/tests/test-conflict.t	Mon Dec 06 10:08:04 2021 +0100
+++ b/tests/test-conflict.t	Wed Dec 08 10:14:37 2021 +0100
@@ -1,3 +1,7 @@
+TODO: fix rhg bugs that make this test fail when status is enabled
+  $ unset RHG_STATUS
+
+
   $ hg init
   $ cat << EOF > a
   > Small Mathematical Series.
--- a/tests/test-copy-move-merge.t	Mon Dec 06 10:08:04 2021 +0100
+++ b/tests/test-copy-move-merge.t	Wed Dec 08 10:14:37 2021 +0100
@@ -104,12 +104,12 @@
    preserving a for resolve of b
    preserving a for resolve of c
   removing a
-   b: remote moved from a -> m (premerge)
+   b: remote moved from a -> m
   picked tool ':merge' for b (binary False symlink False changedelete False)
   merging a and b to b
   my b@add3f11052fa+ other b@17c05bb7fcb6 ancestor a@b8bf91eeebbc
    premerge successful
-   c: remote moved from a -> m (premerge)
+   c: remote moved from a -> m
   picked tool ':merge' for c (binary False symlink False changedelete False)
   merging a and c to c
   my c@add3f11052fa+ other c@17c05bb7fcb6 ancestor a@b8bf91eeebbc
--- a/tests/test-diff-unified.t	Mon Dec 06 10:08:04 2021 +0100
+++ b/tests/test-diff-unified.t	Wed Dec 08 10:14:37 2021 +0100
@@ -46,7 +46,7 @@
 
   $ hg diff --nodates -U foo
   abort: diff context lines count must be an integer, not 'foo'
-  [255]
+  [10]
 
 
   $ hg diff --nodates -U 2
@@ -87,7 +87,7 @@
 
   $ hg --config diff.unified=foo diff --nodates
   abort: diff context lines count must be an integer, not 'foo'
-  [255]
+  [10]
 
 noprefix config and option
 
--- a/tests/test-dirstate-race.t	Mon Dec 06 10:08:04 2021 +0100
+++ b/tests/test-dirstate-race.t	Wed Dec 08 10:14:37 2021 +0100
@@ -18,7 +18,7 @@
 Do we ever miss a sub-second change?:
 
   $ for i in 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20; do
-  >     hg co -qC 0
+  >     hg update -qC 0
   >     echo b > a
   >     hg st
   > done
@@ -66,11 +66,11 @@
   > )
   > def extsetup(ui):
   >     extensions.wrapfunction(context.workingctx, '_checklookup', overridechecklookup)
-  > def overridechecklookup(orig, self, files):
+  > def overridechecklookup(orig, self, *args, **kwargs):
   >     # make an update that changes the dirstate from underneath
   >     self._repo.ui.system(br"sh '$TESTTMP/dirstaterace.sh'",
   >                          cwd=self._repo.root)
-  >     return orig(self, files)
+  >     return orig(self, *args, **kwargs)
   > EOF
 
   $ hg debugrebuilddirstate
@@ -89,6 +89,7 @@
   > rm b && rm -r dir1 && rm d && mkdir d && rm e && mkdir e
   > EOF
 
+  $ sleep 1 # ensure non-ambiguous mtime
   $ hg status --config extensions.dirstaterace=$TESTTMP/dirstaterace.py
   M d
   M e
@@ -147,6 +148,8 @@
   > 
   > hg update -q -C 0
   > hg cat -r 1 b > b
+  > # make sure the timestamp is not ambiguous and a write will be issued
+  > touch -t 198606251012 b
   > EOF
 
 "hg status" below should excludes "e", of which exec flag is set, for
--- a/tests/test-dirstate-race2.t	Mon Dec 06 10:08:04 2021 +0100
+++ b/tests/test-dirstate-race2.t	Wed Dec 08 10:14:37 2021 +0100
@@ -19,6 +19,12 @@
   $ hg commit -qAm _
   $ echo aa > a
   $ hg commit -m _
+# This sleep ensures that at least one second passes after the writes above,
+# so that their mtime is not ambiguous. Sleeping longer would also be fine.
+# It is not used to synchronise parallel operations, so it is "fine" to use
+# it.
+  $ sleep 1
+  $ hg status
 
   $ hg debugdirstate --no-dates
   n 644          3 (set  |unset)               a (re)
--- a/tests/test-dispatch.t	Mon Dec 06 10:08:04 2021 +0100
+++ b/tests/test-dispatch.t	Wed Dec 08 10:14:37 2021 +0100
@@ -84,7 +84,7 @@
   > raise Exception('bad')
   > EOF
   $ hg log -b '--config=extensions.bad=bad.py' default
-  *** failed to import extension bad from bad.py: bad
+  *** failed to import extension "bad" from bad.py: bad
   abort: option --config may not be abbreviated
   [10]
 
@@ -127,20 +127,20 @@
 #if no-chg
   $ HGPLAIN=+strictflags hg log -b --config='hooks.pre-log=false' default
   abort: unknown revision '--config=hooks.pre-log=false'
-  [255]
+  [10]
   $ HGPLAIN=+strictflags hg log -b -R. default
   abort: unknown revision '-R.'
-  [255]
+  [10]
   $ HGPLAIN=+strictflags hg log -b --cwd=. default
   abort: unknown revision '--cwd=.'
-  [255]
+  [10]
 #endif
   $ HGPLAIN=+strictflags hg log -b --debugger default
   abort: unknown revision '--debugger'
-  [255]
+  [10]
   $ HGPLAIN=+strictflags hg log -b --config='alias.log=!echo pwned' default
   abort: unknown revision '--config=alias.log=!echo pwned'
-  [255]
+  [10]
 
   $ HGPLAIN=+strictflags hg log --config='hooks.pre-log=false' -b default
   abort: option --config may not be abbreviated
--- a/tests/test-double-merge.t	Mon Dec 06 10:08:04 2021 +0100
+++ b/tests/test-double-merge.t	Wed Dec 08 10:14:37 2021 +0100
@@ -38,12 +38,12 @@
   starting 4 threads for background file closing (?)
    preserving foo for resolve of bar
    preserving foo for resolve of foo
-   bar: remote copied from foo -> m (premerge)
+   bar: remote copied from foo -> m
   picked tool ':merge' for bar (binary False symlink False changedelete False)
   merging foo and bar to bar
   my bar@6a0df1dad128+ other bar@484bf6903104 ancestor foo@e6dc8efe11cc
    premerge successful
-   foo: versions differ -> m (premerge)
+   foo: versions differ -> m
   picked tool ':merge' for foo (binary False symlink False changedelete False)
   merging foo
   my foo@6a0df1dad128+ other foo@484bf6903104 ancestor foo@e6dc8efe11cc
--- a/tests/test-extension.t	Mon Dec 06 10:08:04 2021 +0100
+++ b/tests/test-extension.t	Wed Dec 08 10:14:37 2021 +0100
@@ -649,7 +649,7 @@
 module stub. Our custom lazy importer for Python 2 always returns a stub.
 
   $ (PYTHONPATH=${PYTHONPATH}${PATHSEP}${TESTTMP}; hg --config extensions.checkrelativity=$TESTTMP/checkrelativity.py checkrelativity) || true
-  *** failed to import extension checkrelativity from $TESTTMP/checkrelativity.py: No module named 'extlibroot.lsub1.lsub2.notexist' (py3 !)
+  *** failed to import extension "checkrelativity" from $TESTTMP/checkrelativity.py: No module named 'extlibroot.lsub1.lsub2.notexist' (py3 !)
   hg: unknown command 'checkrelativity' (py3 !)
   (use 'hg help' for a list of commands) (py3 !)
 
@@ -1882,7 +1882,7 @@
   > EOF
 
   $ hg deprecatedcmd > /dev/null
-  *** failed to import extension deprecatedcmd from $TESTTMP/deprecated/deprecatedcmd.py: missing attributes: norepo, optionalrepo, inferrepo
+  *** failed to import extension "deprecatedcmd" from $TESTTMP/deprecated/deprecatedcmd.py: missing attributes: norepo, optionalrepo, inferrepo
   *** (use @command decorator to register 'deprecatedcmd')
   hg: unknown command 'deprecatedcmd'
   (use 'hg help' for a list of commands)
@@ -1891,7 +1891,7 @@
  the extension shouldn't be loaded at all so the mq works:
 
   $ hg qseries --config extensions.mq= > /dev/null
-  *** failed to import extension deprecatedcmd from $TESTTMP/deprecated/deprecatedcmd.py: missing attributes: norepo, optionalrepo, inferrepo
+  *** failed to import extension "deprecatedcmd" from $TESTTMP/deprecated/deprecatedcmd.py: missing attributes: norepo, optionalrepo, inferrepo
   *** (use @command decorator to register 'deprecatedcmd')
 
   $ cd ..
@@ -1939,8 +1939,117 @@
   > test_unicode_default_value = $TESTTMP/test_unicode_default_value.py
   > EOF
   $ hg -R $TESTTMP/opt-unicode-default dummy
-  *** failed to import extension test_unicode_default_value from $TESTTMP/test_unicode_default_value.py: unicode *'value' found in cmdtable.dummy (glob)
+  *** failed to import extension "test_unicode_default_value" from $TESTTMP/test_unicode_default_value.py: unicode 'value' found in cmdtable.dummy (py3 !)
+  *** failed to import extension "test_unicode_default_value" from $TESTTMP/test_unicode_default_value.py: unicode u'value' found in cmdtable.dummy (no-py3 !)
   *** (use b'' to make it byte string)
   hg: unknown command 'dummy'
   (did you mean summary?)
   [10]
+
+Check the mandatory extension feature
+-------------------------------------
+
+  $ hg init mandatory-extensions
+  $ cat > $TESTTMP/mandatory-extensions/.hg/good.py << EOF
+  > pass
+  > EOF
+  $ cat > $TESTTMP/mandatory-extensions/.hg/bad.py << EOF
+  > raise RuntimeError("babar")
+  > EOF
+  $ cat > $TESTTMP/mandatory-extensions/.hg/syntax.py << EOF
+  > def (
+  > EOF
+
+Check that the good one loads:
+
+  $ cat > $TESTTMP/mandatory-extensions/.hg/hgrc << EOF
+  > [extensions]
+  > good = $TESTTMP/mandatory-extensions/.hg/good.py
+  > EOF
+
+  $ hg -R mandatory-extensions id
+  000000000000 tip
+
+Make it mandatory to load
+
+  $ cat >> $TESTTMP/mandatory-extensions/.hg/hgrc << EOF
+  > good:required = yes
+  > EOF
+
+  $ hg -R mandatory-extensions id
+  000000000000 tip
+
+Check that the bad one does not load
+
+  $ cat >> $TESTTMP/mandatory-extensions/.hg/hgrc << EOF
+  > bad = $TESTTMP/mandatory-extensions/.hg/bad.py
+  > EOF
+
+  $ hg -R mandatory-extensions id
+  *** failed to import extension "bad" from $TESTTMP/mandatory-extensions/.hg/bad.py: babar
+  000000000000 tip
+
+Make it mandatory to load
+
+  $ cat >> $TESTTMP/mandatory-extensions/.hg/hgrc << EOF
+  > bad:required = yes
+  > EOF
+
+  $ hg -R mandatory-extensions id
+  abort: failed to import extension "bad" from $TESTTMP/mandatory-extensions/.hg/bad.py: babar
+  (loading of this extension was required, see `hg help config.extensions` for details)
+  [255]
+
+Make it not mandatory to load
+
+  $ cat >> $TESTTMP/mandatory-extensions/.hg/hgrc << EOF
+  > bad:required = no
+  > EOF
+
+  $ hg -R mandatory-extensions id
+  *** failed to import extension "bad" from $TESTTMP/mandatory-extensions/.hg/bad.py: babar
+  000000000000 tip
+
+Same check with the syntax error one
+
+  $ cat >> $TESTTMP/mandatory-extensions/.hg/hgrc << EOF
+  > bad = !
+  > syntax = $TESTTMP/mandatory-extensions/.hg/syntax.py
+  > syntax:required = yes
+  > EOF
+
+  $ hg -R mandatory-extensions id
+  abort: failed to import extension "syntax" from $TESTTMP/mandatory-extensions/.hg/syntax.py: invalid syntax (*syntax.py, line 1) (glob)
+  (loading of this extension was required, see `hg help config.extensions` for details)
+  [255]
+
+Same check with a missing one
+
+  $ cat >> $TESTTMP/mandatory-extensions/.hg/hgrc << EOF
+  > syntax = !
+  > syntax:required =
+  > missing = foo/bar/baz/I/do/not/exist/
+  > missing:required = yes
+  > EOF
+
+  $ hg -R mandatory-extensions id
+  abort: failed to import extension "missing" from foo/bar/baz/I/do/not/exist/: [Errno 2] $ENOENT$: 'foo/bar/baz/I/do/not/exist'
+  (loading of this extension was required, see `hg help config.extensions` for details)
+  [255]
+
+Have a "default" setting for the suboption:
+
+  $ cat > $TESTTMP/mandatory-extensions/.hg/hgrc << EOF
+  > [extensions]
+  > bad = $TESTTMP/mandatory-extensions/.hg/bad.py
+  > bad:required = no
+  > good = $TESTTMP/mandatory-extensions/.hg/good.py
+  > syntax = $TESTTMP/mandatory-extensions/.hg/syntax.py
+  > *:required = yes
+  > EOF
+
+  $ hg -R mandatory-extensions id
+  *** failed to import extension "bad" from $TESTTMP/mandatory-extensions/.hg/bad.py: babar
+  abort: failed to import extension "syntax" from $TESTTMP/mandatory-extensions/.hg/syntax.py: invalid syntax (*syntax.py, line 1) (glob)
+  (loading of this extension was required, see `hg help config.extensions` for details)
+  [255]
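
To summarize the suboption exercised by the new test above: an extension can be marked as required so that a failure to import it aborts the command instead of only printing a warning, and `*:required` sets the default for every configured extension. A minimal hgrc sketch, using a placeholder extension name and path:

  $ cat >> .hg/hgrc << EOF
  > [extensions]
  > myext = /path/to/myext.py
  > myext:required = yes
  > *:required = yes
  > EOF
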
--- a/tests/test-graft.t	Mon Dec 06 10:08:04 2021 +0100
+++ b/tests/test-graft.t	Wed Dec 08 10:14:37 2021 +0100
@@ -212,7 +212,7 @@
    ancestor: 68795b066622, local: ef0ef43d49e7+, remote: 5d205f8b35b6
   starting 4 threads for background file closing (?)
    preserving b for resolve of b
-   b: local copied/moved from a -> m (premerge)
+   b: local copied/moved from a -> m
   picked tool ':merge' for b (binary False symlink False changedelete False)
   merging b and a to b
   my b@ef0ef43d49e7+ other a@5d205f8b35b6 ancestor a@68795b066622
@@ -242,13 +242,10 @@
    d: remote is newer -> g
   getting d
    preserving e for resolve of e
-   e: versions differ -> m (premerge)
+   e: versions differ -> m
   picked tool ':merge' for e (binary False symlink False changedelete False)
   merging e
   my e@1905859650ec+ other e@9c233e8e184d ancestor e@4c60f11aa304
-   e: versions differ -> m (merge)
-  picked tool ':merge' for e (binary False symlink False changedelete False)
-  my e@1905859650ec+ other e@9c233e8e184d ancestor e@4c60f11aa304
   warning: conflicts while merging e! (edit, then use 'hg resolve --mark')
   abort: unresolved conflicts, can't continue
   (use 'hg resolve' and 'hg graft --continue')
@@ -855,8 +852,8 @@
   $ hg graft -r 6 --base 5
   grafting 6:25a2b029d3ae "6"
   merging d
+  warning: conflicts while merging d! (edit, then use 'hg resolve --mark')
   merging e
-  warning: conflicts while merging d! (edit, then use 'hg resolve --mark')
   abort: unresolved conflicts, can't continue
   (use 'hg resolve' and 'hg graft --continue')
   [1]
--- a/tests/test-help.t	Mon Dec 06 10:08:04 2021 +0100
+++ b/tests/test-help.t	Wed Dec 08 10:14:37 2021 +0100
@@ -1517,26 +1517,38 @@
       "commands.update.check"
           Determines what level of checking 'hg update' will perform before
           moving to a destination revision. Valid values are "abort", "none",
-          "linear", and "noconflict". "abort" always fails if the working
-          directory has uncommitted changes. "none" performs no checking, and
-          may result in a merge with uncommitted changes. "linear" allows any
-          update as long as it follows a straight line in the revision history,
-          and may trigger a merge with uncommitted changes. "noconflict" will
-          allow any update which would not trigger a merge with uncommitted
-          changes, if any are present. (default: "linear")
+          "linear", and "noconflict".
+  
+          - "abort" always fails if the working directory has uncommitted
+            changes.
+          - "none" performs no checking, and may result in a merge with
+            uncommitted changes.
+          - "linear" allows any update as long as it follows a straight line in
+            the revision history, and may trigger a merge with uncommitted
+            changes.
+          - "noconflict" will allow any update which would not trigger a merge
+            with uncommitted changes, if any are present.
+  
+          (default: "linear")
   
 
   $ hg help config.commands.update.check
       "commands.update.check"
           Determines what level of checking 'hg update' will perform before
           moving to a destination revision. Valid values are "abort", "none",
-          "linear", and "noconflict". "abort" always fails if the working
-          directory has uncommitted changes. "none" performs no checking, and
-          may result in a merge with uncommitted changes. "linear" allows any
-          update as long as it follows a straight line in the revision history,
-          and may trigger a merge with uncommitted changes. "noconflict" will
-          allow any update which would not trigger a merge with uncommitted
-          changes, if any are present. (default: "linear")
+          "linear", and "noconflict".
+  
+          - "abort" always fails if the working directory has uncommitted
+            changes.
+          - "none" performs no checking, and may result in a merge with
+            uncommitted changes.
+          - "linear" allows any update as long as it follows a straight line in
+            the revision history, and may trigger a merge with uncommitted
+            changes.
+          - "noconflict" will allow any update which would not trigger a merge
+            with uncommitted changes, if any are present.
+  
+          (default: "linear")
   
 
   $ hg help config.ommands.update.check
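
For reference, the setting documented above lives in the `[commands]` section of an hgrc. A minimal sketch, where the chosen value is only an example:

  $ cat >> .hg/hgrc << EOF
  > [commands]
  > update.check = noconflict
  > EOF
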
--- a/tests/test-hgignore.t	Mon Dec 06 10:08:04 2021 +0100
+++ b/tests/test-hgignore.t	Wed Dec 08 10:14:37 2021 +0100
@@ -9,6 +9,10 @@
   > EOF
 #endif
 
+TODO: fix rhg bugs that make this test fail when status is enabled
+  $ unset RHG_STATUS
+
+
   $ hg init ignorerepo
   $ cd ignorerepo
 
--- a/tests/test-import-bypass.t	Mon Dec 06 10:08:04 2021 +0100
+++ b/tests/test-import-bypass.t	Wed Dec 08 10:14:37 2021 +0100
@@ -43,7 +43,7 @@
   unable to find 'a' for patching
   (use '--prefix' to apply patch relative to the current directory)
   abort: patch failed to apply
-  [255]
+  [20]
   $ hg st
   $ shortlog
   o  1:4e322f7ce8e3 test 0 0 - foo - changea
@@ -234,7 +234,7 @@
   patching file a
   Hunk #1 FAILED at 0
   abort: patch failed to apply
-  [255]
+  [20]
   $ hg --config patch.eol=auto import -d '0 0' -m 'test patch.eol' --bypass ../test.diff
   applying ../test.diff
   $ shortlog
--- a/tests/test-import-git.t	Mon Dec 06 10:08:04 2021 +0100
+++ b/tests/test-import-git.t	Wed Dec 08 10:14:37 2021 +0100
@@ -519,7 +519,8 @@
   > EOF
   applying patch from stdin
   abort: could not decode "binary2" binary patch: bad base85 character at position 6
-  [255]
+  (check that whitespace in the patch has not been mangled)
+  [10]
 
   $ hg revert -aq
   $ hg import -d "1000000 0" -m rename-as-binary - <<"EOF"
@@ -534,7 +535,8 @@
   > EOF
   applying patch from stdin
   abort: "binary2" length is 5 bytes, should be 6
-  [255]
+  (check that whitespace in the patch has not been mangled)
+  [10]
 
   $ hg revert -aq
   $ hg import -d "1000000 0" -m rename-as-binary - <<"EOF"
@@ -548,7 +550,8 @@
   > EOF
   applying patch from stdin
   abort: could not extract "binary2" binary data
-  [255]
+  (check that whitespace in the patch has not been mangled)
+  [10]
 
 Simulate a copy/paste turning LF into CRLF (issue2870)
 
@@ -748,7 +751,7 @@
   > EOF
   applying patch from stdin
   abort: cannot create b: destination already exists
-  [255]
+  [20]
   $ cat b
   b
 
@@ -768,7 +771,7 @@
   cannot create b: destination already exists
   1 out of 1 hunks FAILED -- saving rejects to file b.rej
   abort: patch failed to apply
-  [255]
+  [20]
   $ cat b
   b
 
@@ -791,7 +794,7 @@
   Hunk #1 FAILED at 0
   1 out of 1 hunks FAILED -- saving rejects to file linkb.rej
   abort: patch failed to apply
-  [255]
+  [20]
   $ hg st
   ? b.rej
   ? linkb.rej
--- a/tests/test-import-unknown.t	Mon Dec 06 10:08:04 2021 +0100
+++ b/tests/test-import-unknown.t	Wed Dec 08 10:14:37 2021 +0100
@@ -29,7 +29,7 @@
   file added already exists
   1 out of 1 hunks FAILED -- saving rejects to file added.rej
   abort: patch failed to apply
-  [255]
+  [20]
 
 Test modifying an unknown file
 
@@ -41,7 +41,7 @@
   $ hg import --no-commit ../unknown.diff
   applying ../unknown.diff
   abort: cannot patch changed: file is not tracked
-  [255]
+  [20]
 
 Test removing an unknown file
 
@@ -54,7 +54,7 @@
   $ hg import --no-commit ../unknown.diff
   applying ../unknown.diff
   abort: cannot patch removed: file is not tracked
-  [255]
+  [20]
 
 Test copying onto an unknown file
 
@@ -64,6 +64,6 @@
   $ hg import --no-commit ../unknown.diff
   applying ../unknown.diff
   abort: cannot create copied: destination already exists
-  [255]
+  [20]
 
   $ cd ..
--- a/tests/test-import.t	Mon Dec 06 10:08:04 2021 +0100
+++ b/tests/test-import.t	Wed Dec 08 10:14:37 2021 +0100
@@ -234,7 +234,8 @@
   $ hg --cwd b import -mpatch ../broken.patch
   applying ../broken.patch
   abort: bad hunk #1
-  [255]
+  (check that whitespace in the patch has not been mangled)
+  [10]
   $ rm -r b
 
 hg -R repo import
@@ -834,7 +835,7 @@
   Hunk #1 FAILED at 0
   1 out of 1 hunks FAILED -- saving rejects to file a.rej
   abort: patch failed to apply
-  [255]
+  [20]
   $ hg import --no-commit -v fuzzy-tip.patch
   applying fuzzy-tip.patch
   patching file a
@@ -853,7 +854,7 @@
   Hunk #1 FAILED at 0
   1 out of 1 hunks FAILED -- saving rejects to file a.rej
   abort: patch failed to apply
-  [255]
+  [20]
   $ hg up -qC
   $ hg import --config patch.fuzz=2 --exact fuzzy-reparent.patch
   applying fuzzy-reparent.patch
@@ -1084,7 +1085,7 @@
   > EOF
   applying patch from stdin
   abort: path contains illegal component: ../outside/foo
-  [255]
+  [10]
   $ cd ..
 
 
@@ -2054,7 +2055,7 @@
   (use '--prefix' to apply patch relative to the current directory)
   1 out of 1 hunks FAILED -- saving rejects to file file1.rej
   abort: patch failed to apply
-  [255]
+  [20]
 
 test import crash (issue5375)
   $ cd ..
@@ -2064,7 +2065,7 @@
   applying patch from stdin
   a not tracked!
   abort: source file 'a' does not exist
-  [255]
+  [20]
 
 test immature end of hunk
 
@@ -2076,7 +2077,8 @@
   > EOF
   applying patch from stdin
   abort: bad hunk #1: incomplete hunk
-  [255]
+  (check that whitespace in the patch has not been mangled)
+  [10]
 
   $ hg import - <<'EOF'
   > diff --git a/foo b/foo
@@ -2087,4 +2089,5 @@
   > EOF
   applying patch from stdin
   abort: bad hunk #1: incomplete hunk
-  [255]
+  (check that whitespace in the patch has not been mangled)
+  [10]
--- a/tests/test-infinitepush-ci.t	Mon Dec 06 10:08:04 2021 +0100
+++ b/tests/test-infinitepush-ci.t	Wed Dec 08 10:14:37 2021 +0100
@@ -204,7 +204,7 @@
   $ hg pull -r b4e4bce660512ad3e71189e14588a70ac8e31fef
   pulling from $TESTTMP/repo
   abort: unknown revision 'b4e4bce660512ad3e71189e14588a70ac8e31fef'
-  [255]
+  [10]
   $ hg glog
   o  1:6cb0989601f1 added a
   |  public
--- a/tests/test-issue6528.t	Mon Dec 06 10:08:04 2021 +0100
+++ b/tests/test-issue6528.t	Wed Dec 08 10:14:37 2021 +0100
@@ -2,6 +2,10 @@
 Test non-regression on the corruption associated with issue6528
 ===============================================================
 
+TODO: fix rhg bugs that make this test fail when status is enabled
+  $ unset RHG_STATUS
+
+
 Setup
 =====
 
--- a/tests/test-issue672.t	Mon Dec 06 10:08:04 2021 +0100
+++ b/tests/test-issue672.t	Wed Dec 08 10:14:37 2021 +0100
@@ -65,7 +65,7 @@
    ancestor: c64f439569a9, local: f4a9cff3cd0b+, remote: 746e9549ea96
   starting 4 threads for background file closing (?)
    preserving 1a for resolve of 1a
-   1a: local copied/moved from 1 -> m (premerge)
+   1a: local copied/moved from 1 -> m
   picked tool ':merge' for 1a (binary False symlink False changedelete False)
   merging 1a and 1 to 1a
   my 1a@f4a9cff3cd0b+ other 1@746e9549ea96 ancestor 1@c64f439569a9
@@ -89,7 +89,7 @@
   starting 4 threads for background file closing (?)
    preserving 1 for resolve of 1a
   removing 1
-   1a: remote moved from 1 -> m (premerge)
+   1a: remote moved from 1 -> m
   picked tool ':merge' for 1a (binary False symlink False changedelete False)
   merging 1 and 1a to 1a
   my 1a@746e9549ea96+ other 1a@f4a9cff3cd0b ancestor 1@c64f439569a9
--- a/tests/test-largefiles-misc.t	Mon Dec 06 10:08:04 2021 +0100
+++ b/tests/test-largefiles-misc.t	Wed Dec 08 10:14:37 2021 +0100
@@ -41,7 +41,7 @@
   > EOF
 
   $ hg config extensions
-  \*\*\* failed to import extension largefiles from missing.py: [Errno *] $ENOENT$: 'missing.py' (glob)
+  \*\*\* failed to import extension "largefiles" from missing.py: [Errno *] $ENOENT$: 'missing.py' (glob)
   abort: repository requires features unknown to this Mercurial: largefiles
   (see https://mercurial-scm.org/wiki/MissingRequirement for more information)
   [255]
--- a/tests/test-largefiles-update.t	Mon Dec 06 10:08:04 2021 +0100
+++ b/tests/test-largefiles-update.t	Wed Dec 08 10:14:37 2021 +0100
@@ -68,20 +68,39 @@
 A linear merge will update standins before performing the actual merge. It will
 do a lfdirstate status walk and find 'unset'/'unsure' files, hash them, and
 update the corresponding standins.
+
 Verify that it actually marks the clean files as clean in lfdirstate so
 we don't have to hash them again next time we update.
 
+# note:
+#    We do this less aggressively now, to avoid race conditions; however, the
+#    cache is properly set after the next status.
+#
+#    The "changed" output is marked as missing-correct-output/known-bad-output
+#    for clarity.
+
   $ hg up
   0 files updated, 0 files merged, 0 files removed, 0 files unresolved
   updated to "f74e50bd9e55: #2"
   1 other heads for branch "default"
   $ hg debugdirstate --large --nodate
+  n 644          7 set                 large1 (missing-correct-output !)
+  n 644         13 set                 large2 (missing-correct-output !)
+  n   0         -1 unset               large1 (known-bad-output !)
+  n   0         -1 unset               large2 (known-bad-output !)
+  $ sleep 1 # so that mtimes are not ambiguous
+  $ hg status
+  $ hg debugdirstate --large --nodate
   n 644          7 set                 large1
   n 644         13 set                 large2
 
 Test that lfdirstate keeps track of last modification of largefiles and
 prevents unnecessary hashing of content - also after linear/noop update
 
+(XXX Since there is a possible race during update, we only do this after the next
+status call; this is slower, but more correct)
+
   $ sleep 1
   $ hg st
   $ hg debugdirstate --large --nodate
@@ -92,6 +111,13 @@
   updated to "f74e50bd9e55: #2"
   1 other heads for branch "default"
   $ hg debugdirstate --large --nodate
+  n 644          7 set                 large1 (missing-correct-output !)
+  n 644         13 set                 large2 (missing-correct-output !)
+  n   0         -1 unset               large1 (known-bad-output !)
+  n   0         -1 unset               large2 (known-bad-output !)
+  $ sleep 1 # so that mtimes are not ambiguous
+  $ hg status
+  $ hg debugdirstate --large --nodate
   n 644          7 set                 large1
   n 644         13 set                 large2
 
--- a/tests/test-lfs-largefiles.t	Mon Dec 06 10:08:04 2021 +0100
+++ b/tests/test-lfs-largefiles.t	Wed Dec 08 10:14:37 2021 +0100
@@ -345,7 +345,7 @@
 breaks you can get 1048576 lines of +y in the output, which takes a looooooong
 time to print.
   $ hg diff -r 2:3 | head -n 20
-  $ hg diff -r 2:6
+  $ hg diff -r 2:6 | head -n 20
   diff -r e989d0fa3764 -r 752e3a0d8488 large.bin
   --- a/large.bin	Thu Jan 01 00:00:00 1970 +0000
   +++ b/large.bin	Thu Jan 01 00:00:00 1970 +0000
--- a/tests/test-lfs.t	Mon Dec 06 10:08:04 2021 +0100
+++ b/tests/test-lfs.t	Wed Dec 08 10:14:37 2021 +0100
@@ -40,7 +40,7 @@
   > EOF
 
   $ hg config extensions
-  \*\*\* failed to import extension lfs from missing.py: [Errno *] $ENOENT$: 'missing.py' (glob)
+  \*\*\* failed to import extension "lfs" from missing.py: [Errno *] $ENOENT$: 'missing.py' (glob)
   abort: repository requires features unknown to this Mercurial: lfs
   (see https://mercurial-scm.org/wiki/MissingRequirement for more information)
   [255]
--- a/tests/test-log-bookmark.t	Mon Dec 06 10:08:04 2021 +0100
+++ b/tests/test-log-bookmark.t	Wed Dec 08 10:14:37 2021 +0100
@@ -189,10 +189,10 @@
 
   $ hg log -B unknown
   abort: bookmark 'unknown' does not exist
-  [255]
+  [10]
 
 Shouldn't accept string-matcher syntax:
 
   $ hg log -B 're:.*'
   abort: bookmark 're:.*' does not exist
-  [255]
+  [10]
--- a/tests/test-log.t	Mon Dec 06 10:08:04 2021 +0100
+++ b/tests/test-log.t	Wed Dec 08 10:14:37 2021 +0100
@@ -1417,7 +1417,7 @@
 
   $ hg log -b 're:.*'
   abort: unknown revision 're:.*'
-  [255]
+  [10]
   $ hg log -k 're:.*'
   $ hg log -u 're:.*'
 
@@ -1544,7 +1544,7 @@
 
   $ hg log -b dummy
   abort: unknown revision 'dummy'
-  [255]
+  [10]
 
 
 log -b .
--- a/tests/test-merge-commit.t	Mon Dec 06 10:08:04 2021 +0100
+++ b/tests/test-merge-commit.t	Wed Dec 08 10:14:37 2021 +0100
@@ -72,7 +72,7 @@
    ancestor: 0f2ff26688b9, local: 2263c1be0967+, remote: 0555950ead28
   starting 4 threads for background file closing (?)
    preserving bar for resolve of bar
-   bar: versions differ -> m (premerge)
+   bar: versions differ -> m
   picked tool ':merge' for bar (binary False symlink False changedelete False)
   merging bar
   my bar@2263c1be0967+ other bar@0555950ead28 ancestor bar@0f2ff26688b9
@@ -159,7 +159,7 @@
    ancestor: 0f2ff26688b9, local: 2263c1be0967+, remote: 3ffa6b9e35f0
   starting 4 threads for background file closing (?)
    preserving bar for resolve of bar
-   bar: versions differ -> m (premerge)
+   bar: versions differ -> m
   picked tool ':merge' for bar (binary False symlink False changedelete False)
   merging bar
   my bar@2263c1be0967+ other bar@3ffa6b9e35f0 ancestor bar@0f2ff26688b9
--- a/tests/test-merge-criss-cross.t	Mon Dec 06 10:08:04 2021 +0100
+++ b/tests/test-merge-criss-cross.t	Wed Dec 08 10:14:37 2021 +0100
@@ -93,13 +93,10 @@
    f1: remote is newer -> g
   getting f1
    preserving f2 for resolve of f2
-   f2: versions differ -> m (premerge)
+   f2: versions differ -> m
   picked tool ':dump' for f2 (binary False symlink False changedelete False)
   merging f2
   my f2@3b08d01b0ab5+ other f2@adfe50279922 ancestor f2@0f6b37dbe527
-   f2: versions differ -> m (merge)
-  picked tool ':dump' for f2 (binary False symlink False changedelete False)
-  my f2@3b08d01b0ab5+ other f2@adfe50279922 ancestor f2@0f6b37dbe527
   1 files updated, 0 files merged, 0 files removed, 1 files unresolved
   use 'hg resolve' to retry unresolved file merges or 'hg merge --abort' to abandon
   [1]
--- a/tests/test-merge-exec.t	Mon Dec 06 10:08:04 2021 +0100
+++ b/tests/test-merge-exec.t	Wed Dec 08 10:14:37 2021 +0100
@@ -4,7 +4,6 @@
 
 #require execbit
 
-
 Initial setup
 ==============
 
--- a/tests/test-merge-force.t	Mon Dec 06 10:08:04 2021 +0100
+++ b/tests/test-merge-force.t	Wed Dec 08 10:14:37 2021 +0100
@@ -218,27 +218,27 @@
   You can use (c)hanged version, leave (d)eleted, or leave (u)nresolved.
   What do you want to do? u
   merging content1_content2_content1_content4-tracked
+  warning: conflicts while merging content1_content2_content1_content4-tracked! (edit, then use 'hg resolve --mark')
   merging content1_content2_content2_content1-tracked
   merging content1_content2_content2_content4-tracked
+  warning: conflicts while merging content1_content2_content2_content4-tracked! (edit, then use 'hg resolve --mark')
   merging content1_content2_content3_content1-tracked
   merging content1_content2_content3_content3-tracked
+  warning: conflicts while merging content1_content2_content3_content3-tracked! (edit, then use 'hg resolve --mark')
   merging content1_content2_content3_content4-tracked
+  warning: conflicts while merging content1_content2_content3_content4-tracked! (edit, then use 'hg resolve --mark')
   merging content1_content2_missing_content1-tracked
   merging content1_content2_missing_content4-tracked
+  warning: conflicts while merging content1_content2_missing_content4-tracked! (edit, then use 'hg resolve --mark')
   merging missing_content2_content2_content4-tracked
+  warning: conflicts while merging missing_content2_content2_content4-tracked! (edit, then use 'hg resolve --mark')
   merging missing_content2_content3_content3-tracked
+  warning: conflicts while merging missing_content2_content3_content3-tracked! (edit, then use 'hg resolve --mark')
   merging missing_content2_content3_content4-tracked
+  warning: conflicts while merging missing_content2_content3_content4-tracked! (edit, then use 'hg resolve --mark')
   merging missing_content2_missing_content4-tracked
+  warning: conflicts while merging missing_content2_missing_content4-tracked! (edit, then use 'hg resolve --mark')
   merging missing_content2_missing_content4-untracked
-  warning: conflicts while merging content1_content2_content1_content4-tracked! (edit, then use 'hg resolve --mark')
-  warning: conflicts while merging content1_content2_content2_content4-tracked! (edit, then use 'hg resolve --mark')
-  warning: conflicts while merging content1_content2_content3_content3-tracked! (edit, then use 'hg resolve --mark')
-  warning: conflicts while merging content1_content2_content3_content4-tracked! (edit, then use 'hg resolve --mark')
-  warning: conflicts while merging content1_content2_missing_content4-tracked! (edit, then use 'hg resolve --mark')
-  warning: conflicts while merging missing_content2_content2_content4-tracked! (edit, then use 'hg resolve --mark')
-  warning: conflicts while merging missing_content2_content3_content3-tracked! (edit, then use 'hg resolve --mark')
-  warning: conflicts while merging missing_content2_content3_content4-tracked! (edit, then use 'hg resolve --mark')
-  warning: conflicts while merging missing_content2_missing_content4-tracked! (edit, then use 'hg resolve --mark')
   warning: conflicts while merging missing_content2_missing_content4-untracked! (edit, then use 'hg resolve --mark')
   18 files updated, 3 files merged, 8 files removed, 35 files unresolved
   use 'hg resolve' to retry unresolved file merges or 'hg merge --abort' to abandon
@@ -735,6 +735,7 @@
   You can use (c)hanged version, leave (d)eleted, or leave (u)nresolved.
   What do you want to do? u
   merging content1_content2_content1_content4-tracked
+  warning: conflicts while merging content1_content2_content1_content4-tracked! (edit, then use 'hg resolve --mark')
   file 'content1_content2_content1_content4-untracked' was deleted in local [working copy] but was modified in other [merge rev].
   You can use (c)hanged version, leave (d)eleted, or leave (u)nresolved.
   What do you want to do? u
@@ -752,6 +753,7 @@
   You can use (c)hanged version, leave (d)eleted, or leave (u)nresolved.
   What do you want to do? u
   merging content1_content2_content2_content4-tracked
+  warning: conflicts while merging content1_content2_content2_content4-tracked! (edit, then use 'hg resolve --mark')
   file 'content1_content2_content2_content4-untracked' was deleted in local [working copy] but was modified in other [merge rev].
   You can use (c)hanged version, leave (d)eleted, or leave (u)nresolved.
   What do you want to do? u
@@ -769,10 +771,12 @@
   You can use (c)hanged version, leave (d)eleted, or leave (u)nresolved.
   What do you want to do? u
   merging content1_content2_content3_content3-tracked
+  warning: conflicts while merging content1_content2_content3_content3-tracked! (edit, then use 'hg resolve --mark')
   file 'content1_content2_content3_content3-untracked' was deleted in local [working copy] but was modified in other [merge rev].
   You can use (c)hanged version, leave (d)eleted, or leave (u)nresolved.
   What do you want to do? u
   merging content1_content2_content3_content4-tracked
+  warning: conflicts while merging content1_content2_content3_content4-tracked! (edit, then use 'hg resolve --mark')
   file 'content1_content2_content3_content4-untracked' was deleted in local [working copy] but was modified in other [merge rev].
   You can use (c)hanged version, leave (d)eleted, or leave (u)nresolved.
   What do you want to do? u
@@ -790,6 +794,7 @@
   You can use (c)hanged version, leave (d)eleted, or leave (u)nresolved.
   What do you want to do? u
   merging content1_content2_missing_content4-tracked
+  warning: conflicts while merging content1_content2_missing_content4-tracked! (edit, then use 'hg resolve --mark')
   file 'content1_content2_missing_content4-untracked' was deleted in local [working copy] but was modified in other [merge rev].
   You can use (c)hanged version, leave (d)eleted, or leave (u)nresolved.
   What do you want to do? u
@@ -812,19 +817,14 @@
   You can use (c)hanged version, (d)elete, or leave (u)nresolved.
   What do you want to do? u
   merging missing_content2_content2_content4-tracked
+  warning: conflicts while merging missing_content2_content2_content4-tracked! (edit, then use 'hg resolve --mark')
   merging missing_content2_content3_content3-tracked
+  warning: conflicts while merging missing_content2_content3_content3-tracked! (edit, then use 'hg resolve --mark')
   merging missing_content2_content3_content4-tracked
+  warning: conflicts while merging missing_content2_content3_content4-tracked! (edit, then use 'hg resolve --mark')
   merging missing_content2_missing_content4-tracked
+  warning: conflicts while merging missing_content2_missing_content4-tracked! (edit, then use 'hg resolve --mark')
   merging missing_content2_missing_content4-untracked
-  warning: conflicts while merging content1_content2_content1_content4-tracked! (edit, then use 'hg resolve --mark')
-  warning: conflicts while merging content1_content2_content2_content4-tracked! (edit, then use 'hg resolve --mark')
-  warning: conflicts while merging content1_content2_content3_content3-tracked! (edit, then use 'hg resolve --mark')
-  warning: conflicts while merging content1_content2_content3_content4-tracked! (edit, then use 'hg resolve --mark')
-  warning: conflicts while merging content1_content2_missing_content4-tracked! (edit, then use 'hg resolve --mark')
-  warning: conflicts while merging missing_content2_content2_content4-tracked! (edit, then use 'hg resolve --mark')
-  warning: conflicts while merging missing_content2_content3_content3-tracked! (edit, then use 'hg resolve --mark')
-  warning: conflicts while merging missing_content2_content3_content4-tracked! (edit, then use 'hg resolve --mark')
-  warning: conflicts while merging missing_content2_missing_content4-tracked! (edit, then use 'hg resolve --mark')
   warning: conflicts while merging missing_content2_missing_content4-untracked! (edit, then use 'hg resolve --mark')
   [1]
   $ checkstatus > $TESTTMP/status2 2>&1
--- a/tests/test-merge-halt.t	Mon Dec 06 10:08:04 2021 +0100
+++ b/tests/test-merge-halt.t	Wed Dec 08 10:14:37 2021 +0100
@@ -24,8 +24,8 @@
   $ hg rebase -s 1 -d 2 --tool false
   rebasing 1:1f28a51c3c9b "c"
   merging a
+  merging a failed!
   merging b
-  merging a failed!
   merging b failed!
   unresolved conflicts (see 'hg resolve', then 'hg rebase --continue')
   [240]
@@ -42,7 +42,6 @@
   $ hg rebase -s 1 -d 2 --tool false
   rebasing 1:1f28a51c3c9b "c"
   merging a
-  merging b
   merging a failed!
   unresolved conflicts (see 'hg resolve', then 'hg rebase --continue')
   [240]
@@ -67,9 +66,9 @@
   > EOS
   rebasing 1:1f28a51c3c9b "c"
   merging a
-  merging b
   merging a failed!
   continue merge operation (yn)? y
+  merging b
   merging b failed!
   continue merge operation (yn)? n
   unresolved conflicts (see 'hg resolve', then 'hg rebase --continue')
@@ -94,9 +93,9 @@
   > EOS
   rebasing 1:1f28a51c3c9b "c"
   merging a
-  merging b
    output file a appears unchanged
   was merge successful (yn)? y
+  merging b
    output file b appears unchanged
   was merge successful (yn)? n
   merging b failed!
@@ -122,7 +121,6 @@
   $ hg rebase -s 1 -d 2 --tool true
   rebasing 1:1f28a51c3c9b "c"
   merging a
-  merging b
   merging a failed!
   unresolved conflicts (see 'hg resolve', then 'hg rebase --continue')
   [240]
@@ -141,8 +139,8 @@
   > EOS
   rebasing 1:1f28a51c3c9b "c"
   merging a
+  was merge of 'a' successful (yn)? y
   merging b
-  was merge of 'a' successful (yn)? y
   was merge of 'b' successful (yn)? n
   merging b failed!
   unresolved conflicts (see 'hg resolve', then 'hg rebase --continue')
@@ -159,8 +157,8 @@
   $ hg rebase -s 1 -d 2 --tool echo --keep --config merge-tools.echo.premerge=keep
   rebasing 1:1f28a51c3c9b "c"
   merging a
+  $TESTTMP/repo/a *a~base* *a~other* (glob)
   merging b
-  $TESTTMP/repo/a *a~base* *a~other* (glob)
   $TESTTMP/repo/b *b~base* *b~other* (glob)
 
 Check that unshelve isn't broken by halting the merge
@@ -187,7 +185,6 @@
   unshelving change 'default'
   rebasing shelved changes
   merging shelve_file1
-  merging shelve_file2
   merging shelve_file1 failed!
   unresolved conflicts (see 'hg resolve', then 'hg unshelve --continue')
   [240]
@@ -195,7 +192,6 @@
   M shelve_file1
   M shelve_file2
   ? shelve_file1.orig
-  ? shelve_file2.orig
   # The repository is in an unfinished *unshelve* state.
   
   # Unresolved merge conflicts:
@@ -210,7 +206,6 @@
   
   $ hg resolve --tool false --all --re-merge
   merging shelve_file1
-  merging shelve_file2
   merging shelve_file1 failed!
   merge halted after failed merge (see hg resolve)
   [240]
--- a/tests/test-merge-tools.t	Mon Dec 06 10:08:04 2021 +0100
+++ b/tests/test-merge-tools.t	Wed Dec 08 10:14:37 2021 +0100
@@ -578,7 +578,6 @@
   $ hg merge -r 2 --config merge-patterns.f=true --config merge-tools.true.executable=nonexistentmergetool
   couldn't find merge tool true (for pattern f)
   merging f
-  couldn't find merge tool true (for pattern f)
   merging f failed!
   0 files updated, 0 files merged, 0 files removed, 1 files unresolved
   use 'hg resolve' to retry unresolved file merges or 'hg merge --abort' to abandon
@@ -604,7 +603,6 @@
   $ hg merge -r 2 --config merge-patterns.f=true --config merge-tools.true.executable=/nonexistent/mergetool
   couldn't find merge tool true (for pattern f)
   merging f
-  couldn't find merge tool true (for pattern f)
   merging f failed!
   0 files updated, 0 files merged, 0 files removed, 1 files unresolved
   use 'hg resolve' to retry unresolved file merges or 'hg merge --abort' to abandon
@@ -1837,7 +1835,6 @@
   $ hg merge -y -r 2 --config ui.merge=missingbinary
   couldn't find merge tool missingbinary (for pattern f)
   merging f
-  couldn't find merge tool missingbinary (for pattern f)
   revision 1
   space
   revision 0
--- a/tests/test-merge-types.t	Mon Dec 06 10:08:04 2021 +0100
+++ b/tests/test-merge-types.t	Wed Dec 08 10:14:37 2021 +0100
@@ -34,7 +34,7 @@
    branchmerge: True, force: False, partial: False
    ancestor: c334dc3be0da, local: 521a1e40188f+, remote: 3574f3e69b1c
    preserving a for resolve of a
-   a: versions differ -> m (premerge)
+   a: versions differ -> m
   tool internal:merge (for pattern a) can't handle symlinks
   couldn't find merge tool hgmerge
   no tool found to merge a
@@ -68,7 +68,7 @@
    branchmerge: True, force: False, partial: False
    ancestor: c334dc3be0da, local: 3574f3e69b1c+, remote: 521a1e40188f
    preserving a for resolve of a
-   a: versions differ -> m (premerge)
+   a: versions differ -> m
   picked tool ':union' for a (binary False symlink True changedelete False)
   merging a
   my a@3574f3e69b1c+ other a@521a1e40188f ancestor a@c334dc3be0da
@@ -90,7 +90,7 @@
    branchmerge: True, force: False, partial: False
    ancestor: c334dc3be0da, local: 3574f3e69b1c+, remote: 521a1e40188f
    preserving a for resolve of a
-   a: versions differ -> m (premerge)
+   a: versions differ -> m
   picked tool ':merge3' for a (binary False symlink True changedelete False)
   merging a
   my a@3574f3e69b1c+ other a@521a1e40188f ancestor a@c334dc3be0da
@@ -112,7 +112,7 @@
    branchmerge: True, force: False, partial: False
    ancestor: c334dc3be0da, local: 3574f3e69b1c+, remote: 521a1e40188f
    preserving a for resolve of a
-   a: versions differ -> m (premerge)
+   a: versions differ -> m
   picked tool ':merge-local' for a (binary False symlink True changedelete False)
   merging a
   my a@3574f3e69b1c+ other a@521a1e40188f ancestor a@c334dc3be0da
@@ -133,7 +133,7 @@
    branchmerge: True, force: False, partial: False
    ancestor: c334dc3be0da, local: 3574f3e69b1c+, remote: 521a1e40188f
    preserving a for resolve of a
-   a: versions differ -> m (premerge)
+   a: versions differ -> m
   picked tool ':merge-other' for a (binary False symlink True changedelete False)
   merging a
   my a@3574f3e69b1c+ other a@521a1e40188f ancestor a@c334dc3be0da
@@ -166,7 +166,7 @@
    branchmerge: False, force: False, partial: False
    ancestor: c334dc3be0da, local: c334dc3be0da+, remote: 521a1e40188f
    preserving a for resolve of a
-   a: versions differ -> m (premerge)
+   a: versions differ -> m
   (couldn't find merge tool hgmerge|tool hgmerge can't handle symlinks) (re)
   no tool found to merge a
   picked tool ':prompt' for a (binary False symlink True changedelete False)
@@ -343,9 +343,12 @@
 
   $ hg merge
   merging a
+  warning: conflicts while merging a! (edit, then use 'hg resolve --mark')
   warning: cannot merge flags for b without common ancestor - keeping local flags
   merging b
+  warning: conflicts while merging b! (edit, then use 'hg resolve --mark')
   merging bx
+  warning: conflicts while merging bx! (edit, then use 'hg resolve --mark')
   warning: cannot merge flags for c without common ancestor - keeping local flags
   tool internal:merge (for pattern d) can't handle symlinks
   no tool found to merge d
@@ -362,9 +365,6 @@
   file 'h' needs to be resolved.
   You can keep (l)ocal [working copy], take (o)ther [merge rev], or leave (u)nresolved.
   What do you want to do? u
-  warning: conflicts while merging a! (edit, then use 'hg resolve --mark')
-  warning: conflicts while merging b! (edit, then use 'hg resolve --mark')
-  warning: conflicts while merging bx! (edit, then use 'hg resolve --mark')
   3 files updated, 0 files merged, 0 files removed, 6 files unresolved
   use 'hg resolve' to retry unresolved file merges or 'hg merge --abort' to abandon
   [1]
@@ -411,9 +411,12 @@
   $ hg up -Cqr1
   $ hg merge
   merging a
+  warning: conflicts while merging a! (edit, then use 'hg resolve --mark')
   warning: cannot merge flags for b without common ancestor - keeping local flags
   merging b
+  warning: conflicts while merging b! (edit, then use 'hg resolve --mark')
   merging bx
+  warning: conflicts while merging bx! (edit, then use 'hg resolve --mark')
   warning: cannot merge flags for c without common ancestor - keeping local flags
   tool internal:merge (for pattern d) can't handle symlinks
   no tool found to merge d
@@ -430,9 +433,6 @@
   file 'h' needs to be resolved.
   You can keep (l)ocal [working copy], take (o)ther [merge rev], or leave (u)nresolved.
   What do you want to do? u
-  warning: conflicts while merging a! (edit, then use 'hg resolve --mark')
-  warning: conflicts while merging b! (edit, then use 'hg resolve --mark')
-  warning: conflicts while merging bx! (edit, then use 'hg resolve --mark')
   3 files updated, 0 files merged, 0 files removed, 6 files unresolved
   use 'hg resolve' to retry unresolved file merges or 'hg merge --abort' to abandon
   [1]
--- a/tests/test-merge1.t	Mon Dec 06 10:08:04 2021 +0100
+++ b/tests/test-merge1.t	Wed Dec 08 10:14:37 2021 +0100
@@ -349,6 +349,10 @@
 aren't changed), even if none of mode, size and timestamp of them
 isn't changed on the filesystem (see also issue4583).
 
+This test is now "best effort": as the mechanisms to prevent such races get
+better, it gets more complicated to test a specific scenario that would
+trigger them. If you see flakiness here, there is a race.
+
   $ cat > $TESTTMP/abort.py <<EOF
   > from __future__ import absolute_import
   > # emulate aborting before "recordupdates()". in this case, files
@@ -365,13 +369,6 @@
   >     extensions.wrapfunction(merge, "applyupdates", applyupdates)
   > EOF
 
-  $ cat >> .hg/hgrc <<EOF
-  > [fakedirstatewritetime]
-  > # emulate invoking dirstate.write() via repo.status()
-  > # at 2000-01-01 00:00
-  > fakenow = 200001010000
-  > EOF
-
 (file gotten from other revision)
 
   $ hg update -q -C 2
@@ -381,12 +378,8 @@
   $ hg update -q -C 3
   $ cat b
   This is file b1
-  $ touch -t 200001010000 b
-  $ hg debugrebuildstate
-
   $ cat >> .hg/hgrc <<EOF
   > [extensions]
-  > fakedirstatewritetime = $TESTDIR/fakedirstatewritetime.py
   > abort = $TESTTMP/abort.py
   > EOF
   $ hg merge 5
@@ -394,13 +387,11 @@
   [255]
   $ cat >> .hg/hgrc <<EOF
   > [extensions]
-  > fakedirstatewritetime = !
   > abort = !
   > EOF
 
   $ cat b
   THIS IS FILE B5
-  $ touch -t 200001010000 b
   $ hg status -A b
   M b
 
@@ -413,12 +404,10 @@
 
   $ cat b
   this is file b6
-  $ touch -t 200001010000 b
-  $ hg debugrebuildstate
+  $ hg status
 
   $ cat >> .hg/hgrc <<EOF
   > [extensions]
-  > fakedirstatewritetime = $TESTDIR/fakedirstatewritetime.py
   > abort = $TESTTMP/abort.py
   > EOF
   $ hg merge --tool internal:other 5
@@ -426,13 +415,11 @@
   [255]
   $ cat >> .hg/hgrc <<EOF
   > [extensions]
-  > fakedirstatewritetime = !
   > abort = !
   > EOF
 
   $ cat b
   THIS IS FILE B5
-  $ touch -t 200001010000 b
   $ hg status -A b
   M b
 
--- a/tests/test-merge7.t	Mon Dec 06 10:08:04 2021 +0100
+++ b/tests/test-merge7.t	Wed Dec 08 10:14:37 2021 +0100
@@ -86,13 +86,10 @@
    ancestor: 96b70246a118, local: 50c3a7e29886+, remote: 40d11a4173a8
   starting 4 threads for background file closing (?)
    preserving test.txt for resolve of test.txt
-   test.txt: versions differ -> m (premerge)
+   test.txt: versions differ -> m
   picked tool ':merge' for test.txt (binary False symlink False changedelete False)
   merging test.txt
   my test.txt@50c3a7e29886+ other test.txt@40d11a4173a8 ancestor test.txt@96b70246a118
-   test.txt: versions differ -> m (merge)
-  picked tool ':merge' for test.txt (binary False symlink False changedelete False)
-  my test.txt@50c3a7e29886+ other test.txt@40d11a4173a8 ancestor test.txt@96b70246a118
   warning: conflicts while merging test.txt! (edit, then use 'hg resolve --mark')
   0 files updated, 0 files merged, 0 files removed, 1 files unresolved
   use 'hg resolve' to retry unresolved file merges or 'hg merge --abort' to abandon
--- a/tests/test-merge9.t	Mon Dec 06 10:08:04 2021 +0100
+++ b/tests/test-merge9.t	Wed Dec 08 10:14:37 2021 +0100
@@ -27,8 +27,8 @@
 test with the rename on the remote side
   $ HGMERGE=false hg merge
   merging bar
+  merging bar failed!
   merging foo and baz to baz
-  merging bar failed!
   1 files updated, 1 files merged, 0 files removed, 1 files unresolved
   use 'hg resolve' to retry unresolved file merges or 'hg merge --abort' to abandon
   [1]
@@ -41,8 +41,8 @@
   3 files updated, 0 files merged, 1 files removed, 0 files unresolved
   $ HGMERGE=false hg merge
   merging bar
+  merging bar failed!
   merging baz and foo to baz
-  merging bar failed!
   1 files updated, 1 files merged, 0 files removed, 1 files unresolved
   use 'hg resolve' to retry unresolved file merges or 'hg merge --abort' to abandon
   [1]
--- a/tests/test-narrow-expanddirstate.t	Mon Dec 06 10:08:04 2021 +0100
+++ b/tests/test-narrow-expanddirstate.t	Wed Dec 08 10:14:37 2021 +0100
@@ -142,7 +142,7 @@
   Hunk #1 FAILED at 0
   1 out of 1 hunks FAILED -- saving rejects to file patchdir/f3.rej
   abort: patch failed to apply
-  [255]
+  [20]
   $ hg tracked | grep patchdir
   [1]
   $ hg files | grep patchdir > /dev/null
--- a/tests/test-narrow-merge.t	Mon Dec 06 10:08:04 2021 +0100
+++ b/tests/test-narrow-merge.t	Wed Dec 08 10:14:37 2021 +0100
@@ -101,4 +101,4 @@
   $ hg merge 'desc("conflicting outside/f1")'
   abort: conflict in file 'outside/f1' is outside narrow clone (flat !)
   abort: conflict in file 'outside/' is outside narrow clone (tree !)
-  [255]
+  [20]
--- a/tests/test-narrow-rebase.t	Mon Dec 06 10:08:04 2021 +0100
+++ b/tests/test-narrow-rebase.t	Wed Dec 08 10:14:37 2021 +0100
@@ -96,4 +96,4 @@
   $ hg rebase -d 'desc("modify outside/f1")'
   rebasing 4:707c035aadb6 "conflicting outside/f1"
   abort: conflict in file 'outside/f1' is outside narrow clone
-  [255]
+  [20]
--- a/tests/test-obsolete-distributed.t	Mon Dec 06 10:08:04 2021 +0100
+++ b/tests/test-obsolete-distributed.t	Wed Dec 08 10:14:37 2021 +0100
@@ -570,7 +570,7 @@
   added 2 changesets with 0 changes to 2 files (+1 heads)
   (2 other changesets obsolete on arrival)
   abort: cannot update to target: filtered revision '6'
-  [255]
+  [10]
 
   $ cd ..
 
--- a/tests/test-permissions.t	Mon Dec 06 10:08:04 2021 +0100
+++ b/tests/test-permissions.t	Wed Dec 08 10:14:37 2021 +0100
@@ -11,6 +11,10 @@
   > EOF
 #endif
 
+TODO: fix rhg bugs that make this test fail when status is enabled
+  $ unset RHG_STATUS
+
+
   $ hg init t
   $ cd t
 
--- a/tests/test-pull-r.t	Mon Dec 06 10:08:04 2021 +0100
+++ b/tests/test-pull-r.t	Wed Dec 08 10:14:37 2021 +0100
@@ -112,7 +112,7 @@
 
   $ hg pull -qr missing ../repo
   abort: unknown revision 'missing'
-  [255]
+  [10]
 
 Pull multiple revisions with update:
 
--- a/tests/test-purge.t	Mon Dec 06 10:08:04 2021 +0100
+++ b/tests/test-purge.t	Wed Dec 08 10:14:37 2021 +0100
@@ -29,7 +29,7 @@
   $ hg st
   $ touch foo
   $ hg purge
-  permanently delete 1 unkown files? (yN) n
+  permanently delete 1 unknown files? (yN) n
   abort: removal cancelled
   [250]
   $ hg st
@@ -93,7 +93,7 @@
   untracked_file
   untracked_file_readonly
   $ hg purge --confirm
-  permanently delete 2 unkown files? (yN) n
+  permanently delete 2 unknown files? (yN) n
   abort: removal cancelled
   [250]
   $ hg purge -v
@@ -156,7 +156,7 @@
   $ hg purge -p ../untracked_directory
   untracked_directory/nested_directory
   $ hg purge --confirm
-  permanently delete 1 unkown files? (yN) n
+  permanently delete 1 unknown files? (yN) n
   abort: removal cancelled
   [250]
   $ hg purge -v ../untracked_directory
@@ -203,7 +203,7 @@
   ignored
   untracked_file
   $ hg purge --confirm --all
-  permanently delete 1 unkown and 1 ignored files? (yN) n
+  permanently delete 1 unknown and 1 ignored files? (yN) n
   abort: removal cancelled
   [250]
   $ hg purge -v --all
--- a/tests/test-qrecord.t	Mon Dec 06 10:08:04 2021 +0100
+++ b/tests/test-qrecord.t	Wed Dec 08 10:14:37 2021 +0100
@@ -117,7 +117,7 @@
 
   $ echo "mq=nonexistent" >> $HGRCPATH
   $ hg help qrecord
-  *** failed to import extension mq from nonexistent: [Errno *] * (glob)
+  *** failed to import extension "mq" from nonexistent: [Errno *] * (glob)
   hg qrecord [OPTION]... PATCH [FILE]...
   
   interactively record a new patch
--- a/tests/test-rebuildstate.t	Mon Dec 06 10:08:04 2021 +0100
+++ b/tests/test-rebuildstate.t	Wed Dec 08 10:14:37 2021 +0100
@@ -79,6 +79,7 @@
   $ touch foo bar qux
   $ hg add qux
   $ hg remove bar
+  $ sleep 1 # remove potential ambiguity in mtime
   $ hg status -A
   A qux
   R bar
@@ -106,6 +107,7 @@
   $ hg manifest
   bar
   foo
+  $ sleep 1 # remove potential ambiguity in mtime
   $ hg status -A
   A qux
   R bar
--- a/tests/test-remotefilelog-repack.t	Mon Dec 06 10:08:04 2021 +0100
+++ b/tests/test-remotefilelog-repack.t	Wed Dec 08 10:14:37 2021 +0100
@@ -307,7 +307,7 @@
   1 files fetched over 1 fetches - (1 misses, 0.00% hit ratio) over * (glob)
   $ hg prefetch -r 38
   abort: unknown revision '38'
-  [255]
+  [10]
   $ ls_l $TESTTMP/hgcache/master/packs/ | grep datapack
   -r--r--r--      70 052643fdcdebbd42d7c180a651a30d46098e6fe1.datapack
   $ ls_l $TESTTMP/hgcache/master/packs/ | grep histpack
--- a/tests/test-rename-dir-merge.t	Mon Dec 06 10:08:04 2021 +0100
+++ b/tests/test-rename-dir-merge.t	Wed Dec 08 10:14:37 2021 +0100
@@ -1,3 +1,7 @@
+TODO: fix rhg bugs that make this test fail when status is enabled
+  $ unset RHG_STATUS
+
+
   $ hg init t
   $ cd t
 
--- a/tests/test-rename-merge1.t	Mon Dec 06 10:08:04 2021 +0100
+++ b/tests/test-rename-merge1.t	Wed Dec 08 10:14:37 2021 +0100
@@ -44,7 +44,7 @@
   getting b2
    preserving a for resolve of b
   removing a
-   b: remote moved from a -> m (premerge)
+   b: remote moved from a -> m
   picked tool ':merge' for b (binary False symlink False changedelete False)
   merging a and b to b
   my b@044f8520aeeb+ other b@85c198ef2f6c ancestor a@af1939970a1c
@@ -218,7 +218,7 @@
    ancestor: 5151c134577e, local: 07fcbc9a74ed+, remote: f21419739508
   starting 4 threads for background file closing (?)
    preserving z for resolve of z
-   z: both renamed from y -> m (premerge)
+   z: both renamed from y -> m
   picked tool ':merge3' for z (binary False symlink False changedelete False)
   merging z
   my z@07fcbc9a74ed+ other z@f21419739508 ancestor y@5151c134577e
--- a/tests/test-rename-merge2.t	Mon Dec 06 10:08:04 2021 +0100
+++ b/tests/test-rename-merge2.t	Wed Dec 08 10:14:37 2021 +0100
@@ -88,18 +88,15 @@
   starting 4 threads for background file closing (?)
    preserving a for resolve of b
    preserving rev for resolve of rev
-   b: remote copied from a -> m (premerge)
+   b: remote copied from a -> m
   picked tool '* ../merge' for b (binary False symlink False changedelete False) (glob)
   merging a and b to b
   my b@e300d1c794ec+ other b@4ce40f5aca24 ancestor a@924404dff337
    premerge successful
-   rev: versions differ -> m (premerge)
+   rev: versions differ -> m
   picked tool '* ../merge' for rev (binary False symlink False changedelete False) (glob)
   merging rev
   my rev@e300d1c794ec+ other rev@4ce40f5aca24 ancestor rev@924404dff337
-   rev: versions differ -> m (merge)
-  picked tool '* ../merge' for rev (binary False symlink False changedelete False) (glob)
-  my rev@e300d1c794ec+ other rev@4ce40f5aca24 ancestor rev@924404dff337
   launching merge tool: * ../merge *$TESTTMP/t/t/rev* * * (glob)
   merge tool returned: 0
   0 files updated, 2 files merged, 0 files removed, 0 files unresolved
@@ -128,18 +125,15 @@
   getting a
    preserving b for resolve of b
    preserving rev for resolve of rev
-   b: local copied/moved from a -> m (premerge)
+   b: local copied/moved from a -> m
   picked tool '* ../merge' for b (binary False symlink False changedelete False) (glob)
   merging b and a to b
   my b@86a2aa42fc76+ other a@f4db7e329e71 ancestor a@924404dff337
    premerge successful
-   rev: versions differ -> m (premerge)
+   rev: versions differ -> m
   picked tool '* ../merge' for rev (binary False symlink False changedelete False) (glob)
   merging rev
   my rev@86a2aa42fc76+ other rev@f4db7e329e71 ancestor rev@924404dff337
-   rev: versions differ -> m (merge)
-  picked tool '* ../merge' for rev (binary False symlink False changedelete False) (glob)
-  my rev@86a2aa42fc76+ other rev@f4db7e329e71 ancestor rev@924404dff337
   launching merge tool: * ../merge *$TESTTMP/t/t/rev* * * (glob)
   merge tool returned: 0
   1 files updated, 2 files merged, 0 files removed, 0 files unresolved
@@ -168,18 +162,15 @@
    preserving a for resolve of b
    preserving rev for resolve of rev
   removing a
-   b: remote moved from a -> m (premerge)
+   b: remote moved from a -> m
   picked tool '* ../merge' for b (binary False symlink False changedelete False) (glob)
   merging a and b to b
   my b@e300d1c794ec+ other b@bdb19105162a ancestor a@924404dff337
    premerge successful
-   rev: versions differ -> m (premerge)
+   rev: versions differ -> m
   picked tool '* ../merge' for rev (binary False symlink False changedelete False) (glob)
   merging rev
   my rev@e300d1c794ec+ other rev@bdb19105162a ancestor rev@924404dff337
-   rev: versions differ -> m (merge)
-  picked tool '* ../merge' for rev (binary False symlink False changedelete False) (glob)
-  my rev@e300d1c794ec+ other rev@bdb19105162a ancestor rev@924404dff337
   launching merge tool: * ../merge *$TESTTMP/t/t/rev* * * (glob)
   merge tool returned: 0
   0 files updated, 2 files merged, 0 files removed, 0 files unresolved
@@ -206,18 +197,15 @@
   starting 4 threads for background file closing (?)
    preserving b for resolve of b
    preserving rev for resolve of rev
-   b: local copied/moved from a -> m (premerge)
+   b: local copied/moved from a -> m
   picked tool '* ../merge' for b (binary False symlink False changedelete False) (glob)
   merging b and a to b
   my b@02963e448370+ other a@f4db7e329e71 ancestor a@924404dff337
    premerge successful
-   rev: versions differ -> m (premerge)
+   rev: versions differ -> m
   picked tool '* ../merge' for rev (binary False symlink False changedelete False) (glob)
   merging rev
   my rev@02963e448370+ other rev@f4db7e329e71 ancestor rev@924404dff337
-   rev: versions differ -> m (merge)
-  picked tool '* ../merge' for rev (binary False symlink False changedelete False) (glob)
-  my rev@02963e448370+ other rev@f4db7e329e71 ancestor rev@924404dff337
   launching merge tool: * ../merge *$TESTTMP/t/t/rev* * * (glob)
   merge tool returned: 0
   0 files updated, 2 files merged, 0 files removed, 0 files unresolved
@@ -244,13 +232,10 @@
    b: remote created -> g
   getting b
    preserving rev for resolve of rev
-   rev: versions differ -> m (premerge)
+   rev: versions differ -> m
   picked tool '* ../merge' for rev (binary False symlink False changedelete False) (glob)
   merging rev
   my rev@94b33a1b7f2d+ other rev@4ce40f5aca24 ancestor rev@924404dff337
-   rev: versions differ -> m (merge)
-  picked tool '* ../merge' for rev (binary False symlink False changedelete False) (glob)
-  my rev@94b33a1b7f2d+ other rev@4ce40f5aca24 ancestor rev@924404dff337
   launching merge tool: * ../merge *$TESTTMP/t/t/rev* * * (glob)
   merge tool returned: 0
   1 files updated, 1 files merged, 0 files removed, 0 files unresolved
@@ -276,13 +261,10 @@
    ancestor: 924404dff337, local: 86a2aa42fc76+, remote: 97c705ade336
   starting 4 threads for background file closing (?)
    preserving rev for resolve of rev
-   rev: versions differ -> m (premerge)
+   rev: versions differ -> m
   picked tool '* ../merge' for rev (binary False symlink False changedelete False) (glob)
   merging rev
   my rev@86a2aa42fc76+ other rev@97c705ade336 ancestor rev@924404dff337
-   rev: versions differ -> m (merge)
-  picked tool '* ../merge' for rev (binary False symlink False changedelete False) (glob)
-  my rev@86a2aa42fc76+ other rev@97c705ade336 ancestor rev@924404dff337
   launching merge tool: * ../merge *$TESTTMP/t/t/rev* * * (glob)
   merge tool returned: 0
   0 files updated, 1 files merged, 0 files removed, 0 files unresolved
@@ -311,13 +293,10 @@
    b: remote created -> g
   getting b
    preserving rev for resolve of rev
-   rev: versions differ -> m (premerge)
+   rev: versions differ -> m
   picked tool '* ../merge' for rev (binary False symlink False changedelete False) (glob)
   merging rev
   my rev@94b33a1b7f2d+ other rev@bdb19105162a ancestor rev@924404dff337
-   rev: versions differ -> m (merge)
-  picked tool '* ../merge' for rev (binary False symlink False changedelete False) (glob)
-  my rev@94b33a1b7f2d+ other rev@bdb19105162a ancestor rev@924404dff337
   launching merge tool: * ../merge *$TESTTMP/t/t/rev* * * (glob)
   merge tool returned: 0
   1 files updated, 1 files merged, 1 files removed, 0 files unresolved
@@ -342,13 +321,10 @@
    ancestor: 924404dff337, local: 02963e448370+, remote: 97c705ade336
   starting 4 threads for background file closing (?)
    preserving rev for resolve of rev
-   rev: versions differ -> m (premerge)
+   rev: versions differ -> m
   picked tool '* ../merge' for rev (binary False symlink False changedelete False) (glob)
   merging rev
   my rev@02963e448370+ other rev@97c705ade336 ancestor rev@924404dff337
-   rev: versions differ -> m (merge)
-  picked tool '* ../merge' for rev (binary False symlink False changedelete False) (glob)
-  my rev@02963e448370+ other rev@97c705ade336 ancestor rev@924404dff337
   launching merge tool: * ../merge *$TESTTMP/t/t/rev* * * (glob)
   merge tool returned: 0
   0 files updated, 1 files merged, 0 files removed, 0 files unresolved
@@ -374,22 +350,16 @@
   starting 4 threads for background file closing (?)
    preserving b for resolve of b
    preserving rev for resolve of rev
-   b: both renamed from a -> m (premerge)
+   b: both renamed from a -> m
   picked tool '* ../merge' for b (binary False symlink False changedelete False) (glob)
   merging b
   my b@62e7bf090eba+ other b@49b6d8032493 ancestor a@924404dff337
-   rev: versions differ -> m (premerge)
+  launching merge tool: * ../merge *$TESTTMP/t/t/b* * * (glob)
+  merge tool returned: 0
+   rev: versions differ -> m
   picked tool '* ../merge' for rev (binary False symlink False changedelete False) (glob)
   merging rev
   my rev@62e7bf090eba+ other rev@49b6d8032493 ancestor rev@924404dff337
-   b: both renamed from a -> m (merge)
-  picked tool '* ../merge' for b (binary False symlink False changedelete False) (glob)
-  my b@62e7bf090eba+ other b@49b6d8032493 ancestor a@924404dff337
-  launching merge tool: * ../merge *$TESTTMP/t/t/b* * * (glob)
-  merge tool returned: 0
-   rev: versions differ -> m (merge)
-  picked tool '* ../merge' for rev (binary False symlink False changedelete False) (glob)
-  my rev@62e7bf090eba+ other rev@49b6d8032493 ancestor rev@924404dff337
   launching merge tool: * ../merge *$TESTTMP/t/t/rev* * * (glob)
   merge tool returned: 0
   0 files updated, 2 files merged, 0 files removed, 0 files unresolved
@@ -425,13 +395,10 @@
    c: remote created -> g
   getting c
    preserving rev for resolve of rev
-   rev: versions differ -> m (premerge)
+   rev: versions differ -> m
   picked tool '* ../merge' for rev (binary False symlink False changedelete False) (glob)
   merging rev
   my rev@02963e448370+ other rev@fe905ef2c33e ancestor rev@924404dff337
-   rev: versions differ -> m (merge)
-  picked tool '* ../merge' for rev (binary False symlink False changedelete False) (glob)
-  my rev@02963e448370+ other rev@fe905ef2c33e ancestor rev@924404dff337
   launching merge tool: * ../merge *$TESTTMP/t/t/rev* * * (glob)
   merge tool returned: 0
   1 files updated, 1 files merged, 0 files removed, 0 files unresolved
@@ -456,22 +423,16 @@
   starting 4 threads for background file closing (?)
    preserving b for resolve of b
    preserving rev for resolve of rev
-   b: both created -> m (premerge)
+   b: both created -> m
   picked tool '* ../merge' for b (binary False symlink False changedelete False) (glob)
   merging b
   my b@86a2aa42fc76+ other b@af30c7647fc7 ancestor b@000000000000
-   rev: versions differ -> m (premerge)
+  launching merge tool: * ../merge *$TESTTMP/t/t/b* * * (glob)
+  merge tool returned: 0
+   rev: versions differ -> m
   picked tool '* ../merge' for rev (binary False symlink False changedelete False) (glob)
   merging rev
   my rev@86a2aa42fc76+ other rev@af30c7647fc7 ancestor rev@924404dff337
-   b: both created -> m (merge)
-  picked tool '* ../merge' for b (binary False symlink False changedelete False) (glob)
-  my b@86a2aa42fc76+ other b@af30c7647fc7 ancestor b@000000000000
-  launching merge tool: * ../merge *$TESTTMP/t/t/b* * * (glob)
-  merge tool returned: 0
-   rev: versions differ -> m (merge)
-  picked tool '* ../merge' for rev (binary False symlink False changedelete False) (glob)
-  my rev@86a2aa42fc76+ other rev@af30c7647fc7 ancestor rev@924404dff337
   launching merge tool: * ../merge *$TESTTMP/t/t/rev* * * (glob)
   merge tool returned: 0
   0 files updated, 2 files merged, 0 files removed, 0 files unresolved
@@ -498,22 +459,16 @@
   starting 4 threads for background file closing (?)
    preserving b for resolve of b
    preserving rev for resolve of rev
-   b: both created -> m (premerge)
+   b: both created -> m
   picked tool '* ../merge' for b (binary False symlink False changedelete False) (glob)
   merging b
   my b@59318016310c+ other b@bdb19105162a ancestor b@000000000000
-   rev: versions differ -> m (premerge)
+  launching merge tool: * ../merge *$TESTTMP/t/t/b* * * (glob)
+  merge tool returned: 0
+   rev: versions differ -> m
   picked tool '* ../merge' for rev (binary False symlink False changedelete False) (glob)
   merging rev
   my rev@59318016310c+ other rev@bdb19105162a ancestor rev@924404dff337
-   b: both created -> m (merge)
-  picked tool '* ../merge' for b (binary False symlink False changedelete False) (glob)
-  my b@59318016310c+ other b@bdb19105162a ancestor b@000000000000
-  launching merge tool: * ../merge *$TESTTMP/t/t/b* * * (glob)
-  merge tool returned: 0
-   rev: versions differ -> m (merge)
-  picked tool '* ../merge' for rev (binary False symlink False changedelete False) (glob)
-  my rev@59318016310c+ other rev@bdb19105162a ancestor rev@924404dff337
   launching merge tool: * ../merge *$TESTTMP/t/t/rev* * * (glob)
   merge tool returned: 0
   0 files updated, 2 files merged, 1 files removed, 0 files unresolved
@@ -538,18 +493,15 @@
   getting a
    preserving b for resolve of b
    preserving rev for resolve of rev
-   b: both renamed from a -> m (premerge)
+   b: both renamed from a -> m
   picked tool '* ../merge' for b (binary False symlink False changedelete False) (glob)
   merging b
   my b@86a2aa42fc76+ other b@8dbce441892a ancestor a@924404dff337
    premerge successful
-   rev: versions differ -> m (premerge)
+   rev: versions differ -> m
   picked tool '* ../merge' for rev (binary False symlink False changedelete False) (glob)
   merging rev
   my rev@86a2aa42fc76+ other rev@8dbce441892a ancestor rev@924404dff337
-   rev: versions differ -> m (merge)
-  picked tool '* ../merge' for rev (binary False symlink False changedelete False) (glob)
-  my rev@86a2aa42fc76+ other rev@8dbce441892a ancestor rev@924404dff337
   launching merge tool: * ../merge *$TESTTMP/t/t/rev* * * (glob)
   merge tool returned: 0
   1 files updated, 2 files merged, 0 files removed, 0 files unresolved
@@ -576,22 +528,16 @@
   starting 4 threads for background file closing (?)
    preserving b for resolve of b
    preserving rev for resolve of rev
-   b: both created -> m (premerge)
+   b: both created -> m
   picked tool '* ../merge' for b (binary False symlink False changedelete False) (glob)
   merging b
   my b@59318016310c+ other b@bdb19105162a ancestor b@000000000000
-   rev: versions differ -> m (premerge)
+  launching merge tool: * ../merge *$TESTTMP/t/t/b* * * (glob)
+  merge tool returned: 0
+   rev: versions differ -> m
   picked tool '* ../merge' for rev (binary False symlink False changedelete False) (glob)
   merging rev
   my rev@59318016310c+ other rev@bdb19105162a ancestor rev@924404dff337
-   b: both created -> m (merge)
-  picked tool '* ../merge' for b (binary False symlink False changedelete False) (glob)
-  my b@59318016310c+ other b@bdb19105162a ancestor b@000000000000
-  launching merge tool: * ../merge *$TESTTMP/t/t/b* * * (glob)
-  merge tool returned: 0
-   rev: versions differ -> m (merge)
-  picked tool '* ../merge' for rev (binary False symlink False changedelete False) (glob)
-  my rev@59318016310c+ other rev@bdb19105162a ancestor rev@924404dff337
   launching merge tool: * ../merge *$TESTTMP/t/t/rev* * * (glob)
   merge tool returned: 0
   0 files updated, 2 files merged, 1 files removed, 0 files unresolved
@@ -616,18 +562,15 @@
   getting a
    preserving b for resolve of b
    preserving rev for resolve of rev
-   b: both renamed from a -> m (premerge)
+   b: both renamed from a -> m
   picked tool '* ../merge' for b (binary False symlink False changedelete False) (glob)
   merging b
   my b@86a2aa42fc76+ other b@8dbce441892a ancestor a@924404dff337
    premerge successful
-   rev: versions differ -> m (premerge)
+   rev: versions differ -> m
   picked tool '* ../merge' for rev (binary False symlink False changedelete False) (glob)
   merging rev
   my rev@86a2aa42fc76+ other rev@8dbce441892a ancestor rev@924404dff337
-   rev: versions differ -> m (merge)
-  picked tool '* ../merge' for rev (binary False symlink False changedelete False) (glob)
-  my rev@86a2aa42fc76+ other rev@8dbce441892a ancestor rev@924404dff337
   launching merge tool: * ../merge *$TESTTMP/t/t/rev* * * (glob)
   merge tool returned: 0
   1 files updated, 2 files merged, 0 files removed, 0 files unresolved
@@ -652,18 +595,15 @@
   starting 4 threads for background file closing (?)
    preserving b for resolve of b
    preserving rev for resolve of rev
-   b: both renamed from a -> m (premerge)
+   b: both renamed from a -> m
   picked tool '* ../merge' for b (binary False symlink False changedelete False) (glob)
   merging b
   my b@0b76e65c8289+ other b@4ce40f5aca24 ancestor a@924404dff337
    premerge successful
-   rev: versions differ -> m (premerge)
+   rev: versions differ -> m
   picked tool '* ../merge' for rev (binary False symlink False changedelete False) (glob)
   merging rev
   my rev@0b76e65c8289+ other rev@4ce40f5aca24 ancestor rev@924404dff337
-   rev: versions differ -> m (merge)
-  picked tool '* ../merge' for rev (binary False symlink False changedelete False) (glob)
-  my rev@0b76e65c8289+ other rev@4ce40f5aca24 ancestor rev@924404dff337
   launching merge tool: * ../merge *$TESTTMP/t/t/rev* * * (glob)
   merge tool returned: 0
   0 files updated, 2 files merged, 0 files removed, 0 files unresolved
@@ -688,18 +628,15 @@
   starting 4 threads for background file closing (?)
    preserving b for resolve of b
    preserving rev for resolve of rev
-   b: both renamed from a -> m (premerge)
+   b: both renamed from a -> m
   picked tool '* ../merge' for b (binary False symlink False changedelete False) (glob)
   merging b
   my b@02963e448370+ other b@8dbce441892a ancestor a@924404dff337
    premerge successful
-   rev: versions differ -> m (premerge)
+   rev: versions differ -> m
   picked tool '* ../merge' for rev (binary False symlink False changedelete False) (glob)
   merging rev
   my rev@02963e448370+ other rev@8dbce441892a ancestor rev@924404dff337
-   rev: versions differ -> m (merge)
-  picked tool '* ../merge' for rev (binary False symlink False changedelete False) (glob)
-  my rev@02963e448370+ other rev@8dbce441892a ancestor rev@924404dff337
   launching merge tool: * ../merge *$TESTTMP/t/t/rev* * * (glob)
   merge tool returned: 0
   0 files updated, 2 files merged, 0 files removed, 0 files unresolved
@@ -723,18 +660,15 @@
   starting 4 threads for background file closing (?)
    preserving b for resolve of b
    preserving rev for resolve of rev
-   b: both renamed from a -> m (premerge)
+   b: both renamed from a -> m
   picked tool '* ../merge' for b (binary False symlink False changedelete False) (glob)
   merging b
   my b@0b76e65c8289+ other b@bdb19105162a ancestor a@924404dff337
    premerge successful
-   rev: versions differ -> m (premerge)
+   rev: versions differ -> m
   picked tool '* ../merge' for rev (binary False symlink False changedelete False) (glob)
   merging rev
   my rev@0b76e65c8289+ other rev@bdb19105162a ancestor rev@924404dff337
-   rev: versions differ -> m (merge)
-  picked tool '* ../merge' for rev (binary False symlink False changedelete False) (glob)
-  my rev@0b76e65c8289+ other rev@bdb19105162a ancestor rev@924404dff337
   launching merge tool: * ../merge *$TESTTMP/t/t/rev* * * (glob)
   merge tool returned: 0
   0 files updated, 2 files merged, 0 files removed, 0 files unresolved
@@ -762,22 +696,16 @@
    preserving a for resolve of b
    preserving rev for resolve of rev
   removing a
-   b: remote moved from a -> m (premerge)
+   b: remote moved from a -> m
   picked tool '* ../merge' for b (binary False symlink False changedelete False) (glob)
   merging a and b to b
   my b@e300d1c794ec+ other b@49b6d8032493 ancestor a@924404dff337
-   rev: versions differ -> m (premerge)
+  launching merge tool: * ../merge *$TESTTMP/t/t/b* * * (glob)
+  merge tool returned: 0
+   rev: versions differ -> m
   picked tool '* ../merge' for rev (binary False symlink False changedelete False) (glob)
   merging rev
   my rev@e300d1c794ec+ other rev@49b6d8032493 ancestor rev@924404dff337
-   b: remote moved from a -> m (merge)
-  picked tool '* ../merge' for b (binary False symlink False changedelete False) (glob)
-  my b@e300d1c794ec+ other b@49b6d8032493 ancestor a@924404dff337
-  launching merge tool: * ../merge *$TESTTMP/t/t/b* * * (glob)
-  merge tool returned: 0
-   rev: versions differ -> m (merge)
-  picked tool '* ../merge' for rev (binary False symlink False changedelete False) (glob)
-  my rev@e300d1c794ec+ other rev@49b6d8032493 ancestor rev@924404dff337
   launching merge tool: * ../merge *$TESTTMP/t/t/rev* * * (glob)
   merge tool returned: 0
   0 files updated, 2 files merged, 0 files removed, 0 files unresolved
@@ -804,22 +732,16 @@
   starting 4 threads for background file closing (?)
    preserving b for resolve of b
    preserving rev for resolve of rev
-   b: local copied/moved from a -> m (premerge)
+   b: local copied/moved from a -> m
   picked tool '* ../merge' for b (binary False symlink False changedelete False) (glob)
   merging b and a to b
   my b@62e7bf090eba+ other a@f4db7e329e71 ancestor a@924404dff337
-   rev: versions differ -> m (premerge)
+  launching merge tool: * ../merge *$TESTTMP/t/t/b* * * (glob)
+  merge tool returned: 0
+   rev: versions differ -> m
   picked tool '* ../merge' for rev (binary False symlink False changedelete False) (glob)
   merging rev
   my rev@62e7bf090eba+ other rev@f4db7e329e71 ancestor rev@924404dff337
-   b: local copied/moved from a -> m (merge)
-  picked tool '* ../merge' for b (binary False symlink False changedelete False) (glob)
-  my b@62e7bf090eba+ other a@f4db7e329e71 ancestor a@924404dff337
-  launching merge tool: * ../merge *$TESTTMP/t/t/b* * * (glob)
-  merge tool returned: 0
-   rev: versions differ -> m (merge)
-  picked tool '* ../merge' for rev (binary False symlink False changedelete False) (glob)
-  my rev@62e7bf090eba+ other rev@f4db7e329e71 ancestor rev@924404dff337
   launching merge tool: * ../merge *$TESTTMP/t/t/rev* * * (glob)
   merge tool returned: 0
   0 files updated, 2 files merged, 0 files removed, 0 files unresolved
@@ -852,18 +774,15 @@
   getting c
    preserving b for resolve of b
    preserving rev for resolve of rev
-   b: local copied/moved from a -> m (premerge)
+   b: local copied/moved from a -> m
   picked tool '* ../merge' for b (binary False symlink False changedelete False) (glob)
   merging b and a to b
   my b@02963e448370+ other a@2b958612230f ancestor a@924404dff337
    premerge successful
-   rev: versions differ -> m (premerge)
+   rev: versions differ -> m
   picked tool '* ../merge' for rev (binary False symlink False changedelete False) (glob)
   merging rev
   my rev@02963e448370+ other rev@2b958612230f ancestor rev@924404dff337
-   rev: versions differ -> m (merge)
-  picked tool '* ../merge' for rev (binary False symlink False changedelete False) (glob)
-  my rev@02963e448370+ other rev@2b958612230f ancestor rev@924404dff337
   launching merge tool: * ../merge *$TESTTMP/t/t/rev* * * (glob)
   merge tool returned: 0
   1 files updated, 2 files merged, 0 files removed, 0 files unresolved
--- a/tests/test-rename.t	Mon Dec 06 10:08:04 2021 +0100
+++ b/tests/test-rename.t	Wed Dec 08 10:14:37 2021 +0100
@@ -610,7 +610,7 @@
 
   $ hg rename d1/d11/a1 .hg/foo
   abort: path contains illegal component: .hg/foo
-  [255]
+  [10]
   $ hg status -C
   $ hg rename d1/d11/a1 ../foo
   abort: ../foo not under root '$TESTTMP'
@@ -620,7 +620,7 @@
   $ mv d1/d11/a1 .hg/foo
   $ hg rename --after d1/d11/a1 .hg/foo
   abort: path contains illegal component: .hg/foo
-  [255]
+  [10]
   $ hg status -C
   ! d1/d11/a1
   $ hg update -C
@@ -629,11 +629,11 @@
 
   $ hg rename d1/d11/a1 .hg
   abort: path contains illegal component: .hg/a1
-  [255]
+  [10]
   $ hg --config extensions.largefiles= rename d1/d11/a1 .hg
   The fsmonitor extension is incompatible with the largefiles extension and has been disabled. (fsmonitor !)
   abort: path contains illegal component: .hg/a1
-  [255]
+  [10]
   $ hg status -C
   $ hg rename d1/d11/a1 ..
   abort: ../a1 not under root '$TESTTMP'
@@ -647,7 +647,7 @@
   $ mv d1/d11/a1 .hg
   $ hg rename --after d1/d11/a1 .hg
   abort: path contains illegal component: .hg/a1
-  [255]
+  [10]
   $ hg status -C
   ! d1/d11/a1
   $ hg update -C
@@ -656,7 +656,7 @@
 
   $ (cd d1/d11; hg rename ../../d2/b ../../.hg/foo)
   abort: path contains illegal component: .hg/foo
-  [255]
+  [10]
   $ hg status -C
   $ (cd d1/d11; hg rename ../../d2/b ../../../foo)
   abort: ../../../foo not under root '$TESTTMP'
--- a/tests/test-resolve.t	Mon Dec 06 10:08:04 2021 +0100
+++ b/tests/test-resolve.t	Wed Dec 08 10:14:37 2021 +0100
@@ -196,8 +196,8 @@
 resolve --all should re-merge all unresolved files
   $ hg resolve --all
   merging file1
+  warning: conflicts while merging file1! (edit, then use 'hg resolve --mark')
   merging file2
-  warning: conflicts while merging file1! (edit, then use 'hg resolve --mark')
   warning: conflicts while merging file2! (edit, then use 'hg resolve --mark')
   [1]
   $ cat file1.orig
@@ -211,8 +211,8 @@
   $ hg resolve --all --verbose --config 'ui.origbackuppath=.hg/origbackups'
   merging file1
   creating directory: $TESTTMP/repo/.hg/origbackups
+  warning: conflicts while merging file1! (edit, then use 'hg resolve --mark')
   merging file2
-  warning: conflicts while merging file1! (edit, then use 'hg resolve --mark')
   warning: conflicts while merging file2! (edit, then use 'hg resolve --mark')
   [1]
   $ ls .hg/origbackups
@@ -478,10 +478,10 @@
   $ hg rebase -s 1 -d 2
   rebasing 1:f30f98a8181f "added emp1 emp2 emp3"
   merging emp1
+  warning: conflicts while merging emp1! (edit, then use 'hg resolve --mark')
   merging emp2
+  warning: conflicts while merging emp2! (edit, then use 'hg resolve --mark')
   merging emp3
-  warning: conflicts while merging emp1! (edit, then use 'hg resolve --mark')
-  warning: conflicts while merging emp2! (edit, then use 'hg resolve --mark')
   warning: conflicts while merging emp3! (edit, then use 'hg resolve --mark')
   unresolved conflicts (see 'hg resolve', then 'hg rebase --continue')
   [240]
@@ -490,10 +490,10 @@
 ===========================================================
   $ hg resolve --all
   merging emp1
+  warning: conflicts while merging emp1! (edit, then use 'hg resolve --mark')
   merging emp2
+  warning: conflicts while merging emp2! (edit, then use 'hg resolve --mark')
   merging emp3
-  warning: conflicts while merging emp1! (edit, then use 'hg resolve --mark')
-  warning: conflicts while merging emp2! (edit, then use 'hg resolve --mark')
   warning: conflicts while merging emp3! (edit, then use 'hg resolve --mark')
   [1]
 
@@ -522,10 +522,10 @@
   > EOF
   re-merge all unresolved files (yn)? y
   merging emp1
+  warning: conflicts while merging emp1! (edit, then use 'hg resolve --mark')
   merging emp2
+  warning: conflicts while merging emp2! (edit, then use 'hg resolve --mark')
   merging emp3
-  warning: conflicts while merging emp1! (edit, then use 'hg resolve --mark')
-  warning: conflicts while merging emp2! (edit, then use 'hg resolve --mark')
   warning: conflicts while merging emp3! (edit, then use 'hg resolve --mark')
   [1]
 
--- a/tests/test-revert.t	Mon Dec 06 10:08:04 2021 +0100
+++ b/tests/test-revert.t	Wed Dec 08 10:14:37 2021 +0100
@@ -320,7 +320,7 @@
 
   $ hg mv --force a b/b
   $ hg revert b/b
-  $ hg status a b/b
+  $ hg status a b/b --copies
 
   $ cd ..
 
--- a/tests/test-revset.t	Mon Dec 06 10:08:04 2021 +0100
+++ b/tests/test-revset.t	Wed Dec 08 10:14:37 2021 +0100
@@ -306,7 +306,7 @@
     (negate
       (symbol 'a')))
   abort: unknown revision '-a'
-  [255]
+  [10]
   $ try é
   (symbol '\xc3\xa9')
   * set:
--- a/tests/test-revset2.t	Mon Dec 06 10:08:04 2021 +0100
+++ b/tests/test-revset2.t	Wed Dec 08 10:14:37 2021 +0100
@@ -870,7 +870,7 @@
   $ try m
   (symbol 'm')
   abort: unknown revision 'm'
-  [255]
+  [10]
 
   $ HGPLAINEXCEPT=revsetalias
   $ export HGPLAINEXCEPT
@@ -1061,7 +1061,7 @@
       (symbol 'max')
       (string '$1')))
   abort: unknown revision '$1'
-  [255]
+  [10]
 
 test scope of alias expansion: 'universe' is expanded prior to 'shadowall(0)',
 but 'all()' should never be substituted to '0()'.
@@ -1601,7 +1601,7 @@
   > EOF
 
   $ hg debugrevspec "custom1()"
-  *** failed to import extension custompredicate from $TESTTMP/custompredicate.py: intentional failure of loading extension
+  *** failed to import extension "custompredicate" from $TESTTMP/custompredicate.py: intentional failure of loading extension
   hg: parse error: unknown identifier: custom1
   [10]
 
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-rhg-sparse-narrow.t	Wed Dec 08 10:14:37 2021 +0100
@@ -0,0 +1,120 @@
+#require rhg
+
+  $ NO_FALLBACK="env RHG_ON_UNSUPPORTED=abort"
+
+Rhg works well when sparse working copy is enabled.
+
+  $ cd "$TESTTMP"
+  $ hg init repo-sparse
+  $ cd repo-sparse
+  $ cat > .hg/hgrc <<EOF
+  > [extensions]
+  > sparse=
+  > EOF
+
+  $ echo a > show
+  $ echo x > hide
+  $ mkdir dir1 dir2
+  $ echo x > dir1/x
+  $ echo y > dir1/y
+  $ echo z > dir2/z
+
+  $ hg ci -Aqm 'initial'
+  $ hg debugsparse --include 'show'
+  $ ls -A
+  .hg
+  show
+
+  $ tip=$(hg log -r . --template '{node}')
+  $ $NO_FALLBACK rhg files -r "$tip"
+  dir1/x
+  dir1/y
+  dir2/z
+  hide
+  show
+  $ $NO_FALLBACK rhg files
+  show
+
+  $ $NO_FALLBACK rhg cat -r "$tip" hide
+  x
+
+  $ cd ..
+
+We support most things when narrow is enabled, too, with a couple of caveats.
+
+  $ . "$TESTDIR/narrow-library.sh"
+  $ real_hg=$RHG_FALLBACK_EXECUTABLE
+
+  $ cat >> $HGRCPATH <<EOF
+  > [extensions]
+  > narrow=
+  > EOF
+
+  $ hg clone --narrow  ./repo-sparse repo-narrow --include dir1
+  requesting all changes
+  adding changesets
+  adding manifests
+  adding file changes
+  added 1 changesets with 2 changes to 2 files
+  new changesets 6d714a4a2998
+  updating to branch default
+  2 files updated, 0 files merged, 0 files removed, 0 files unresolved
+
+  $ cd repo-narrow
+
+  $ $NO_FALLBACK rhg cat -r "$tip" dir1/x
+  x
+  $ "$real_hg" cat -r "$tip" dir1/x
+  x
+
+TODO: bad error message
+
+  $ $NO_FALLBACK rhg cat -r "$tip" hide
+  abort: invalid revision identifier: 6d714a4a2998cbfd0620db44da58b749f6565d63
+  [255]
+  $ "$real_hg" cat -r "$tip" hide
+  [1]
+
+A naive implementation of [rhg files] leaks the paths that are supposed to be
+hidden by narrow, so we just fall back to hg.
+
+  $ $NO_FALLBACK rhg files -r "$tip"
+  unsupported feature: rhg files -r <rev> is not supported in narrow clones
+  [252]
+  $ "$real_hg" files -r "$tip"
+  dir1/x
+  dir1/y
+
+Hg status needs to do some filtering based on narrow spec, so we don't
+support it in rhg for narrow clones yet.
+
+  $ mkdir dir2
+  $ touch dir2/q
+  $ "$real_hg" status
+  $ $NO_FALLBACK rhg --config rhg.status=true status
+  unsupported feature: rhg status is not supported for sparse checkouts or narrow clones yet
+  [252]
+
+Adding "orphaned" index files:
+
+  $ (cd ..; cp repo-sparse/.hg/store/data/hide.i repo-narrow/.hg/store/data/hide.i)
+  $ (cd ..; mkdir repo-narrow/.hg/store/data/dir2; cp repo-sparse/.hg/store/data/dir2/z.i repo-narrow/.hg/store/data/dir2/z.i)
+  $ "$real_hg" verify
+  checking changesets
+  checking manifests
+  crosschecking files in changesets and manifests
+  checking files
+  checked 1 changesets with 2 changes to 2 files
+
+  $ "$real_hg" files -r "$tip"
+  dir1/x
+  dir1/y
+
+# TODO: even though [hg files] hides the orphaned dir2/z, [hg cat] still shows it.
+# rhg has the same issue, but at least it's not specific to rhg.
+# This is despite [hg verify] succeeding above.
+
+  $ $NO_FALLBACK rhg cat -r "$tip" dir2/z
+  z
+  $ "$real_hg" cat -r "$tip" dir2/z
+  z
--- a/tests/test-rhg.t	Mon Dec 06 10:08:04 2021 +0100
+++ b/tests/test-rhg.t	Wed Dec 08 10:14:37 2021 +0100
@@ -168,13 +168,12 @@
   $ rhg cat original --exclude="*.rs"
   original content
 
-  $ FALLBACK_EXE="$RHG_FALLBACK_EXECUTABLE"
-  $ unset RHG_FALLBACK_EXECUTABLE
-  $ rhg cat original --exclude="*.rs"
+  $ (unset RHG_FALLBACK_EXECUTABLE; rhg cat original --exclude="*.rs")
   abort: 'rhg.on-unsupported=fallback' without 'rhg.fallback-executable' set.
   [255]
-  $ RHG_FALLBACK_EXECUTABLE="$FALLBACK_EXE"
-  $ export RHG_FALLBACK_EXECUTABLE
+
+  $ (unset RHG_FALLBACK_EXECUTABLE; rhg cat original)
+  original content
 
   $ rhg cat original --exclude="*.rs" --config rhg.fallback-executable=false
   [1]
--- a/tests/test-shelve.t	Mon Dec 06 10:08:04 2021 +0100
+++ b/tests/test-shelve.t	Wed Dec 08 10:14:37 2021 +0100
@@ -1385,8 +1385,8 @@
   unshelving change 'default-01'
   rebasing shelved changes
   merging bar1
+  warning: conflicts while merging bar1! (edit, then use 'hg resolve --mark')
   merging bar2
-  warning: conflicts while merging bar1! (edit, then use 'hg resolve --mark')
   warning: conflicts while merging bar2! (edit, then use 'hg resolve --mark')
   unresolved conflicts (see 'hg resolve', then 'hg unshelve --continue')
   [240]
--- a/tests/test-sparse-profiles.t	Mon Dec 06 10:08:04 2021 +0100
+++ b/tests/test-sparse-profiles.t	Wed Dec 08 10:14:37 2021 +0100
@@ -128,8 +128,8 @@
   $ hg merge 1
   temporarily included 2 file(s) in the sparse checkout for merging
   merging backend.sparse
+  warning: conflicts while merging backend.sparse! (edit, then use 'hg resolve --mark')
   merging data.py
-  warning: conflicts while merging backend.sparse! (edit, then use 'hg resolve --mark')
   warning: conflicts while merging data.py! (edit, then use 'hg resolve --mark')
   0 files updated, 0 files merged, 0 files removed, 2 files unresolved
   use 'hg resolve' to retry unresolved file merges or 'hg merge --abort' to abandon
@@ -197,8 +197,8 @@
   rebasing 1:a2b1de640a62 "edit profile"
   temporarily included 2 file(s) in the sparse checkout for merging
   merging backend.sparse
+  warning: conflicts while merging backend.sparse! (edit, then use 'hg resolve --mark')
   merging data.py
-  warning: conflicts while merging backend.sparse! (edit, then use 'hg resolve --mark')
   warning: conflicts while merging data.py! (edit, then use 'hg resolve --mark')
   unresolved conflicts (see 'hg resolve', then 'hg rebase --continue')
   [240]
--- a/tests/test-static-http.t	Mon Dec 06 10:08:04 2021 +0100
+++ b/tests/test-static-http.t	Wed Dec 08 10:14:37 2021 +0100
@@ -95,7 +95,7 @@
   $ cd ..
   $ hg clone -r doesnotexist static-http://localhost:$HGPORT/remote local0
   abort: unknown revision 'doesnotexist'
-  [255]
+  [10]
   $ hg clone -r 0 static-http://localhost:$HGPORT/remote local0
   adding changesets
   adding manifests
--- a/tests/test-status-color.t	Mon Dec 06 10:08:04 2021 +0100
+++ b/tests/test-status-color.t	Wed Dec 08 10:14:37 2021 +0100
@@ -375,8 +375,8 @@
   created new head
   $ hg merge
   merging a
+  warning: conflicts while merging a! (edit, then use 'hg resolve --mark')
   merging b
-  warning: conflicts while merging a! (edit, then use 'hg resolve --mark')
   warning: conflicts while merging b! (edit, then use 'hg resolve --mark')
   0 files updated, 0 files merged, 0 files removed, 2 files unresolved
   use 'hg resolve' to retry unresolved file merges or 'hg merge --abort' to abandon
--- a/tests/test-status.t	Mon Dec 06 10:08:04 2021 +0100
+++ b/tests/test-status.t	Wed Dec 08 10:14:37 2021 +0100
@@ -9,6 +9,10 @@
   > EOF
 #endif
 
+TODO: fix rhg bugs that make this test fail when status is enabled
+  $ unset RHG_STATUS
+
+
   $ hg init repo1
   $ cd repo1
   $ mkdir a b a/1 b/1 b/2
@@ -218,6 +222,13 @@
   ! deleted
   ? unknown
 
+hg status -n:
+  $ env RHG_STATUS=1 RHG_ON_UNSUPPORTED=abort hg status -n
+  added
+  removed
+  deleted
+  unknown
+
 hg status modified added removed deleted unknown never-existed ignored:
 
   $ hg status modified added removed deleted unknown never-existed ignored
--- a/tests/test-subrepo-deep-nested-change.t	Mon Dec 06 10:08:04 2021 +0100
+++ b/tests/test-subrepo-deep-nested-change.t	Wed Dec 08 10:14:37 2021 +0100
@@ -1,3 +1,7 @@
+TODO: fix rhg bugs that make this test fail when status is enabled
+  $ unset RHG_STATUS
+
+
   $ cat >> $HGRCPATH <<EOF
   > [extdiff]
   > # for portability:
--- a/tests/test-subrepo-missing.t	Mon Dec 06 10:08:04 2021 +0100
+++ b/tests/test-subrepo-missing.t	Wed Dec 08 10:14:37 2021 +0100
@@ -1,3 +1,7 @@
+TODO: fix rhg bugs that make this test fail when status is enabled
+  $ unset RHG_STATUS
+
+
   $ hg init repo
   $ cd repo
   $ hg init subrepo
--- a/tests/test-subrepo.t	Mon Dec 06 10:08:04 2021 +0100
+++ b/tests/test-subrepo.t	Wed Dec 08 10:14:37 2021 +0100
@@ -278,7 +278,7 @@
    branchmerge: True, force: False, partial: False
    ancestor: 1f14a2e2d3ec, local: f0d2028bf86d+, remote: 1831e14459c4
   starting 4 threads for background file closing (?)
-   .hgsubstate: versions differ -> m (premerge)
+   .hgsubstate: versions differ -> m
   subrepo merge f0d2028bf86d+ 1831e14459c4 1f14a2e2d3ec
     subrepo t: other changed, get t:6747d179aa9a688023c4b0cad32e4c92bb7f34ad:hg
   getting subrepo t
@@ -304,7 +304,7 @@
    branchmerge: True, force: False, partial: False
    ancestor: 1831e14459c4, local: e45c8b14af55+, remote: f94576341bcf
   starting 4 threads for background file closing (?)
-   .hgsubstate: versions differ -> m (premerge)
+   .hgsubstate: versions differ -> m
   subrepo merge e45c8b14af55+ f94576341bcf 1831e14459c4
     subrepo t: both sides changed 
    subrepository t diverged (local revision: 20a0db6fbf6c, remote revision: 7af322bc1198)
@@ -317,13 +317,10 @@
    ancestor: 6747d179aa9a, local: 20a0db6fbf6c+, remote: 7af322bc1198
   starting 4 threads for background file closing (?)
    preserving t for resolve of t
-   t: versions differ -> m (premerge)
+   t: versions differ -> m
   picked tool ':merge' for t (binary False symlink False changedelete False)
   merging t
   my t@20a0db6fbf6c+ other t@7af322bc1198 ancestor t@6747d179aa9a
-   t: versions differ -> m (merge)
-  picked tool ':merge' for t (binary False symlink False changedelete False)
-  my t@20a0db6fbf6c+ other t@7af322bc1198 ancestor t@6747d179aa9a
   warning: conflicts while merging t! (edit, then use 'hg resolve --mark')
   0 files updated, 0 files merged, 0 files removed, 1 files unresolved
   use 'hg resolve' to retry unresolved file merges or 'hg merge --abort' to abandon
@@ -1021,37 +1018,21 @@
 
 test if untracked file is not overwritten
 
-(this also tests that updated .hgsubstate is treated as "modified",
-when 'merge.update()' is aborted before 'merge.recordupdates()', even
-if none of mode, size and timestamp of it isn't changed on the
-filesystem (see also issue4583))
+(this test also has a change that updates .hgsubstate and merges it within the
+same second. It should be marked as modified, even if none of its mode, size,
+or timestamp changed on the filesystem (see also issue4583))
 
   $ echo issue3276_ok > repo/s/b
   $ hg -R repo2 push -f -q
-  $ touch -t 200001010000 repo/.hgsubstate
 
-  $ cat >> repo/.hg/hgrc <<EOF
-  > [fakedirstatewritetime]
-  > # emulate invoking dirstate.write() via repo.status()
-  > # at 2000-01-01 00:00
-  > fakenow = 200001010000
-  > 
-  > [extensions]
-  > fakedirstatewritetime = $TESTDIR/fakedirstatewritetime.py
-  > EOF
   $ hg -R repo update
   b: untracked file differs
   abort: untracked files in working directory differ from files in requested revision (in subrepository "s")
   [255]
-  $ cat >> repo/.hg/hgrc <<EOF
-  > [extensions]
-  > fakedirstatewritetime = !
-  > EOF
 
   $ cat repo/s/b
   issue3276_ok
   $ rm repo/s/b
-  $ touch -t 200001010000 repo/.hgsubstate
   $ hg -R repo revert --all
   reverting repo/.hgsubstate
   reverting subrepo s
--- a/tests/test-template-functions.t	Mon Dec 06 10:08:04 2021 +0100
+++ b/tests/test-template-functions.t	Wed Dec 08 10:14:37 2021 +0100
@@ -1295,10 +1295,10 @@
   -1
   $ hg log -T '{revset("%d", rev + 1)}\n' -r'tip'
   abort: unknown revision '3'
-  [255]
+  [10]
   $ hg log -T '{revset("%d", rev - 1)}\n' -r'null'
   abort: unknown revision '-2'
-  [255]
+  [10]
 
 Invalid arguments passed to revset()
 
--- a/tests/test-transplant.t	Mon Dec 06 10:08:04 2021 +0100
+++ b/tests/test-transplant.t	Wed Dec 08 10:14:37 2021 +0100
@@ -1063,7 +1063,7 @@
   $ cat r1
   Y1
   $ hg debugstate | grep ' r1$'
-  n 644          3 unset               r1
+  n   0         -1 unset               r1
   $ hg status -A r1
   M r1
 
--- a/tests/test-up-local-change.t	Mon Dec 06 10:08:04 2021 +0100
+++ b/tests/test-up-local-change.t	Wed Dec 08 10:14:37 2021 +0100
@@ -46,13 +46,10 @@
    b: remote created -> g
   getting b
    preserving a for resolve of a
-   a: versions differ -> m (premerge)
+   a: versions differ -> m
   picked tool 'true' for a (binary False symlink False changedelete False)
   merging a
   my a@c19d34741b0a+ other a@1e71731e6fbb ancestor a@c19d34741b0a
-   a: versions differ -> m (merge)
-  picked tool 'true' for a (binary False symlink False changedelete False)
-  my a@c19d34741b0a+ other a@1e71731e6fbb ancestor a@c19d34741b0a
   launching merge tool: true *$TESTTMP/r2/a* * * (glob)
   merge tool returned: 0
   1 files updated, 1 files merged, 0 files removed, 0 files unresolved
@@ -72,13 +69,10 @@
   removing b
   starting 4 threads for background file closing (?)
    preserving a for resolve of a
-   a: versions differ -> m (premerge)
+   a: versions differ -> m
   picked tool 'true' for a (binary False symlink False changedelete False)
   merging a
   my a@1e71731e6fbb+ other a@c19d34741b0a ancestor a@1e71731e6fbb
-   a: versions differ -> m (merge)
-  picked tool 'true' for a (binary False symlink False changedelete False)
-  my a@1e71731e6fbb+ other a@c19d34741b0a ancestor a@1e71731e6fbb
   launching merge tool: true *$TESTTMP/r2/a* * * (glob)
   merge tool returned: 0
   0 files updated, 1 files merged, 1 files removed, 0 files unresolved
@@ -95,13 +89,10 @@
    b: remote created -> g
   getting b
    preserving a for resolve of a
-   a: versions differ -> m (premerge)
+   a: versions differ -> m
   picked tool 'true' for a (binary False symlink False changedelete False)
   merging a
   my a@c19d34741b0a+ other a@1e71731e6fbb ancestor a@c19d34741b0a
-   a: versions differ -> m (merge)
-  picked tool 'true' for a (binary False symlink False changedelete False)
-  my a@c19d34741b0a+ other a@1e71731e6fbb ancestor a@c19d34741b0a
   launching merge tool: true *$TESTTMP/r2/a* * * (glob)
   merge tool returned: 0
   1 files updated, 1 files merged, 0 files removed, 0 files unresolved
--- a/tests/test-update-branches.t	Mon Dec 06 10:08:04 2021 +0100
+++ b/tests/test-update-branches.t	Wed Dec 08 10:14:37 2021 +0100
@@ -158,47 +158,47 @@
   parent=3
   M sub/suba
 
-  $ revtest '-C dirty linear'   dirty 1 2 -C
+  $ revtest '--clean dirty linear'   dirty 1 2 --clean
   2 files updated, 0 files merged, 0 files removed, 0 files unresolved
   parent=2
 
-  $ revtest '-c dirty linear'   dirty 1 2 -c
+  $ revtest '--check dirty linear'   dirty 1 2 --check
   abort: uncommitted changes
   parent=1
   M foo
 
-  $ revtest '-m dirty linear'   dirty 1 2 -m
+  $ revtest '--merge dirty linear'   dirty 1 2 --merge
   1 files updated, 0 files merged, 0 files removed, 0 files unresolved
   parent=2
   M foo
 
-  $ revtest '-m dirty cross'  dirty 3 4 -m
+  $ revtest '--merge dirty cross'  dirty 3 4 --merge
   1 files updated, 0 files merged, 0 files removed, 0 files unresolved
   parent=4
   M foo
 
-  $ revtest '-c dirtysub linear'   dirtysub 1 2 -c
+  $ revtest '--check dirtysub linear'   dirtysub 1 2 --check
   abort: uncommitted changes in subrepository "sub"
   parent=1
   M sub/suba
 
-  $ norevtest '-c clean same'   clean 2 -c
+  $ norevtest '--check clean same'   clean 2 --check
   0 files updated, 0 files merged, 0 files removed, 0 files unresolved
   updated to "bd10386d478c: 2"
   1 other heads for branch "default"
   parent=2
 
-  $ revtest '-cC dirty linear'  dirty 1 2 -cC
+  $ revtest '--check --clean dirty linear'  dirty 1 2 "--check --clean"
   abort: cannot specify both --clean and --check
   parent=1
   M foo
 
-  $ revtest '-mc dirty linear'  dirty 1 2 -mc
+  $ revtest '--merge --check dirty linear'  dirty 1 2 "--merge --check"
   abort: cannot specify both --check and --merge
   parent=1
   M foo
 
-  $ revtest '-mC dirty linear'  dirty 1 2 -mC
+  $ revtest '--merge --clean dirty linear'  dirty 1 2 "--merge --clean"
   abort: cannot specify both --clean and --merge
   parent=1
   M foo
@@ -211,12 +211,27 @@
   parent=1
   M foo
 
-  $ revtest 'none dirty linear' dirty 1 2 -c
+  $ revtest 'none dirty linear' dirty 1 2 --check
+  abort: uncommitted changes
+  parent=1
+  M foo
+
+  $ revtest '--merge none dirty linear' dirty 1 2 --check
   abort: uncommitted changes
   parent=1
   M foo
 
-  $ revtest 'none dirty linear' dirty 1 2 -C
+  $ revtest '--merge none dirty linear' dirty 1 2 --merge
+  1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+  parent=2
+  M foo
+
+  $ revtest '--merge none dirty linear' dirty 1 2 --no-check
+  1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+  parent=2
+  M foo
+
+  $ revtest 'none dirty linear' dirty 1 2 --clean
   2 files updated, 0 files merged, 0 files removed, 0 files unresolved
   parent=2
 
@@ -232,12 +247,17 @@
   parent=2
   M foo
 
-  $ revtest 'none dirty linear' dirty 1 2 -c
+  $ revtest 'none dirty linear' dirty 1 2 --check
   abort: uncommitted changes
   parent=1
   M foo
 
-  $ revtest 'none dirty linear' dirty 1 2 -C
+  $ revtest 'none dirty linear' dirty 1 2 --no-merge
+  abort: uncommitted changes
+  parent=1
+  M foo
+
+  $ revtest 'none dirty linear' dirty 1 2 --clean
   2 files updated, 0 files merged, 0 files removed, 0 files unresolved
   parent=2
 
--- a/tests/test-walk.t	Mon Dec 06 10:08:04 2021 +0100
+++ b/tests/test-walk.t	Wed Dec 08 10:14:37 2021 +0100
@@ -299,10 +299,10 @@
   f  mammals/skunk                   skunk
   $ hg debugwalk -v .hg
   abort: path 'mammals/.hg' is inside nested repo 'mammals'
-  [255]
+  [10]
   $ hg debugwalk -v ../.hg
   abort: path contains illegal component: .hg
-  [255]
+  [10]
   $ cd ..
 
   $ hg debugwalk -v -Ibeans
@@ -410,16 +410,16 @@
   [255]
   $ hg debugwalk -v .hg
   abort: path contains illegal component: .hg
-  [255]
+  [10]
   $ hg debugwalk -v beans/../.hg
   abort: path contains illegal component: .hg
-  [255]
+  [10]
   $ hg debugwalk -v beans/../.hg/data
   abort: path contains illegal component: .hg/data
-  [255]
+  [10]
   $ hg debugwalk -v beans/.hg
   abort: path 'beans/.hg' is inside nested repo 'beans'
-  [255]
+  [10]
 
 Test explicit paths and excludes: